{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.06840324035921473,
  "eval_steps": 500,
  "global_step": 21000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0003257297159962606,
      "grad_norm": 2.2308592796325684,
      "learning_rate": 4.99853416853153e-05,
      "loss": 1.4483,
      "step": 100
    },
    {
      "epoch": 0.0006514594319925212,
      "grad_norm": 2.3997225761413574,
      "learning_rate": 4.996905466899897e-05,
      "loss": 1.3276,
      "step": 200
    },
    {
      "epoch": 0.0009771891479887819,
      "grad_norm": 1.4687339067459106,
      "learning_rate": 4.995276765268264e-05,
      "loss": 1.3394,
      "step": 300
    },
    {
      "epoch": 0.0013029188639850425,
      "grad_norm": 0.6583470702171326,
      "learning_rate": 4.993648063636631e-05,
      "loss": 1.3245,
      "step": 400
    },
    {
      "epoch": 0.0016286485799813031,
      "grad_norm": 1.6252340078353882,
      "learning_rate": 4.992019362004997e-05,
      "loss": 1.3249,
      "step": 500
    },
    {
      "epoch": 0.0019543782959775637,
      "grad_norm": 2.0806777477264404,
      "learning_rate": 4.9903906603733634e-05,
      "loss": 1.32,
      "step": 600
    },
    {
      "epoch": 0.002280108011973824,
      "grad_norm": 1.376539707183838,
      "learning_rate": 4.988761958741731e-05,
      "loss": 1.3133,
      "step": 700
    },
    {
      "epoch": 0.002605837727970085,
      "grad_norm": 2.234644889831543,
      "learning_rate": 4.987133257110097e-05,
      "loss": 1.3179,
      "step": 800
    },
    {
      "epoch": 0.0029315674439663454,
      "grad_norm": 1.4599684476852417,
      "learning_rate": 4.985504555478464e-05,
      "loss": 1.3097,
      "step": 900
    },
    {
      "epoch": 0.0032572971599626062,
      "grad_norm": 1.7078094482421875,
      "learning_rate": 4.9838758538468304e-05,
      "loss": 1.3083,
      "step": 1000
    },
    {
      "epoch": 0.0035830268759588666,
      "grad_norm": 0.6953567266464233,
      "learning_rate": 4.9822471522151976e-05,
      "loss": 1.3075,
      "step": 1100
    },
    {
      "epoch": 0.0039087565919551275,
      "grad_norm": 1.225602626800537,
      "learning_rate": 4.980618450583564e-05,
      "loss": 1.3054,
      "step": 1200
    },
    {
      "epoch": 0.004234486307951388,
      "grad_norm": 1.3010519742965698,
      "learning_rate": 4.978989748951931e-05,
      "loss": 1.3066,
      "step": 1300
    },
    {
      "epoch": 0.004560216023947648,
      "grad_norm": 0.6475724577903748,
      "learning_rate": 4.9773610473202974e-05,
      "loss": 1.3109,
      "step": 1400
    },
    {
      "epoch": 0.004885945739943909,
      "grad_norm": 1.046614646911621,
      "learning_rate": 4.975732345688664e-05,
      "loss": 1.3074,
      "step": 1500
    },
    {
      "epoch": 0.00521167545594017,
      "grad_norm": 1.113573670387268,
      "learning_rate": 4.974103644057031e-05,
      "loss": 1.3083,
      "step": 1600
    },
    {
      "epoch": 0.005537405171936431,
      "grad_norm": 1.4273550510406494,
      "learning_rate": 4.972474942425398e-05,
      "loss": 1.3018,
      "step": 1700
    },
    {
      "epoch": 0.005863134887932691,
      "grad_norm": 0.5519908666610718,
      "learning_rate": 4.970846240793764e-05,
      "loss": 1.2945,
      "step": 1800
    },
    {
      "epoch": 0.006188864603928952,
      "grad_norm": 0.6653416156768799,
      "learning_rate": 4.969217539162131e-05,
      "loss": 1.3004,
      "step": 1900
    },
    {
      "epoch": 0.0065145943199252125,
      "grad_norm": 0.732170581817627,
      "learning_rate": 4.9675888375304975e-05,
      "loss": 1.3014,
      "step": 2000
    },
    {
      "epoch": 0.006840324035921473,
      "grad_norm": 0.405608594417572,
      "learning_rate": 4.965960135898865e-05,
      "loss": 1.2939,
      "step": 2100
    },
    {
      "epoch": 0.007166053751917733,
      "grad_norm": 0.9849847555160522,
      "learning_rate": 4.9643314342672306e-05,
      "loss": 1.2922,
      "step": 2200
    },
    {
      "epoch": 0.007491783467913994,
      "grad_norm": 0.7152832746505737,
      "learning_rate": 4.962702732635598e-05,
      "loss": 1.2905,
      "step": 2300
    },
    {
      "epoch": 0.007817513183910255,
      "grad_norm": 1.1164734363555908,
      "learning_rate": 4.9610740310039644e-05,
      "loss": 1.3024,
      "step": 2400
    },
    {
      "epoch": 0.008143242899906516,
      "grad_norm": 0.574243426322937,
      "learning_rate": 4.959445329372332e-05,
      "loss": 1.2944,
      "step": 2500
    },
    {
      "epoch": 0.008468972615902777,
      "grad_norm": 0.6976324319839478,
      "learning_rate": 4.9578166277406976e-05,
      "loss": 1.2939,
      "step": 2600
    },
    {
      "epoch": 0.008794702331899037,
      "grad_norm": 0.4648737609386444,
      "learning_rate": 4.956187926109064e-05,
      "loss": 1.2841,
      "step": 2700
    },
    {
      "epoch": 0.009120432047895297,
      "grad_norm": 1.189271092414856,
      "learning_rate": 4.9545592244774314e-05,
      "loss": 1.294,
      "step": 2800
    },
    {
      "epoch": 0.009446161763891557,
      "grad_norm": 0.6437670588493347,
      "learning_rate": 4.952930522845798e-05,
      "loss": 1.2882,
      "step": 2900
    },
    {
      "epoch": 0.009771891479887818,
      "grad_norm": 1.591304898262024,
      "learning_rate": 4.9513018212141646e-05,
      "loss": 1.2805,
      "step": 3000
    },
    {
      "epoch": 0.010097621195884079,
      "grad_norm": 0.2836475670337677,
      "learning_rate": 4.949673119582531e-05,
      "loss": 1.2802,
      "step": 3100
    },
    {
      "epoch": 0.01042335091188034,
      "grad_norm": 1.304417610168457,
      "learning_rate": 4.9480444179508984e-05,
      "loss": 1.2833,
      "step": 3200
    },
    {
      "epoch": 0.0107490806278766,
      "grad_norm": 0.27579864859580994,
      "learning_rate": 4.946415716319265e-05,
      "loss": 1.2852,
      "step": 3300
    },
    {
      "epoch": 0.011074810343872862,
      "grad_norm": 1.1080585718154907,
      "learning_rate": 4.9447870146876315e-05,
      "loss": 1.289,
      "step": 3400
    },
    {
      "epoch": 0.011400540059869122,
      "grad_norm": 0.2783690392971039,
      "learning_rate": 4.943158313055998e-05,
      "loss": 1.2885,
      "step": 3500
    },
    {
      "epoch": 0.011726269775865382,
      "grad_norm": 0.6603112816810608,
      "learning_rate": 4.941529611424365e-05,
      "loss": 1.2882,
      "step": 3600
    },
    {
      "epoch": 0.012051999491861642,
      "grad_norm": 0.9498095512390137,
      "learning_rate": 4.939900909792732e-05,
      "loss": 1.2835,
      "step": 3700
    },
    {
      "epoch": 0.012377729207857903,
      "grad_norm": 0.5274548530578613,
      "learning_rate": 4.9382722081610985e-05,
      "loss": 1.279,
      "step": 3800
    },
    {
      "epoch": 0.012703458923854164,
      "grad_norm": 0.5299821496009827,
      "learning_rate": 4.936643506529465e-05,
      "loss": 1.2879,
      "step": 3900
    },
    {
      "epoch": 0.013029188639850425,
      "grad_norm": 1.0898863077163696,
      "learning_rate": 4.9350148048978316e-05,
      "loss": 1.2913,
      "step": 4000
    },
    {
      "epoch": 0.013354918355846686,
      "grad_norm": 0.6892501711845398,
      "learning_rate": 4.933386103266198e-05,
      "loss": 1.2835,
      "step": 4100
    },
    {
      "epoch": 0.013680648071842947,
      "grad_norm": 0.9103847146034241,
      "learning_rate": 4.9317574016345655e-05,
      "loss": 1.2876,
      "step": 4200
    },
    {
      "epoch": 0.014006377787839207,
      "grad_norm": 0.8750960826873779,
      "learning_rate": 4.9301287000029314e-05,
      "loss": 1.2761,
      "step": 4300
    },
    {
      "epoch": 0.014332107503835467,
      "grad_norm": 1.7296843528747559,
      "learning_rate": 4.9284999983712986e-05,
      "loss": 1.2825,
      "step": 4400
    },
    {
      "epoch": 0.014657837219831727,
      "grad_norm": 0.7019387483596802,
      "learning_rate": 4.926871296739665e-05,
      "loss": 1.2774,
      "step": 4500
    },
    {
      "epoch": 0.014983566935827988,
      "grad_norm": 0.9353660345077515,
      "learning_rate": 4.9252425951080324e-05,
      "loss": 1.2701,
      "step": 4600
    },
    {
      "epoch": 0.015309296651824249,
      "grad_norm": 0.7081932425498962,
      "learning_rate": 4.923613893476399e-05,
      "loss": 1.276,
      "step": 4700
    },
    {
      "epoch": 0.01563502636782051,
      "grad_norm": 0.8366962671279907,
      "learning_rate": 4.9219851918447656e-05,
      "loss": 1.2767,
      "step": 4800
    },
    {
      "epoch": 0.01596075608381677,
      "grad_norm": 1.765871286392212,
      "learning_rate": 4.920356490213132e-05,
      "loss": 1.2617,
      "step": 4900
    },
    {
      "epoch": 0.01628648579981303,
      "grad_norm": 0.2926379442214966,
      "learning_rate": 4.918727788581499e-05,
      "loss": 1.2762,
      "step": 5000
    },
    {
      "epoch": 0.01661221551580929,
      "grad_norm": 1.1176525354385376,
      "learning_rate": 4.917099086949866e-05,
      "loss": 1.2647,
      "step": 5100
    },
    {
      "epoch": 0.016937945231805553,
      "grad_norm": 0.384264200925827,
      "learning_rate": 4.915470385318232e-05,
      "loss": 1.2628,
      "step": 5200
    },
    {
      "epoch": 0.017263674947801812,
      "grad_norm": 1.5339140892028809,
      "learning_rate": 4.913841683686599e-05,
      "loss": 1.2692,
      "step": 5300
    },
    {
      "epoch": 0.017589404663798075,
      "grad_norm": 1.2026703357696533,
      "learning_rate": 4.912212982054966e-05,
      "loss": 1.2618,
      "step": 5400
    },
    {
      "epoch": 0.017915134379794334,
      "grad_norm": 0.6754997968673706,
      "learning_rate": 4.910584280423333e-05,
      "loss": 1.2495,
      "step": 5500
    },
    {
      "epoch": 0.018240864095790593,
      "grad_norm": 0.8240428566932678,
      "learning_rate": 4.908955578791699e-05,
      "loss": 1.2498,
      "step": 5600
    },
    {
      "epoch": 0.018566593811786856,
      "grad_norm": 0.6363087892532349,
      "learning_rate": 4.9073268771600654e-05,
      "loss": 1.2514,
      "step": 5700
    },
    {
      "epoch": 0.018892323527783115,
      "grad_norm": 1.393833875656128,
      "learning_rate": 4.905698175528433e-05,
      "loss": 1.2509,
      "step": 5800
    },
    {
      "epoch": 0.019218053243779377,
      "grad_norm": 0.6422170996665955,
      "learning_rate": 4.904069473896799e-05,
      "loss": 1.2405,
      "step": 5900
    },
    {
      "epoch": 0.019543782959775637,
      "grad_norm": 0.7575420141220093,
      "learning_rate": 4.902440772265166e-05,
      "loss": 1.2241,
      "step": 6000
    },
    {
      "epoch": 0.0198695126757719,
      "grad_norm": 0.7148196697235107,
      "learning_rate": 4.9008120706335324e-05,
      "loss": 1.2372,
      "step": 6100
    },
    {
      "epoch": 0.020195242391768158,
      "grad_norm": 1.1207329034805298,
      "learning_rate": 4.8991833690018996e-05,
      "loss": 1.2372,
      "step": 6200
    },
    {
      "epoch": 0.02052097210776442,
      "grad_norm": 1.3915568590164185,
      "learning_rate": 4.897554667370266e-05,
      "loss": 1.2129,
      "step": 6300
    },
    {
      "epoch": 0.02084670182376068,
      "grad_norm": 0.8674553036689758,
      "learning_rate": 4.895925965738633e-05,
      "loss": 1.2262,
      "step": 6400
    },
    {
      "epoch": 0.02117243153975694,
      "grad_norm": 0.7640644311904907,
      "learning_rate": 4.8942972641069994e-05,
      "loss": 1.1998,
      "step": 6500
    },
    {
      "epoch": 0.0214981612557532,
      "grad_norm": 0.7928606271743774,
      "learning_rate": 4.892668562475366e-05,
      "loss": 1.1776,
      "step": 6600
    },
    {
      "epoch": 0.02182389097174946,
      "grad_norm": 1.1644946336746216,
      "learning_rate": 4.891039860843733e-05,
      "loss": 1.1916,
      "step": 6700
    },
    {
      "epoch": 0.022149620687745723,
      "grad_norm": 1.1310213804244995,
      "learning_rate": 4.8894111592121e-05,
      "loss": 1.1786,
      "step": 6800
    },
    {
      "epoch": 0.022475350403741982,
      "grad_norm": 1.3858141899108887,
      "learning_rate": 4.887782457580466e-05,
      "loss": 1.1728,
      "step": 6900
    },
    {
      "epoch": 0.022801080119738245,
      "grad_norm": 3.814767360687256,
      "learning_rate": 4.886153755948833e-05,
      "loss": 1.1384,
      "step": 7000
    },
    {
      "epoch": 0.023126809835734504,
      "grad_norm": 1.2411885261535645,
      "learning_rate": 4.8845250543171995e-05,
      "loss": 1.1588,
      "step": 7100
    },
    {
      "epoch": 0.023452539551730763,
      "grad_norm": 1.4492881298065186,
      "learning_rate": 4.882896352685567e-05,
      "loss": 1.1266,
      "step": 7200
    },
    {
      "epoch": 0.023778269267727026,
      "grad_norm": 0.8389878869056702,
      "learning_rate": 4.8812676510539326e-05,
      "loss": 1.1446,
      "step": 7300
    },
    {
      "epoch": 0.024103998983723285,
      "grad_norm": 0.33955487608909607,
      "learning_rate": 4.8796389494223e-05,
      "loss": 1.1111,
      "step": 7400
    },
    {
      "epoch": 0.024429728699719547,
      "grad_norm": 0.7004753351211548,
      "learning_rate": 4.8780102477906664e-05,
      "loss": 1.0954,
      "step": 7500
    },
    {
      "epoch": 0.024755458415715807,
      "grad_norm": 0.7213209271430969,
      "learning_rate": 4.876381546159034e-05,
      "loss": 1.1123,
      "step": 7600
    },
    {
      "epoch": 0.02508118813171207,
      "grad_norm": 0.960991382598877,
      "learning_rate": 4.8747528445273996e-05,
      "loss": 1.0982,
      "step": 7700
    },
    {
      "epoch": 0.025406917847708328,
      "grad_norm": 0.6955804228782654,
      "learning_rate": 4.873124142895766e-05,
      "loss": 1.0827,
      "step": 7800
    },
    {
      "epoch": 0.02573264756370459,
      "grad_norm": 0.47498619556427,
      "learning_rate": 4.8714954412641334e-05,
      "loss": 1.1043,
      "step": 7900
    },
    {
      "epoch": 0.02605837727970085,
      "grad_norm": 0.304063618183136,
      "learning_rate": 4.8698667396325e-05,
      "loss": 1.0699,
      "step": 8000
    },
    {
      "epoch": 0.02638410699569711,
      "grad_norm": 0.9996088743209839,
      "learning_rate": 4.8682380380008666e-05,
      "loss": 1.0697,
      "step": 8100
    },
    {
      "epoch": 0.02670983671169337,
      "grad_norm": 0.5986392498016357,
      "learning_rate": 4.866609336369233e-05,
      "loss": 1.0733,
      "step": 8200
    },
    {
      "epoch": 0.02703556642768963,
      "grad_norm": 0.41347017884254456,
      "learning_rate": 4.8649806347376004e-05,
      "loss": 1.0643,
      "step": 8300
    },
    {
      "epoch": 0.027361296143685893,
      "grad_norm": 0.3976612687110901,
      "learning_rate": 4.863351933105967e-05,
      "loss": 1.0401,
      "step": 8400
    },
    {
      "epoch": 0.027687025859682152,
      "grad_norm": 1.1716387271881104,
      "learning_rate": 4.8617232314743335e-05,
      "loss": 1.0298,
      "step": 8500
    },
    {
      "epoch": 0.028012755575678415,
      "grad_norm": 0.7384105324745178,
      "learning_rate": 4.8600945298427e-05,
      "loss": 1.0223,
      "step": 8600
    },
    {
      "epoch": 0.028338485291674674,
      "grad_norm": 0.517280638217926,
      "learning_rate": 4.858465828211067e-05,
      "loss": 1.0445,
      "step": 8700
    },
    {
      "epoch": 0.028664215007670933,
      "grad_norm": 0.7129126787185669,
      "learning_rate": 4.856837126579434e-05,
      "loss": 1.0508,
      "step": 8800
    },
    {
      "epoch": 0.028989944723667196,
      "grad_norm": 0.35596320033073425,
      "learning_rate": 4.8552084249478005e-05,
      "loss": 1.0296,
      "step": 8900
    },
    {
      "epoch": 0.029315674439663455,
      "grad_norm": 0.9362590909004211,
      "learning_rate": 4.853579723316167e-05,
      "loss": 1.0785,
      "step": 9000
    },
    {
      "epoch": 0.029641404155659717,
      "grad_norm": 0.8223775625228882,
      "learning_rate": 4.8519510216845336e-05,
      "loss": 1.043,
      "step": 9100
    },
    {
      "epoch": 0.029967133871655977,
      "grad_norm": 0.7149192690849304,
      "learning_rate": 4.8503223200529e-05,
      "loss": 1.0036,
      "step": 9200
    },
    {
      "epoch": 0.03029286358765224,
      "grad_norm": 0.5907948017120361,
      "learning_rate": 4.8486936184212675e-05,
      "loss": 1.0408,
      "step": 9300
    },
    {
      "epoch": 0.030618593303648498,
      "grad_norm": 0.6083859801292419,
      "learning_rate": 4.847064916789634e-05,
      "loss": 1.0313,
      "step": 9400
    },
    {
      "epoch": 0.03094432301964476,
      "grad_norm": 0.5470224618911743,
      "learning_rate": 4.8454362151580006e-05,
      "loss": 1.0395,
      "step": 9500
    },
    {
      "epoch": 0.03127005273564102,
      "grad_norm": 0.9455150961875916,
      "learning_rate": 4.843807513526367e-05,
      "loss": 1.0132,
      "step": 9600
    },
    {
      "epoch": 0.03159578245163728,
      "grad_norm": 0.9068177938461304,
      "learning_rate": 4.8421788118947344e-05,
      "loss": 1.0219,
      "step": 9700
    },
    {
      "epoch": 0.03192151216763354,
      "grad_norm": 0.6018943190574646,
      "learning_rate": 4.840550110263101e-05,
      "loss": 0.9966,
      "step": 9800
    },
    {
      "epoch": 0.032247241883629804,
      "grad_norm": 1.1521615982055664,
      "learning_rate": 4.838921408631467e-05,
      "loss": 0.9782,
      "step": 9900
    },
    {
      "epoch": 0.03257297159962606,
      "grad_norm": 0.33281368017196655,
      "learning_rate": 4.837292706999834e-05,
      "loss": 1.0325,
      "step": 10000
    },
    {
      "epoch": 0.03289870131562232,
      "grad_norm": 0.8903327584266663,
      "learning_rate": 4.835664005368201e-05,
      "loss": 0.9889,
      "step": 10100
    },
    {
      "epoch": 0.03322443103161858,
      "grad_norm": 0.5526803731918335,
      "learning_rate": 4.834035303736568e-05,
      "loss": 1.0018,
      "step": 10200
    },
    {
      "epoch": 0.03355016074761485,
      "grad_norm": 0.8086706399917603,
      "learning_rate": 4.832406602104934e-05,
      "loss": 1.0189,
      "step": 10300
    },
    {
      "epoch": 0.03387589046361111,
      "grad_norm": 0.6990864276885986,
      "learning_rate": 4.830777900473301e-05,
      "loss": 0.996,
      "step": 10400
    },
    {
      "epoch": 0.034201620179607366,
      "grad_norm": 0.4859602451324463,
      "learning_rate": 4.829149198841668e-05,
      "loss": 0.992,
      "step": 10500
    },
    {
      "epoch": 0.034527349895603625,
      "grad_norm": 1.2284592390060425,
      "learning_rate": 4.827520497210034e-05,
      "loss": 1.0139,
      "step": 10600
    },
    {
      "epoch": 0.034853079611599884,
      "grad_norm": 0.6529733538627625,
      "learning_rate": 4.825891795578401e-05,
      "loss": 1.025,
      "step": 10700
    },
    {
      "epoch": 0.03517880932759615,
      "grad_norm": 0.6755232810974121,
      "learning_rate": 4.8242630939467674e-05,
      "loss": 1.0123,
      "step": 10800
    },
    {
      "epoch": 0.03550453904359241,
      "grad_norm": 0.9006055593490601,
      "learning_rate": 4.8226343923151347e-05,
      "loss": 0.9936,
      "step": 10900
    },
    {
      "epoch": 0.03583026875958867,
      "grad_norm": 0.7058572769165039,
      "learning_rate": 4.821005690683501e-05,
      "loss": 0.934,
      "step": 11000
    },
    {
      "epoch": 0.03615599847558493,
      "grad_norm": 0.4535008668899536,
      "learning_rate": 4.819376989051868e-05,
      "loss": 1.0269,
      "step": 11100
    },
    {
      "epoch": 0.036481728191581186,
      "grad_norm": 0.39823395013809204,
      "learning_rate": 4.8177482874202344e-05,
      "loss": 0.9866,
      "step": 11200
    },
    {
      "epoch": 0.03680745790757745,
      "grad_norm": 0.8109054565429688,
      "learning_rate": 4.816119585788601e-05,
      "loss": 1.0209,
      "step": 11300
    },
    {
      "epoch": 0.03713318762357371,
      "grad_norm": 0.760396420955658,
      "learning_rate": 4.814490884156968e-05,
      "loss": 0.9711,
      "step": 11400
    },
    {
      "epoch": 0.03745891733956997,
      "grad_norm": 0.8584955334663391,
      "learning_rate": 4.812862182525335e-05,
      "loss": 1.0151,
      "step": 11500
    },
    {
      "epoch": 0.03778464705556623,
      "grad_norm": 1.104041576385498,
      "learning_rate": 4.8112334808937013e-05,
      "loss": 0.9826,
      "step": 11600
    },
    {
      "epoch": 0.038110376771562496,
      "grad_norm": 0.6111257672309875,
      "learning_rate": 4.809604779262068e-05,
      "loss": 0.9524,
      "step": 11700
    },
    {
      "epoch": 0.038436106487558755,
      "grad_norm": 0.6601366996765137,
      "learning_rate": 4.807976077630435e-05,
      "loss": 0.9527,
      "step": 11800
    },
    {
      "epoch": 0.038761836203555014,
      "grad_norm": 0.4624398350715637,
      "learning_rate": 4.806347375998802e-05,
      "loss": 1.0077,
      "step": 11900
    },
    {
      "epoch": 0.03908756591955127,
      "grad_norm": 0.2786065638065338,
      "learning_rate": 4.8047186743671676e-05,
      "loss": 0.956,
      "step": 12000
    },
    {
      "epoch": 0.03941329563554753,
      "grad_norm": 1.0275955200195312,
      "learning_rate": 4.803089972735535e-05,
      "loss": 0.9484,
      "step": 12100
    },
    {
      "epoch": 0.0397390253515438,
      "grad_norm": 0.6198407411575317,
      "learning_rate": 4.8014612711039015e-05,
      "loss": 0.9847,
      "step": 12200
    },
    {
      "epoch": 0.04006475506754006,
      "grad_norm": 0.5880489945411682,
      "learning_rate": 4.799832569472269e-05,
      "loss": 0.9559,
      "step": 12300
    },
    {
      "epoch": 0.040390484783536316,
      "grad_norm": 0.39753594994544983,
      "learning_rate": 4.7982038678406346e-05,
      "loss": 0.9489,
      "step": 12400
    },
    {
      "epoch": 0.040716214499532576,
      "grad_norm": 0.5815085768699646,
      "learning_rate": 4.796575166209002e-05,
      "loss": 0.9567,
      "step": 12500
    },
    {
      "epoch": 0.04104194421552884,
      "grad_norm": 0.8463611602783203,
      "learning_rate": 4.7949464645773684e-05,
      "loss": 0.9706,
      "step": 12600
    },
    {
      "epoch": 0.0413676739315251,
      "grad_norm": 0.7260481715202332,
      "learning_rate": 4.793317762945736e-05,
      "loss": 1.0032,
      "step": 12700
    },
    {
      "epoch": 0.04169340364752136,
      "grad_norm": 0.6970434188842773,
      "learning_rate": 4.7916890613141016e-05,
      "loss": 0.9559,
      "step": 12800
    },
    {
      "epoch": 0.04201913336351762,
      "grad_norm": 0.6083927750587463,
      "learning_rate": 4.790060359682468e-05,
      "loss": 0.9558,
      "step": 12900
    },
    {
      "epoch": 0.04234486307951388,
      "grad_norm": 0.4736403524875641,
      "learning_rate": 4.7884316580508354e-05,
      "loss": 0.9444,
      "step": 13000
    },
    {
      "epoch": 0.042670592795510144,
      "grad_norm": 0.34586021304130554,
      "learning_rate": 4.786802956419202e-05,
      "loss": 0.9186,
      "step": 13100
    },
    {
      "epoch": 0.0429963225115064,
      "grad_norm": 0.5979019403457642,
      "learning_rate": 4.7851742547875685e-05,
      "loss": 0.9367,
      "step": 13200
    },
    {
      "epoch": 0.04332205222750266,
      "grad_norm": 1.0827624797821045,
      "learning_rate": 4.783545553155935e-05,
      "loss": 0.9324,
      "step": 13300
    },
    {
      "epoch": 0.04364778194349892,
      "grad_norm": 1.1920030117034912,
      "learning_rate": 4.7819168515243024e-05,
      "loss": 0.9367,
      "step": 13400
    },
    {
      "epoch": 0.04397351165949519,
      "grad_norm": 0.6469812989234924,
      "learning_rate": 4.780288149892669e-05,
      "loss": 0.9815,
      "step": 13500
    },
    {
      "epoch": 0.04429924137549145,
      "grad_norm": 0.8156530857086182,
      "learning_rate": 4.7786594482610355e-05,
      "loss": 0.9679,
      "step": 13600
    },
    {
      "epoch": 0.044624971091487706,
      "grad_norm": 1.2997325658798218,
      "learning_rate": 4.777030746629402e-05,
      "loss": 0.9358,
      "step": 13700
    },
    {
      "epoch": 0.044950700807483965,
      "grad_norm": 0.42360150814056396,
      "learning_rate": 4.7754020449977687e-05,
      "loss": 0.9326,
      "step": 13800
    },
    {
      "epoch": 0.045276430523480224,
      "grad_norm": 0.7316247820854187,
      "learning_rate": 4.773773343366136e-05,
      "loss": 0.9283,
      "step": 13900
    },
    {
      "epoch": 0.04560216023947649,
      "grad_norm": 0.5978175401687622,
      "learning_rate": 4.7721446417345025e-05,
      "loss": 0.9699,
      "step": 14000
    },
    {
      "epoch": 0.04592788995547275,
      "grad_norm": 0.5278334617614746,
      "learning_rate": 4.770515940102869e-05,
      "loss": 0.99,
      "step": 14100
    },
    {
      "epoch": 0.04625361967146901,
      "grad_norm": 0.7452822327613831,
      "learning_rate": 4.7688872384712356e-05,
      "loss": 0.8824,
      "step": 14200
    },
    {
      "epoch": 0.04657934938746527,
      "grad_norm": 0.4158065617084503,
      "learning_rate": 4.767258536839602e-05,
      "loss": 0.9076,
      "step": 14300
    },
    {
      "epoch": 0.046905079103461526,
      "grad_norm": 0.6929590106010437,
      "learning_rate": 4.7656298352079694e-05,
      "loss": 0.926,
      "step": 14400
    },
    {
      "epoch": 0.04723080881945779,
      "grad_norm": 0.8249752521514893,
      "learning_rate": 4.764001133576336e-05,
      "loss": 0.9342,
      "step": 14500
    },
    {
      "epoch": 0.04755653853545405,
      "grad_norm": 0.6523115038871765,
      "learning_rate": 4.7623724319447026e-05,
      "loss": 0.9312,
      "step": 14600
    },
    {
      "epoch": 0.04788226825145031,
      "grad_norm": 0.7809571027755737,
      "learning_rate": 4.760743730313069e-05,
      "loss": 0.927,
      "step": 14700
    },
    {
      "epoch": 0.04820799796744657,
      "grad_norm": 0.4370424747467041,
      "learning_rate": 4.7591150286814364e-05,
      "loss": 0.9275,
      "step": 14800
    },
    {
      "epoch": 0.048533727683442836,
      "grad_norm": 0.8082228302955627,
      "learning_rate": 4.757486327049803e-05,
      "loss": 0.9524,
      "step": 14900
    },
    {
      "epoch": 0.048859457399439095,
      "grad_norm": 0.7073273658752441,
      "learning_rate": 4.755857625418169e-05,
      "loss": 0.9069,
      "step": 15000
    },
    {
      "epoch": 0.049185187115435354,
      "grad_norm": 0.9150802493095398,
      "learning_rate": 4.754228923786536e-05,
      "loss": 0.9669,
      "step": 15100
    },
    {
      "epoch": 0.04951091683143161,
      "grad_norm": 0.6621295809745789,
      "learning_rate": 4.752600222154903e-05,
      "loss": 0.9117,
      "step": 15200
    },
    {
      "epoch": 0.04983664654742787,
      "grad_norm": 1.1658425331115723,
      "learning_rate": 4.75097152052327e-05,
      "loss": 0.9061,
      "step": 15300
    },
    {
      "epoch": 0.05016237626342414,
      "grad_norm": 1.1669522523880005,
      "learning_rate": 4.749342818891636e-05,
      "loss": 0.9625,
      "step": 15400
    },
    {
      "epoch": 0.0504881059794204,
      "grad_norm": 0.6995384693145752,
      "learning_rate": 4.747714117260003e-05,
      "loss": 0.9098,
      "step": 15500
    },
    {
      "epoch": 0.050813835695416656,
      "grad_norm": 0.5169076919555664,
      "learning_rate": 4.74608541562837e-05,
      "loss": 0.9243,
      "step": 15600
    },
    {
      "epoch": 0.051139565411412916,
      "grad_norm": 0.33565372228622437,
      "learning_rate": 4.744456713996736e-05,
      "loss": 0.9375,
      "step": 15700
    },
    {
      "epoch": 0.05146529512740918,
      "grad_norm": 0.4140024781227112,
      "learning_rate": 4.742828012365103e-05,
      "loss": 0.919,
      "step": 15800
    },
    {
      "epoch": 0.05179102484340544,
      "grad_norm": 0.9499224424362183,
      "learning_rate": 4.7411993107334694e-05,
      "loss": 0.9034,
      "step": 15900
    },
    {
      "epoch": 0.0521167545594017,
      "grad_norm": 0.8801336288452148,
      "learning_rate": 4.7395706091018366e-05,
      "loss": 0.881,
      "step": 16000
    },
    {
      "epoch": 0.05244248427539796,
      "grad_norm": 0.7208696007728577,
      "learning_rate": 4.737941907470203e-05,
      "loss": 0.8518,
      "step": 16100
    },
    {
      "epoch": 0.05276821399139422,
      "grad_norm": 0.5132054686546326,
      "learning_rate": 4.73631320583857e-05,
      "loss": 0.8933,
      "step": 16200
    },
    {
      "epoch": 0.053093943707390484,
      "grad_norm": 0.6521860957145691,
      "learning_rate": 4.7346845042069364e-05,
      "loss": 0.9332,
      "step": 16300
    },
    {
      "epoch": 0.05341967342338674,
      "grad_norm": 0.7121620178222656,
      "learning_rate": 4.733055802575303e-05,
      "loss": 0.9067,
      "step": 16400
    },
    {
      "epoch": 0.053745403139383,
      "grad_norm": 0.5065134763717651,
      "learning_rate": 4.73142710094367e-05,
      "loss": 0.9062,
      "step": 16500
    },
    {
      "epoch": 0.05407113285537926,
      "grad_norm": 0.5855521559715271,
      "learning_rate": 4.729798399312037e-05,
      "loss": 0.915,
      "step": 16600
    },
    {
      "epoch": 0.05439686257137553,
      "grad_norm": 0.5392531156539917,
      "learning_rate": 4.728169697680403e-05,
      "loss": 0.9124,
      "step": 16700
    },
    {
      "epoch": 0.05472259228737179,
      "grad_norm": 0.6617989540100098,
      "learning_rate": 4.72654099604877e-05,
      "loss": 0.8594,
      "step": 16800
    },
    {
      "epoch": 0.055048322003368046,
      "grad_norm": 0.6459785103797913,
      "learning_rate": 4.724912294417137e-05,
      "loss": 0.9262,
      "step": 16900
    },
    {
      "epoch": 0.055374051719364305,
      "grad_norm": 0.34565970301628113,
      "learning_rate": 4.723283592785504e-05,
      "loss": 0.8747,
      "step": 17000
    },
    {
      "epoch": 0.055699781435360564,
      "grad_norm": 0.9510948061943054,
      "learning_rate": 4.7216548911538696e-05,
      "loss": 0.9027,
      "step": 17100
    },
    {
      "epoch": 0.05602551115135683,
      "grad_norm": 0.577192485332489,
      "learning_rate": 4.720026189522237e-05,
      "loss": 0.9192,
      "step": 17200
    },
    {
      "epoch": 0.05635124086735309,
      "grad_norm": 0.38653406500816345,
      "learning_rate": 4.7183974878906034e-05,
      "loss": 0.8759,
      "step": 17300
    },
    {
      "epoch": 0.05667697058334935,
      "grad_norm": 0.6405381560325623,
      "learning_rate": 4.716768786258971e-05,
      "loss": 0.8486,
      "step": 17400
    },
    {
      "epoch": 0.05700270029934561,
      "grad_norm": 0.6968704462051392,
      "learning_rate": 4.7151400846273366e-05,
      "loss": 0.903,
      "step": 17500
    },
    {
      "epoch": 0.057328430015341866,
      "grad_norm": 0.8094695210456848,
      "learning_rate": 4.713511382995704e-05,
      "loss": 0.864,
      "step": 17600
    },
    {
      "epoch": 0.05765415973133813,
      "grad_norm": 0.8325287103652954,
      "learning_rate": 4.7118826813640704e-05,
      "loss": 0.8886,
      "step": 17700
    },
    {
      "epoch": 0.05797988944733439,
      "grad_norm": 0.5068339705467224,
      "learning_rate": 4.710253979732437e-05,
      "loss": 0.8767,
      "step": 17800
    },
    {
      "epoch": 0.05830561916333065,
      "grad_norm": 0.7535611391067505,
      "learning_rate": 4.7086252781008036e-05,
      "loss": 0.8661,
      "step": 17900
    },
    {
      "epoch": 0.05863134887932691,
      "grad_norm": 0.9104974865913391,
      "learning_rate": 4.70699657646917e-05,
      "loss": 0.8612,
      "step": 18000
    },
    {
      "epoch": 0.058957078595323176,
      "grad_norm": 0.9106101989746094,
      "learning_rate": 4.7053678748375374e-05,
      "loss": 0.8885,
      "step": 18100
    },
    {
      "epoch": 0.059282808311319435,
      "grad_norm": 0.9990994334220886,
      "learning_rate": 4.703739173205904e-05,
      "loss": 0.9097,
      "step": 18200
    },
    {
      "epoch": 0.059608538027315694,
      "grad_norm": 0.6219133138656616,
      "learning_rate": 4.7021104715742705e-05,
      "loss": 0.8349,
      "step": 18300
    },
    {
      "epoch": 0.05993426774331195,
      "grad_norm": 0.28884798288345337,
      "learning_rate": 4.700481769942637e-05,
      "loss": 0.8359,
      "step": 18400
    },
    {
      "epoch": 0.06025999745930821,
      "grad_norm": 0.6142743229866028,
      "learning_rate": 4.698853068311004e-05,
      "loss": 0.8686,
      "step": 18500
    },
    {
      "epoch": 0.06058572717530448,
      "grad_norm": 0.7121238708496094,
      "learning_rate": 4.697224366679371e-05,
      "loss": 0.8318,
      "step": 18600
    },
    {
      "epoch": 0.06091145689130074,
      "grad_norm": 0.3502013683319092,
      "learning_rate": 4.6955956650477375e-05,
      "loss": 0.8353,
      "step": 18700
    },
    {
      "epoch": 0.061237186607296996,
      "grad_norm": 0.869159460067749,
      "learning_rate": 4.693966963416104e-05,
      "loss": 0.8811,
      "step": 18800
    },
    {
      "epoch": 0.061562916323293256,
      "grad_norm": 0.4008027911186218,
      "learning_rate": 4.6923382617844706e-05,
      "loss": 0.8595,
      "step": 18900
    },
    {
      "epoch": 0.06188864603928952,
      "grad_norm": 0.6609760522842407,
      "learning_rate": 4.690709560152838e-05,
      "loss": 0.8591,
      "step": 19000
    },
    {
      "epoch": 0.06221437575528578,
      "grad_norm": 0.41599878668785095,
      "learning_rate": 4.6890808585212045e-05,
      "loss": 0.8792,
      "step": 19100
    },
    {
      "epoch": 0.06254010547128204,
      "grad_norm": 0.8219528794288635,
      "learning_rate": 4.687452156889571e-05,
      "loss": 0.8469,
      "step": 19200
    },
    {
      "epoch": 0.0628658351872783,
      "grad_norm": 0.5383628010749817,
      "learning_rate": 4.6858234552579376e-05,
      "loss": 0.8619,
      "step": 19300
    },
    {
      "epoch": 0.06319156490327456,
      "grad_norm": 1.0892442464828491,
      "learning_rate": 4.684194753626304e-05,
      "loss": 0.8219,
      "step": 19400
    },
    {
      "epoch": 0.06351729461927082,
      "grad_norm": 0.7258702516555786,
      "learning_rate": 4.6825660519946714e-05,
      "loss": 0.8243,
      "step": 19500
    },
    {
      "epoch": 0.06384302433526708,
      "grad_norm": 1.2622634172439575,
      "learning_rate": 4.680937350363038e-05,
      "loss": 0.8619,
      "step": 19600
    },
    {
      "epoch": 0.06416875405126335,
      "grad_norm": 0.3901592195034027,
      "learning_rate": 4.6793086487314046e-05,
      "loss": 0.8315,
      "step": 19700
    },
    {
      "epoch": 0.06449448376725961,
      "grad_norm": 0.5976518392562866,
      "learning_rate": 4.677679947099771e-05,
      "loss": 0.8193,
      "step": 19800
    },
    {
      "epoch": 0.06482021348325587,
      "grad_norm": 1.0668984651565552,
      "learning_rate": 4.676051245468138e-05,
      "loss": 0.8381,
      "step": 19900
    },
    {
      "epoch": 0.06514594319925213,
      "grad_norm": 0.6844903826713562,
      "learning_rate": 4.674422543836505e-05,
      "loss": 0.8202,
      "step": 20000
    },
    {
      "epoch": 0.06547167291524839,
      "grad_norm": 0.6987929344177246,
      "learning_rate": 4.672793842204871e-05,
      "loss": 0.844,
      "step": 20100
    },
    {
      "epoch": 0.06579740263124464,
      "grad_norm": 1.0227413177490234,
      "learning_rate": 4.671165140573238e-05,
      "loss": 0.8093,
      "step": 20200
    },
    {
      "epoch": 0.0661231323472409,
      "grad_norm": 0.5901645421981812,
      "learning_rate": 4.669536438941605e-05,
      "loss": 0.8068,
      "step": 20300
    },
    {
      "epoch": 0.06644886206323716,
      "grad_norm": 0.7951213717460632,
      "learning_rate": 4.667907737309972e-05,
      "loss": 0.8581,
      "step": 20400
    },
    {
      "epoch": 0.06677459177923342,
      "grad_norm": 0.617341160774231,
      "learning_rate": 4.666279035678338e-05,
      "loss": 0.8427,
      "step": 20500
    },
    {
      "epoch": 0.0671003214952297,
      "grad_norm": 0.694558322429657,
      "learning_rate": 4.6646503340467044e-05,
      "loss": 0.8619,
      "step": 20600
    },
    {
      "epoch": 0.06742605121122595,
      "grad_norm": 0.6441329717636108,
      "learning_rate": 4.663021632415072e-05,
      "loss": 0.8866,
      "step": 20700
    },
    {
      "epoch": 0.06775178092722221,
      "grad_norm": 0.46440285444259644,
      "learning_rate": 4.661392930783438e-05,
      "loss": 0.8435,
      "step": 20800
    },
    {
      "epoch": 0.06807751064321847,
      "grad_norm": 0.42911046743392944,
      "learning_rate": 4.659764229151805e-05,
      "loss": 0.8145,
      "step": 20900
    },
    {
      "epoch": 0.06840324035921473,
      "grad_norm": 0.7508918046951294,
      "learning_rate": 4.6581355275201714e-05,
      "loss": 0.8576,
      "step": 21000
    }
  ],
  "logging_steps": 100,
  "max_steps": 307003,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3.12051889078272e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
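
The state above is the standard trainer_state.json that Hugging Face's Trainer writes into each checkpoint directory. A minimal sketch of how it can be inspected, assuming the file is saved locally under the name "trainer_state.json" (the usual Trainer convention; the path is an assumption, not stated in the file itself):

import json

# Assumed local path; Trainer normally writes this file inside
# checkpoint-<step>/ directories during training.
with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry records the step, epoch, smoothed training
# loss, gradient norm, and current learning rate at a logging event
# (here every 100 steps, per "logging_steps": 100).
history = state["log_history"]
print(f"progress: step {state['global_step']} of {state['max_steps']}")
print(f"first logged loss: {history[0]['loss']:.4f} at step {history[0]['step']}")
print(f"last logged loss:  {history[-1]['loss']:.4f} at step {history[-1]['step']}")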