{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 5.0,
"eval_steps": 500,
"global_step": 5555,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0288028802880288,
"grad_norm": 0.9036304950714111,
"learning_rate": 5.755395683453238e-07,
"loss": 1.0187,
"step": 32
},
{
"epoch": 0.0576057605760576,
"grad_norm": 2.02474045753479,
"learning_rate": 1.1510791366906476e-06,
"loss": 1.0342,
"step": 64
},
{
"epoch": 0.08640864086408641,
"grad_norm": 1.7671840190887451,
"learning_rate": 1.7266187050359715e-06,
"loss": 1.0534,
"step": 96
},
{
"epoch": 0.1152115211521152,
"grad_norm": 2.703395366668701,
"learning_rate": 2.302158273381295e-06,
"loss": 1.0047,
"step": 128
},
{
"epoch": 0.14401440144014402,
"grad_norm": 2.96386456489563,
"learning_rate": 2.877697841726619e-06,
"loss": 1.0125,
"step": 160
},
{
"epoch": 0.17281728172817282,
"grad_norm": 0.9527461528778076,
"learning_rate": 3.453237410071943e-06,
"loss": 0.9816,
"step": 192
},
{
"epoch": 0.20162016201620162,
"grad_norm": 2.197382926940918,
"learning_rate": 4.028776978417267e-06,
"loss": 1.05,
"step": 224
},
{
"epoch": 0.2304230423042304,
"grad_norm": 1.8034260272979736,
"learning_rate": 4.60431654676259e-06,
"loss": 0.8815,
"step": 256
},
{
"epoch": 0.25922592259225924,
"grad_norm": 2.3050363063812256,
"learning_rate": 5.179856115107914e-06,
"loss": 0.9027,
"step": 288
},
{
"epoch": 0.28802880288028804,
"grad_norm": 2.022681951522827,
"learning_rate": 5.755395683453238e-06,
"loss": 0.865,
"step": 320
},
{
"epoch": 0.31683168316831684,
"grad_norm": 1.3653255701065063,
"learning_rate": 6.330935251798561e-06,
"loss": 0.8724,
"step": 352
},
{
"epoch": 0.34563456345634563,
"grad_norm": 1.3452287912368774,
"learning_rate": 6.906474820143886e-06,
"loss": 0.8093,
"step": 384
},
{
"epoch": 0.37443744374437443,
"grad_norm": 1.9356400966644287,
"learning_rate": 7.48201438848921e-06,
"loss": 0.7579,
"step": 416
},
{
"epoch": 0.40324032403240323,
"grad_norm": 1.4237608909606934,
"learning_rate": 8.057553956834533e-06,
"loss": 0.7364,
"step": 448
},
{
"epoch": 0.43204320432043203,
"grad_norm": 1.9049218893051147,
"learning_rate": 8.633093525179856e-06,
"loss": 0.7033,
"step": 480
},
{
"epoch": 0.4608460846084608,
"grad_norm": 3.9214580059051514,
"learning_rate": 9.20863309352518e-06,
"loss": 0.696,
"step": 512
},
{
"epoch": 0.4896489648964896,
"grad_norm": 1.9122344255447388,
"learning_rate": 9.784172661870505e-06,
"loss": 0.697,
"step": 544
},
{
"epoch": 0.5184518451845185,
"grad_norm": 4.641770362854004,
"learning_rate": 9.999605063062183e-06,
"loss": 0.6691,
"step": 576
},
{
"epoch": 0.5472547254725473,
"grad_norm": 1.5034338235855103,
"learning_rate": 9.997330428740717e-06,
"loss": 0.6846,
"step": 608
},
{
"epoch": 0.5760576057605761,
"grad_norm": 1.306266188621521,
"learning_rate": 9.993034838418394e-06,
"loss": 0.6663,
"step": 640
},
{
"epoch": 0.6048604860486049,
"grad_norm": 4.800021648406982,
"learning_rate": 9.986720029262515e-06,
"loss": 0.6264,
"step": 672
},
{
"epoch": 0.6336633663366337,
"grad_norm": 1.366411566734314,
"learning_rate": 9.97838855502686e-06,
"loss": 0.6116,
"step": 704
},
{
"epoch": 0.6624662466246625,
"grad_norm": 1.8418484926223755,
"learning_rate": 9.968043785018917e-06,
"loss": 0.6244,
"step": 736
},
{
"epoch": 0.6912691269126913,
"grad_norm": 7.662785053253174,
"learning_rate": 9.955689902737327e-06,
"loss": 0.5972,
"step": 768
},
{
"epoch": 0.7200720072007201,
"grad_norm": 1.7773517370224,
"learning_rate": 9.941331904180025e-06,
"loss": 0.6061,
"step": 800
},
{
"epoch": 0.7488748874887489,
"grad_norm": 1.4460841417312622,
"learning_rate": 9.924975595823843e-06,
"loss": 0.5821,
"step": 832
},
{
"epoch": 0.7776777677767777,
"grad_norm": 2.224186897277832,
"learning_rate": 9.906627592276318e-06,
"loss": 0.6255,
"step": 864
},
{
"epoch": 0.8064806480648065,
"grad_norm": 1.6951428651809692,
"learning_rate": 9.886295313600703e-06,
"loss": 0.6272,
"step": 896
},
{
"epoch": 0.8352835283528353,
"grad_norm": 1.9330335855484009,
"learning_rate": 9.86398698231524e-06,
"loss": 0.5994,
"step": 928
},
{
"epoch": 0.8640864086408641,
"grad_norm": 1.4903757572174072,
"learning_rate": 9.839711620067906e-06,
"loss": 0.5589,
"step": 960
},
{
"epoch": 0.8928892889288929,
"grad_norm": 1.2906200885772705,
"learning_rate": 9.813479043988013e-06,
"loss": 0.5859,
"step": 992
},
{
"epoch": 0.9216921692169217,
"grad_norm": 1.8480700254440308,
"learning_rate": 9.785299862716072e-06,
"loss": 0.6045,
"step": 1024
},
{
"epoch": 0.9504950495049505,
"grad_norm": 1.5740501880645752,
"learning_rate": 9.755185472113595e-06,
"loss": 0.5862,
"step": 1056
},
{
"epoch": 0.9792979297929792,
"grad_norm": 2.271888017654419,
"learning_rate": 9.723148050654522e-06,
"loss": 0.5833,
"step": 1088
},
{
"epoch": 1.008100810081008,
"grad_norm": 0.8760111331939697,
"learning_rate": 9.689200554500162e-06,
"loss": 0.5658,
"step": 1120
},
{
"epoch": 1.036903690369037,
"grad_norm": 1.2301350831985474,
"learning_rate": 9.65335671225963e-06,
"loss": 0.5839,
"step": 1152
},
{
"epoch": 1.0657065706570656,
"grad_norm": 1.7237297296524048,
"learning_rate": 9.615631019437896e-06,
"loss": 0.5718,
"step": 1184
},
{
"epoch": 1.0945094509450946,
"grad_norm": 1.146071434020996,
"learning_rate": 9.576038732573702e-06,
"loss": 0.6036,
"step": 1216
},
{
"epoch": 1.1233123312331232,
"grad_norm": 1.4510979652404785,
"learning_rate": 9.53459586306971e-06,
"loss": 0.602,
"step": 1248
},
{
"epoch": 1.1521152115211521,
"grad_norm": 1.3618237972259521,
"learning_rate": 9.491319170717375e-06,
"loss": 0.5661,
"step": 1280
},
{
"epoch": 1.1809180918091808,
"grad_norm": 1.5930922031402588,
"learning_rate": 9.446226156919164e-06,
"loss": 0.5892,
"step": 1312
},
{
"epoch": 1.2097209720972097,
"grad_norm": 1.0816315412521362,
"learning_rate": 9.399335057610872e-06,
"loss": 0.5816,
"step": 1344
},
{
"epoch": 1.2385238523852384,
"grad_norm": 1.2080299854278564,
"learning_rate": 9.350664835886873e-06,
"loss": 0.5766,
"step": 1376
},
{
"epoch": 1.2673267326732673,
"grad_norm": 1.655219554901123,
"learning_rate": 9.300235174331309e-06,
"loss": 0.5925,
"step": 1408
},
{
"epoch": 1.296129612961296,
"grad_norm": 1.02821683883667,
"learning_rate": 9.248066467058315e-06,
"loss": 0.5726,
"step": 1440
},
{
"epoch": 1.324932493249325,
"grad_norm": 1.9538413286209106,
"learning_rate": 9.194179811464499e-06,
"loss": 0.5405,
"step": 1472
},
{
"epoch": 1.3537353735373538,
"grad_norm": 1.4714971780776978,
"learning_rate": 9.138596999696994e-06,
"loss": 0.5848,
"step": 1504
},
{
"epoch": 1.3825382538253825,
"grad_norm": 1.3316493034362793,
"learning_rate": 9.081340509840568e-06,
"loss": 0.5796,
"step": 1536
},
{
"epoch": 1.4113411341134112,
"grad_norm": 1.2864930629730225,
"learning_rate": 9.022433496827323e-06,
"loss": 0.5695,
"step": 1568
},
{
"epoch": 1.4401440144014401,
"grad_norm": 1.261751651763916,
"learning_rate": 8.961899783072673e-06,
"loss": 0.5779,
"step": 1600
},
{
"epoch": 1.468946894689469,
"grad_norm": 1.6343884468078613,
"learning_rate": 8.899763848841396e-06,
"loss": 0.6653,
"step": 1632
},
{
"epoch": 1.4977497749774977,
"grad_norm": 1.3842564821243286,
"learning_rate": 8.836050822347632e-06,
"loss": 0.6163,
"step": 1664
},
{
"epoch": 1.5265526552655264,
"grad_norm": 1.3212823867797852,
"learning_rate": 8.770786469592863e-06,
"loss": 0.5873,
"step": 1696
},
{
"epoch": 1.5553555355535553,
"grad_norm": 1.8528977632522583,
"learning_rate": 8.703997183945949e-06,
"loss": 0.5722,
"step": 1728
},
{
"epoch": 1.5841584158415842,
"grad_norm": 2.5258665084838867,
"learning_rate": 8.635709975469479e-06,
"loss": 0.5556,
"step": 1760
},
{
"epoch": 1.612961296129613,
"grad_norm": 1.2707765102386475,
"learning_rate": 8.565952459996684e-06,
"loss": 0.5536,
"step": 1792
},
{
"epoch": 1.6417641764176416,
"grad_norm": 1.631494164466858,
"learning_rate": 8.49475284796342e-06,
"loss": 0.5651,
"step": 1824
},
{
"epoch": 1.6705670567056705,
"grad_norm": 2.1226248741149902,
"learning_rate": 8.422139932999658e-06,
"loss": 0.5679,
"step": 1856
},
{
"epoch": 1.6993699369936994,
"grad_norm": 2.1261792182922363,
"learning_rate": 8.34814308028513e-06,
"loss": 0.5896,
"step": 1888
},
{
"epoch": 1.7281728172817283,
"grad_norm": 3.6276915073394775,
"learning_rate": 8.272792214673851e-06,
"loss": 0.5859,
"step": 1920
},
{
"epoch": 1.756975697569757,
"grad_norm": 1.1661372184753418,
"learning_rate": 8.19611780859229e-06,
"loss": 0.6355,
"step": 1952
},
{
"epoch": 1.7857785778577857,
"grad_norm": 1.1813215017318726,
"learning_rate": 8.118150869716101e-06,
"loss": 0.5659,
"step": 1984
},
{
"epoch": 1.8145814581458146,
"grad_norm": 1.6434768438339233,
"learning_rate": 8.038922928430408e-06,
"loss": 0.5681,
"step": 2016
},
{
"epoch": 1.8433843384338435,
"grad_norm": 2.215708017349243,
"learning_rate": 7.95846602507868e-06,
"loss": 0.5849,
"step": 2048
},
{
"epoch": 1.8721872187218722,
"grad_norm": 1.4538156986236572,
"learning_rate": 7.876812697005396e-06,
"loss": 0.5677,
"step": 2080
},
{
"epoch": 1.900990099009901,
"grad_norm": 1.8781392574310303,
"learning_rate": 7.79399596539771e-06,
"loss": 0.5612,
"step": 2112
},
{
"epoch": 1.9297929792979298,
"grad_norm": 2.6248340606689453,
"learning_rate": 7.710049321931453e-06,
"loss": 0.5578,
"step": 2144
},
{
"epoch": 1.9585958595859587,
"grad_norm": 2.893212080001831,
"learning_rate": 7.625006715226844e-06,
"loss": 0.5925,
"step": 2176
},
{
"epoch": 1.9873987398739874,
"grad_norm": 1.3693437576293945,
"learning_rate": 7.538902537119445e-06,
"loss": 0.5519,
"step": 2208
},
{
"epoch": 2.016201620162016,
"grad_norm": 0.9655335545539856,
"learning_rate": 7.451771608751854e-06,
"loss": 0.5848,
"step": 2240
},
{
"epoch": 2.045004500450045,
"grad_norm": 1.7546676397323608,
"learning_rate": 7.3636491664917746e-06,
"loss": 0.5676,
"step": 2272
},
{
"epoch": 2.073807380738074,
"grad_norm": 1.35713791847229,
"learning_rate": 7.274570847682192e-06,
"loss": 0.5995,
"step": 2304
},
{
"epoch": 2.102610261026103,
"grad_norm": 1.368743896484375,
"learning_rate": 7.184572676229373e-06,
"loss": 0.5532,
"step": 2336
},
{
"epoch": 2.1314131413141313,
"grad_norm": 1.5946100950241089,
"learning_rate": 7.093691048034539e-06,
"loss": 0.5732,
"step": 2368
},
{
"epoch": 2.16021602160216,
"grad_norm": 1.4962303638458252,
"learning_rate": 7.001962716275111e-06,
"loss": 0.6021,
"step": 2400
},
{
"epoch": 2.189018901890189,
"grad_norm": 2.6867597103118896,
"learning_rate": 6.909424776541456e-06,
"loss": 0.5429,
"step": 2432
},
{
"epoch": 2.217821782178218,
"grad_norm": 1.6566091775894165,
"learning_rate": 6.816114651835171e-06,
"loss": 0.5284,
"step": 2464
},
{
"epoch": 2.2466246624662465,
"grad_norm": 1.1805468797683716,
"learning_rate": 6.7220700774349524e-06,
"loss": 0.5797,
"step": 2496
},
{
"epoch": 2.2754275427542754,
"grad_norm": 1.6449726819992065,
"learning_rate": 6.627329085636178e-06,
"loss": 0.5624,
"step": 2528
},
{
"epoch": 2.3042304230423043,
"grad_norm": 1.2300235033035278,
"learning_rate": 6.531929990370387e-06,
"loss": 0.5629,
"step": 2560
},
{
"epoch": 2.333033303330333,
"grad_norm": 1.2485629320144653,
"learning_rate": 6.43591137171084e-06,
"loss": 0.561,
"step": 2592
},
{
"epoch": 2.3618361836183617,
"grad_norm": 1.2627168893814087,
"learning_rate": 6.3393120602704675e-06,
"loss": 0.5707,
"step": 2624
},
{
"epoch": 2.3906390639063906,
"grad_norm": 1.8480335474014282,
"learning_rate": 6.242171121498498e-06,
"loss": 0.5759,
"step": 2656
},
{
"epoch": 2.4194419441944195,
"grad_norm": 1.5584763288497925,
"learning_rate": 6.144527839882107e-06,
"loss": 0.5641,
"step": 2688
},
{
"epoch": 2.4482448244824484,
"grad_norm": 1.1628447771072388,
"learning_rate": 6.046421703059493e-06,
"loss": 0.5853,
"step": 2720
},
{
"epoch": 2.477047704770477,
"grad_norm": 1.420699119567871,
"learning_rate": 5.9478923858507955e-06,
"loss": 0.5572,
"step": 2752
},
{
"epoch": 2.5058505850585058,
"grad_norm": 1.5408899784088135,
"learning_rate": 5.848979734213309e-06,
"loss": 0.5835,
"step": 2784
},
{
"epoch": 2.5346534653465347,
"grad_norm": 3.342973470687866,
"learning_rate": 5.74972374912751e-06,
"loss": 0.5648,
"step": 2816
},
{
"epoch": 2.5634563456345636,
"grad_norm": 1.3234657049179077,
"learning_rate": 5.650164570420358e-06,
"loss": 0.5838,
"step": 2848
},
{
"epoch": 2.592259225922592,
"grad_norm": 3.896916627883911,
"learning_rate": 5.5503424605324715e-06,
"loss": 0.5716,
"step": 2880
},
{
"epoch": 2.621062106210621,
"grad_norm": 1.4137349128723145,
"learning_rate": 5.450297788235718e-06,
"loss": 0.5895,
"step": 2912
},
{
"epoch": 2.64986498649865,
"grad_norm": 1.852388620376587,
"learning_rate": 5.350071012307776e-06,
"loss": 0.5525,
"step": 2944
},
{
"epoch": 2.678667866786679,
"grad_norm": 1.226164698600769,
"learning_rate": 5.24970266517034e-06,
"loss": 0.5549,
"step": 2976
},
{
"epoch": 2.7074707470747077,
"grad_norm": 1.257299542427063,
"learning_rate": 5.149233336497501e-06,
"loss": 0.5797,
"step": 3008
},
{
"epoch": 2.736273627362736,
"grad_norm": 1.4466633796691895,
"learning_rate": 5.048703656801004e-06,
"loss": 0.5629,
"step": 3040
},
{
"epoch": 2.765076507650765,
"grad_norm": 2.2335245609283447,
"learning_rate": 4.948154280998981e-06,
"loss": 0.5626,
"step": 3072
},
{
"epoch": 2.793879387938794,
"grad_norm": 1.4836534261703491,
"learning_rate": 4.847625871974807e-06,
"loss": 0.5659,
"step": 3104
},
{
"epoch": 2.8226822682268224,
"grad_norm": 1.282368540763855,
"learning_rate": 4.747159084132742e-06,
"loss": 0.5957,
"step": 3136
},
{
"epoch": 2.8514851485148514,
"grad_norm": 1.2289072275161743,
"learning_rate": 4.646794546957001e-06,
"loss": 0.5754,
"step": 3168
},
{
"epoch": 2.8802880288028803,
"grad_norm": 1.3047752380371094,
"learning_rate": 4.546572848580907e-06,
"loss": 0.6166,
"step": 3200
},
{
"epoch": 2.909090909090909,
"grad_norm": 1.365952730178833,
"learning_rate": 4.446534519372744e-06,
"loss": 0.5474,
"step": 3232
},
{
"epoch": 2.937893789378938,
"grad_norm": 1.4504116773605347,
"learning_rate": 4.34672001554501e-06,
"loss": 0.564,
"step": 3264
},
{
"epoch": 2.9666966696669665,
"grad_norm": 2.4160330295562744,
"learning_rate": 4.247169702793625e-06,
"loss": 0.5578,
"step": 3296
},
{
"epoch": 2.9954995499549955,
"grad_norm": 1.876542568206787,
"learning_rate": 4.1479238399737485e-06,
"loss": 0.5885,
"step": 3328
},
{
"epoch": 3.0243024302430244,
"grad_norm": 2.028958320617676,
"learning_rate": 4.049022562818829e-06,
"loss": 0.5875,
"step": 3360
},
{
"epoch": 3.0531053105310533,
"grad_norm": 1.76653254032135,
"learning_rate": 3.950505867709418e-06,
"loss": 0.5559,
"step": 3392
},
{
"epoch": 3.0819081908190817,
"grad_norm": 1.768809199333191,
"learning_rate": 3.852413595498343e-06,
"loss": 0.5792,
"step": 3424
},
{
"epoch": 3.1107110711071106,
"grad_norm": 1.5263768434524536,
"learning_rate": 3.7547854153987973e-06,
"loss": 0.5742,
"step": 3456
},
{
"epoch": 3.1395139513951396,
"grad_norm": 1.5573300123214722,
"learning_rate": 3.6576608089418188e-06,
"loss": 0.5664,
"step": 3488
},
{
"epoch": 3.1683168316831685,
"grad_norm": 1.0428520441055298,
"learning_rate": 3.561079054009678e-06,
"loss": 0.5434,
"step": 3520
},
{
"epoch": 3.197119711971197,
"grad_norm": 1.465216875076294,
"learning_rate": 3.4650792089516238e-06,
"loss": 0.5603,
"step": 3552
},
{
"epoch": 3.225922592259226,
"grad_norm": 1.1263318061828613,
"learning_rate": 3.3697000967884107e-06,
"loss": 0.5636,
"step": 3584
},
{
"epoch": 3.2547254725472547,
"grad_norm": 1.6343493461608887,
"learning_rate": 3.274980289511995e-06,
"loss": 0.5797,
"step": 3616
},
{
"epoch": 3.2835283528352837,
"grad_norm": 2.2945406436920166,
"learning_rate": 3.1809580924867358e-06,
"loss": 0.5638,
"step": 3648
},
{
"epoch": 3.312331233123312,
"grad_norm": 1.1425927877426147,
"learning_rate": 3.087671528958445e-06,
"loss": 0.559,
"step": 3680
},
{
"epoch": 3.341134113411341,
"grad_norm": 1.1738224029541016,
"learning_rate": 2.9951583246775147e-06,
"loss": 0.5947,
"step": 3712
},
{
"epoch": 3.36993699369937,
"grad_norm": 1.0960267782211304,
"learning_rate": 2.9034558926423427e-06,
"loss": 0.5609,
"step": 3744
},
{
"epoch": 3.398739873987399,
"grad_norm": 2.027769088745117,
"learning_rate": 2.812601317969266e-06,
"loss": 0.5932,
"step": 3776
},
{
"epoch": 3.4275427542754278,
"grad_norm": 1.8806779384613037,
"learning_rate": 2.7226313428950703e-06,
"loss": 0.546,
"step": 3808
},
{
"epoch": 3.4563456345634562,
"grad_norm": 1.778334617614746,
"learning_rate": 2.633582351918156e-06,
"loss": 0.5645,
"step": 3840
},
{
"epoch": 3.485148514851485,
"grad_norm": 1.2001926898956299,
"learning_rate": 2.545490357084409e-06,
"loss": 0.5634,
"step": 3872
},
{
"epoch": 3.513951395139514,
"grad_norm": 3.2067384719848633,
"learning_rate": 2.4583909834236563e-06,
"loss": 0.5409,
"step": 3904
},
{
"epoch": 3.5427542754275425,
"grad_norm": 2.1273581981658936,
"learning_rate": 2.372319454542659e-06,
"loss": 0.5632,
"step": 3936
},
{
"epoch": 3.5715571557155714,
"grad_norm": 1.4944703578948975,
"learning_rate": 2.28731057838043e-06,
"loss": 0.5438,
"step": 3968
},
{
"epoch": 3.6003600360036003,
"grad_norm": 1.5785070657730103,
"learning_rate": 2.203398733131661e-06,
"loss": 0.5491,
"step": 4000
},
{
"epoch": 3.6291629162916292,
"grad_norm": 1.9863430261611938,
"learning_rate": 2.120617853343926e-06,
"loss": 0.5681,
"step": 4032
},
{
"epoch": 3.657965796579658,
"grad_norm": 1.9844887256622314,
"learning_rate": 2.0390014161943224e-06,
"loss": 0.55,
"step": 4064
},
{
"epoch": 3.6867686768676866,
"grad_norm": 1.2234737873077393,
"learning_rate": 1.958582427951051e-06,
"loss": 0.5517,
"step": 4096
},
{
"epoch": 3.7155715571557155,
"grad_norm": 1.3753498792648315,
"learning_rate": 1.8793934106254557e-06,
"loss": 0.5613,
"step": 4128
},
{
"epoch": 3.7443744374437444,
"grad_norm": 1.6427245140075684,
"learning_rate": 1.8014663888198763e-06,
"loss": 0.5548,
"step": 4160
},
{
"epoch": 3.7731773177317733,
"grad_norm": 2.217555522918701,
"learning_rate": 1.7248328767766875e-06,
"loss": 0.5803,
"step": 4192
},
{
"epoch": 3.801980198019802,
"grad_norm": 1.24624764919281,
"learning_rate": 1.6495238656337076e-06,
"loss": 0.6296,
"step": 4224
},
{
"epoch": 3.8307830783078307,
"grad_norm": 1.3684077262878418,
"learning_rate": 1.575569810891156e-06,
"loss": 0.5865,
"step": 4256
},
{
"epoch": 3.8595859585958596,
"grad_norm": 2.7931735515594482,
"learning_rate": 1.503000620095244e-06,
"loss": 0.5758,
"step": 4288
},
{
"epoch": 3.8883888388838885,
"grad_norm": 1.8946348428726196,
"learning_rate": 1.4318456407433434e-06,
"loss": 0.563,
"step": 4320
},
{
"epoch": 3.9171917191719174,
"grad_norm": 1.750241756439209,
"learning_rate": 1.3621336484156456e-06,
"loss": 0.5692,
"step": 4352
},
{
"epoch": 3.945994599459946,
"grad_norm": 1.4938685894012451,
"learning_rate": 1.2938928351381224e-06,
"loss": 0.5379,
"step": 4384
},
{
"epoch": 3.974797479747975,
"grad_norm": 4.275557041168213,
"learning_rate": 1.2271507979814624e-06,
"loss": 0.534,
"step": 4416
},
{
"epoch": 4.003600360036003,
"grad_norm": 1.308555245399475,
"learning_rate": 1.1619345279006212e-06,
"loss": 0.5957,
"step": 4448
},
{
"epoch": 4.032403240324032,
"grad_norm": 1.5460844039916992,
"learning_rate": 1.0982703988194876e-06,
"loss": 0.5711,
"step": 4480
},
{
"epoch": 4.061206120612061,
"grad_norm": 1.5028718709945679,
"learning_rate": 1.0361841569650816e-06,
"loss": 0.5848,
"step": 4512
},
{
"epoch": 4.09000900090009,
"grad_norm": 1.7333652973175049,
"learning_rate": 9.75700910455592e-07,
"loss": 0.5748,
"step": 4544
},
{
"epoch": 4.118811881188119,
"grad_norm": 1.941709041595459,
"learning_rate": 9.168451191464822e-07,
"loss": 0.5728,
"step": 4576
},
{
"epoch": 4.147614761476148,
"grad_norm": 1.418080449104309,
"learning_rate": 8.596405847387462e-07,
"loss": 0.5787,
"step": 4608
},
{
"epoch": 4.176417641764177,
"grad_norm": 1.187457799911499,
"learning_rate": 8.041104411533329e-07,
"loss": 0.5324,
"step": 4640
},
{
"epoch": 4.205220522052206,
"grad_norm": 1.3761279582977295,
"learning_rate": 7.502771451756197e-07,
"loss": 0.5721,
"step": 4672
},
{
"epoch": 4.234023402340234,
"grad_norm": 0.9498482942581177,
"learning_rate": 6.981624673737336e-07,
"loss": 0.5712,
"step": 4704
},
{
"epoch": 4.262826282628263,
"grad_norm": 1.5619556903839111,
"learning_rate": 6.477874832943781e-07,
"loss": 0.5483,
"step": 4736
},
{
"epoch": 4.2916291629162915,
"grad_norm": 1.354587435722351,
"learning_rate": 5.991725649397279e-07,
"loss": 0.5314,
"step": 4768
},
{
"epoch": 4.32043204320432,
"grad_norm": 1.8367897272109985,
"learning_rate": 5.523373725288506e-07,
"loss": 0.5895,
"step": 4800
},
{
"epoch": 4.349234923492349,
"grad_norm": 1.9770034551620483,
"learning_rate": 5.073008465469731e-07,
"loss": 0.5482,
"step": 4832
},
{
"epoch": 4.378037803780378,
"grad_norm": 1.3692898750305176,
"learning_rate": 4.640812000858108e-07,
"loss": 0.5754,
"step": 4864
},
{
"epoch": 4.406840684068407,
"grad_norm": 1.9973537921905518,
"learning_rate": 4.226959114780699e-07,
"loss": 0.5309,
"step": 4896
},
{
"epoch": 4.435643564356436,
"grad_norm": 1.6791578531265259,
"learning_rate": 3.831617172290808e-07,
"loss": 0.5791,
"step": 4928
},
{
"epoch": 4.464446444644464,
"grad_norm": 1.2366864681243896,
"learning_rate": 3.454946052484376e-07,
"loss": 0.5401,
"step": 4960
},
{
"epoch": 4.493249324932493,
"grad_norm": 1.513241171836853,
"learning_rate": 3.0970980838437416e-07,
"loss": 0.5923,
"step": 4992
},
{
"epoch": 4.522052205220522,
"grad_norm": 1.3842517137527466,
"learning_rate": 2.758217982634903e-07,
"loss": 0.5654,
"step": 5024
},
{
"epoch": 4.550855085508551,
"grad_norm": 1.9963948726654053,
"learning_rate": 2.4384427943832335e-07,
"loss": 0.5736,
"step": 5056
},
{
"epoch": 4.57965796579658,
"grad_norm": 1.2688953876495361,
"learning_rate": 2.1379018384513116e-07,
"loss": 0.5604,
"step": 5088
},
{
"epoch": 4.608460846084609,
"grad_norm": 1.2119768857955933,
"learning_rate": 1.856716655741242e-07,
"loss": 0.561,
"step": 5120
},
{
"epoch": 4.6372637263726375,
"grad_norm": 1.7336974143981934,
"learning_rate": 1.5950009595426474e-07,
"loss": 0.579,
"step": 5152
},
{
"epoch": 4.666066606660666,
"grad_norm": 3.910386562347412,
"learning_rate": 1.3528605895461734e-07,
"loss": 0.5591,
"step": 5184
},
{
"epoch": 4.694869486948695,
"grad_norm": 1.0510704517364502,
"learning_rate": 1.130393469041241e-07,
"loss": 0.5636,
"step": 5216
},
{
"epoch": 4.723672367236723,
"grad_norm": 1.8472617864608765,
"learning_rate": 9.27689565315093e-08,
"loss": 0.5553,
"step": 5248
},
{
"epoch": 4.752475247524752,
"grad_norm": 1.5597736835479736,
"learning_rate": 7.448308532694237e-08,
"loss": 0.5721,
"step": 5280
},
{
"epoch": 4.781278127812781,
"grad_norm": 1.633854627609253,
"learning_rate": 5.8189128226913695e-08,
"loss": 0.5994,
"step": 5312
},
{
"epoch": 4.81008100810081,
"grad_norm": 4.339478015899658,
"learning_rate": 4.389367462367011e-08,
"loss": 0.5648,
"step": 5344
},
{
"epoch": 4.838883888388839,
"grad_norm": 2.3053483963012695,
"learning_rate": 3.160250570041601e-08,
"loss": 0.5796,
"step": 5376
},
{
"epoch": 4.867686768676868,
"grad_norm": 1.711835503578186,
"learning_rate": 2.1320592093364013e-08,
"loss": 0.5467,
"step": 5408
},
{
"epoch": 4.896489648964897,
"grad_norm": 1.162542700767517,
"learning_rate": 1.305209188157186e-08,
"loss": 0.5602,
"step": 5440
},
{
"epoch": 4.925292529252925,
"grad_norm": 1.5160582065582275,
"learning_rate": 6.8003489053886094e-09,
"loss": 0.5406,
"step": 5472
},
{
"epoch": 4.954095409540954,
"grad_norm": 1.8680617809295654,
"learning_rate": 2.5678914141791243e-09,
"loss": 0.553,
"step": 5504
},
{
"epoch": 4.982898289828983,
"grad_norm": 1.2880302667617798,
"learning_rate": 3.564310438830543e-10,
"loss": 0.5609,
"step": 5536
},
{
"epoch": 5.0,
"step": 5555,
"total_flos": 5.796630098241126e+16,
"train_loss": 0.6057587686258861,
"train_runtime": 1046.8819,
"train_samples_per_second": 10.612,
"train_steps_per_second": 5.306
}
],
"logging_steps": 32,
"max_steps": 5555,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5.796630098241126e+16,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}