{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.9975624619134673,
"eval_steps": 50000,
"global_step": 3280,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01218769043266301,
"grad_norm": 3.578172206878662,
"learning_rate": 5.05050505050505e-07,
"loss": 0.7319,
"step": 10
},
{
"epoch": 0.02437538086532602,
"grad_norm": 4.391892433166504,
"learning_rate": 1.01010101010101e-06,
"loss": 0.6746,
"step": 20
},
{
"epoch": 0.03656307129798903,
"grad_norm": 4.259323596954346,
"learning_rate": 1.5151515151515152e-06,
"loss": 0.722,
"step": 30
},
{
"epoch": 0.04875076173065204,
"grad_norm": 2.5641062259674072,
"learning_rate": 2.02020202020202e-06,
"loss": 0.6053,
"step": 40
},
{
"epoch": 0.06093845216331505,
"grad_norm": 2.138474225997925,
"learning_rate": 2.5252525252525258e-06,
"loss": 0.467,
"step": 50
},
{
"epoch": 0.07312614259597806,
"grad_norm": 3.2234861850738525,
"learning_rate": 3.0303030303030305e-06,
"loss": 0.4346,
"step": 60
},
{
"epoch": 0.08531383302864107,
"grad_norm": 1.6952118873596191,
"learning_rate": 3.5353535353535356e-06,
"loss": 0.4145,
"step": 70
},
{
"epoch": 0.09750152346130408,
"grad_norm": 1.6948118209838867,
"learning_rate": 4.04040404040404e-06,
"loss": 0.4062,
"step": 80
},
{
"epoch": 0.10968921389396709,
"grad_norm": 1.8601840734481812,
"learning_rate": 4.5454545454545455e-06,
"loss": 0.3627,
"step": 90
},
{
"epoch": 0.1218769043266301,
"grad_norm": 1.8506708145141602,
"learning_rate": 4.99999878077914e-06,
"loss": 0.3336,
"step": 100
},
{
"epoch": 0.1340645947592931,
"grad_norm": 1.8687206506729126,
"learning_rate": 4.9998524757147466e-06,
"loss": 0.3489,
"step": 110
},
{
"epoch": 0.14625228519195613,
"grad_norm": 2.1408703327178955,
"learning_rate": 4.999462342829388e-06,
"loss": 0.3617,
"step": 120
},
{
"epoch": 0.15843997562461914,
"grad_norm": 1.6488077640533447,
"learning_rate": 4.9988284201754075e-06,
"loss": 0.3894,
"step": 130
},
{
"epoch": 0.17062766605728213,
"grad_norm": 1.5750221014022827,
"learning_rate": 4.99795076958365e-06,
"loss": 0.3448,
"step": 140
},
{
"epoch": 0.18281535648994515,
"grad_norm": 1.7642357349395752,
"learning_rate": 4.996829476657414e-06,
"loss": 0.3187,
"step": 150
},
{
"epoch": 0.19500304692260817,
"grad_norm": 1.404475212097168,
"learning_rate": 4.995464650764122e-06,
"loss": 0.3343,
"step": 160
},
{
"epoch": 0.2071907373552712,
"grad_norm": 1.5260435342788696,
"learning_rate": 4.993856425024637e-06,
"loss": 0.3143,
"step": 170
},
{
"epoch": 0.21937842778793418,
"grad_norm": 1.5985782146453857,
"learning_rate": 4.992004956300287e-06,
"loss": 0.3301,
"step": 180
},
{
"epoch": 0.2315661182205972,
"grad_norm": 1.6050117015838623,
"learning_rate": 4.989910425177561e-06,
"loss": 0.3501,
"step": 190
},
{
"epoch": 0.2437538086532602,
"grad_norm": 1.5097649097442627,
"learning_rate": 4.987573035950499e-06,
"loss": 0.283,
"step": 200
},
{
"epoch": 0.25594149908592323,
"grad_norm": 1.6257539987564087,
"learning_rate": 4.984993016600763e-06,
"loss": 0.3144,
"step": 210
},
{
"epoch": 0.2681291895185862,
"grad_norm": 1.656672477722168,
"learning_rate": 4.982170618775401e-06,
"loss": 0.3221,
"step": 220
},
{
"epoch": 0.28031687995124926,
"grad_norm": 1.3519649505615234,
"learning_rate": 4.979106117762303e-06,
"loss": 0.3244,
"step": 230
},
{
"epoch": 0.29250457038391225,
"grad_norm": 1.7282569408416748,
"learning_rate": 4.975799812463348e-06,
"loss": 0.3112,
"step": 240
},
{
"epoch": 0.30469226081657524,
"grad_norm": 1.7917057275772095,
"learning_rate": 4.972252025365251e-06,
"loss": 0.3321,
"step": 250
},
{
"epoch": 0.3168799512492383,
"grad_norm": 1.2842172384262085,
"learning_rate": 4.968463102508114e-06,
"loss": 0.3337,
"step": 260
},
{
"epoch": 0.3290676416819013,
"grad_norm": 1.4093726873397827,
"learning_rate": 4.9644334134516645e-06,
"loss": 0.3029,
"step": 270
},
{
"epoch": 0.34125533211456427,
"grad_norm": 1.5072354078292847,
"learning_rate": 4.960163351239216e-06,
"loss": 0.3411,
"step": 280
},
{
"epoch": 0.3534430225472273,
"grad_norm": 1.494828224182129,
"learning_rate": 4.955653332359331e-06,
"loss": 0.3192,
"step": 290
},
{
"epoch": 0.3656307129798903,
"grad_norm": 1.6439958810806274,
"learning_rate": 4.950903796705201e-06,
"loss": 0.3113,
"step": 300
},
{
"epoch": 0.3778184034125533,
"grad_norm": 1.5399495363235474,
"learning_rate": 4.94591520753173e-06,
"loss": 0.3191,
"step": 310
},
{
"epoch": 0.39000609384521634,
"grad_norm": 1.697625756263733,
"learning_rate": 4.940688051410364e-06,
"loss": 0.3092,
"step": 320
},
{
"epoch": 0.40219378427787933,
"grad_norm": 1.5218887329101562,
"learning_rate": 4.935222838181623e-06,
"loss": 0.3343,
"step": 330
},
{
"epoch": 0.4143814747105424,
"grad_norm": 1.6118853092193604,
"learning_rate": 4.929520100905375e-06,
"loss": 0.3115,
"step": 340
},
{
"epoch": 0.42656916514320536,
"grad_norm": 1.3823405504226685,
"learning_rate": 4.923580395808846e-06,
"loss": 0.2922,
"step": 350
},
{
"epoch": 0.43875685557586835,
"grad_norm": 1.289500117301941,
"learning_rate": 4.917404302232362e-06,
"loss": 0.3211,
"step": 360
},
{
"epoch": 0.4509445460085314,
"grad_norm": 1.5171111822128296,
"learning_rate": 4.910992422572845e-06,
"loss": 0.3205,
"step": 370
},
{
"epoch": 0.4631322364411944,
"grad_norm": 1.4650630950927734,
"learning_rate": 4.904345382225058e-06,
"loss": 0.331,
"step": 380
},
{
"epoch": 0.4753199268738574,
"grad_norm": 1.4301706552505493,
"learning_rate": 4.897463829520604e-06,
"loss": 0.3224,
"step": 390
},
{
"epoch": 0.4875076173065204,
"grad_norm": 1.3901742696762085,
"learning_rate": 4.890348435664694e-06,
"loss": 0.3054,
"step": 400
},
{
"epoch": 0.4996953077391834,
"grad_norm": 1.703951358795166,
"learning_rate": 4.882999894670669e-06,
"loss": 0.2973,
"step": 410
},
{
"epoch": 0.5118829981718465,
"grad_norm": 1.3318638801574707,
"learning_rate": 4.875418923292322e-06,
"loss": 0.295,
"step": 420
},
{
"epoch": 0.5240706886045094,
"grad_norm": 1.420301914215088,
"learning_rate": 4.867606260953979e-06,
"loss": 0.3027,
"step": 430
},
{
"epoch": 0.5362583790371724,
"grad_norm": 1.350051999092102,
"learning_rate": 4.85956266967838e-06,
"loss": 0.3007,
"step": 440
},
{
"epoch": 0.5484460694698354,
"grad_norm": 1.2033629417419434,
"learning_rate": 4.8512889340123535e-06,
"loss": 0.3168,
"step": 450
},
{
"epoch": 0.5606337599024985,
"grad_norm": 1.508357286453247,
"learning_rate": 4.842785860950296e-06,
"loss": 0.3137,
"step": 460
},
{
"epoch": 0.5728214503351615,
"grad_norm": 1.8802692890167236,
"learning_rate": 4.834054279855459e-06,
"loss": 0.3118,
"step": 470
},
{
"epoch": 0.5850091407678245,
"grad_norm": 1.3695592880249023,
"learning_rate": 4.825095042379054e-06,
"loss": 0.289,
"step": 480
},
{
"epoch": 0.5971968312004875,
"grad_norm": 1.6230717897415161,
"learning_rate": 4.815909022377189e-06,
"loss": 0.2937,
"step": 490
},
{
"epoch": 0.6093845216331505,
"grad_norm": 1.2749711275100708,
"learning_rate": 4.806497115825629e-06,
"loss": 0.3063,
"step": 500
},
{
"epoch": 0.6215722120658135,
"grad_norm": 1.3291198015213013,
"learning_rate": 4.796860240732414e-06,
"loss": 0.316,
"step": 510
},
{
"epoch": 0.6337599024984766,
"grad_norm": 1.5014513731002808,
"learning_rate": 4.786999337048311e-06,
"loss": 0.3127,
"step": 520
},
{
"epoch": 0.6459475929311396,
"grad_norm": 1.442225456237793,
"learning_rate": 4.77691536657514e-06,
"loss": 0.3057,
"step": 530
},
{
"epoch": 0.6581352833638026,
"grad_norm": 1.5001622438430786,
"learning_rate": 4.766609312871958e-06,
"loss": 0.3006,
"step": 540
},
{
"epoch": 0.6703229737964655,
"grad_norm": 1.5903184413909912,
"learning_rate": 4.756082181159131e-06,
"loss": 0.3131,
"step": 550
},
{
"epoch": 0.6825106642291285,
"grad_norm": 1.5622376203536987,
"learning_rate": 4.745334998220282e-06,
"loss": 0.3079,
"step": 560
},
{
"epoch": 0.6946983546617916,
"grad_norm": 1.5198042392730713,
"learning_rate": 4.734368812302148e-06,
"loss": 0.3257,
"step": 570
},
{
"epoch": 0.7068860450944546,
"grad_norm": 1.7607523202896118,
"learning_rate": 4.723184693012334e-06,
"loss": 0.2991,
"step": 580
},
{
"epoch": 0.7190737355271176,
"grad_norm": 1.416548252105713,
"learning_rate": 4.711783731214984e-06,
"loss": 0.3093,
"step": 590
},
{
"epoch": 0.7312614259597806,
"grad_norm": 1.419439435005188,
"learning_rate": 4.700167038924386e-06,
"loss": 0.2807,
"step": 600
},
{
"epoch": 0.7434491163924436,
"grad_norm": 1.4541571140289307,
"learning_rate": 4.688335749196511e-06,
"loss": 0.3026,
"step": 610
},
{
"epoch": 0.7556368068251066,
"grad_norm": 1.3236526250839233,
"learning_rate": 4.676291016018491e-06,
"loss": 0.2992,
"step": 620
},
{
"epoch": 0.7678244972577697,
"grad_norm": 1.555803656578064,
"learning_rate": 4.664034014196069e-06,
"loss": 0.3223,
"step": 630
},
{
"epoch": 0.7800121876904327,
"grad_norm": 1.5638824701309204,
"learning_rate": 4.651565939239011e-06,
"loss": 0.3069,
"step": 640
},
{
"epoch": 0.7921998781230957,
"grad_norm": 1.5447410345077515,
"learning_rate": 4.638888007244498e-06,
"loss": 0.3112,
"step": 650
},
{
"epoch": 0.8043875685557587,
"grad_norm": 1.562729835510254,
"learning_rate": 4.626001454778511e-06,
"loss": 0.2949,
"step": 660
},
{
"epoch": 0.8165752589884216,
"grad_norm": 1.2367265224456787,
"learning_rate": 4.612907538755224e-06,
"loss": 0.3128,
"step": 670
},
{
"epoch": 0.8287629494210847,
"grad_norm": 1.6397625207901,
"learning_rate": 4.599607536314406e-06,
"loss": 0.3115,
"step": 680
},
{
"epoch": 0.8409506398537477,
"grad_norm": 1.392140507698059,
"learning_rate": 4.586102744696851e-06,
"loss": 0.3189,
"step": 690
},
{
"epoch": 0.8531383302864107,
"grad_norm": 1.19057035446167,
"learning_rate": 4.572394481117855e-06,
"loss": 0.2756,
"step": 700
},
{
"epoch": 0.8653260207190737,
"grad_norm": 1.3636456727981567,
"learning_rate": 4.558484082638729e-06,
"loss": 0.3078,
"step": 710
},
{
"epoch": 0.8775137111517367,
"grad_norm": 1.2579822540283203,
"learning_rate": 4.544372906036399e-06,
"loss": 0.3017,
"step": 720
},
{
"epoch": 0.8897014015843998,
"grad_norm": 1.3275364637374878,
"learning_rate": 4.5300623276710604e-06,
"loss": 0.3277,
"step": 730
},
{
"epoch": 0.9018890920170628,
"grad_norm": 1.1951191425323486,
"learning_rate": 4.515553743351934e-06,
"loss": 0.2817,
"step": 740
},
{
"epoch": 0.9140767824497258,
"grad_norm": 1.5521240234375,
"learning_rate": 4.5008485682011265e-06,
"loss": 0.2845,
"step": 750
},
{
"epoch": 0.9262644728823888,
"grad_norm": 1.7351168394088745,
"learning_rate": 4.4859482365156e-06,
"loss": 0.3032,
"step": 760
},
{
"epoch": 0.9384521633150518,
"grad_norm": 1.2030630111694336,
"learning_rate": 4.470854201627278e-06,
"loss": 0.3154,
"step": 770
},
{
"epoch": 0.9506398537477148,
"grad_norm": 1.4315277338027954,
"learning_rate": 4.4555679357612925e-06,
"loss": 0.3276,
"step": 780
},
{
"epoch": 0.9628275441803779,
"grad_norm": 1.5054643154144287,
"learning_rate": 4.440090929892382e-06,
"loss": 0.2941,
"step": 790
},
{
"epoch": 0.9750152346130408,
"grad_norm": 1.1270413398742676,
"learning_rate": 4.424424693599474e-06,
"loss": 0.2795,
"step": 800
},
{
"epoch": 0.9872029250457038,
"grad_norm": 1.823027491569519,
"learning_rate": 4.4085707549184395e-06,
"loss": 0.2968,
"step": 810
},
{
"epoch": 0.9993906154783668,
"grad_norm": 1.2933018207550049,
"learning_rate": 4.392530660193058e-06,
"loss": 0.289,
"step": 820
},
{
"epoch": 1.01157830591103,
"grad_norm": 1.5592014789581299,
"learning_rate": 4.376305973924188e-06,
"loss": 0.2459,
"step": 830
},
{
"epoch": 1.023765996343693,
"grad_norm": 1.4350308179855347,
"learning_rate": 4.359898278617171e-06,
"loss": 0.2095,
"step": 840
},
{
"epoch": 1.035953686776356,
"grad_norm": 1.4992603063583374,
"learning_rate": 4.343309174627484e-06,
"loss": 0.2504,
"step": 850
},
{
"epoch": 1.048141377209019,
"grad_norm": 1.5574085712432861,
"learning_rate": 4.326540280004634e-06,
"loss": 0.2335,
"step": 860
},
{
"epoch": 1.0603290676416819,
"grad_norm": 1.4838849306106567,
"learning_rate": 4.309593230334355e-06,
"loss": 0.2312,
"step": 870
},
{
"epoch": 1.0725167580743449,
"grad_norm": 1.5157781839370728,
"learning_rate": 4.292469678579063e-06,
"loss": 0.2367,
"step": 880
},
{
"epoch": 1.0847044485070079,
"grad_norm": 1.3213136196136475,
"learning_rate": 4.275171294916641e-06,
"loss": 0.2445,
"step": 890
},
{
"epoch": 1.0968921389396709,
"grad_norm": 1.2431010007858276,
"learning_rate": 4.2576997665775275e-06,
"loss": 0.2329,
"step": 900
},
{
"epoch": 1.1090798293723338,
"grad_norm": 1.3120806217193604,
"learning_rate": 4.2400567976801575e-06,
"loss": 0.2174,
"step": 910
},
{
"epoch": 1.1212675198049968,
"grad_norm": 1.7084835767745972,
"learning_rate": 4.22224410906474e-06,
"loss": 0.2304,
"step": 920
},
{
"epoch": 1.13345521023766,
"grad_norm": 1.3486850261688232,
"learning_rate": 4.204263438125421e-06,
"loss": 0.2206,
"step": 930
},
{
"epoch": 1.145642900670323,
"grad_norm": 1.3106797933578491,
"learning_rate": 4.186116538640814e-06,
"loss": 0.2348,
"step": 940
},
{
"epoch": 1.157830591102986,
"grad_norm": 1.3655650615692139,
"learning_rate": 4.167805180602951e-06,
"loss": 0.2071,
"step": 950
},
{
"epoch": 1.170018281535649,
"grad_norm": 1.3001152276992798,
"learning_rate": 4.149331150044635e-06,
"loss": 0.2053,
"step": 960
},
{
"epoch": 1.182205971968312,
"grad_norm": 1.604116678237915,
"learning_rate": 4.130696248865244e-06,
"loss": 0.2068,
"step": 970
},
{
"epoch": 1.194393662400975,
"grad_norm": 1.3530467748641968,
"learning_rate": 4.111902294654971e-06,
"loss": 0.2468,
"step": 980
},
{
"epoch": 1.206581352833638,
"grad_norm": 1.5899338722229004,
"learning_rate": 4.09295112051755e-06,
"loss": 0.2254,
"step": 990
},
{
"epoch": 1.218769043266301,
"grad_norm": 1.774383306503296,
"learning_rate": 4.073844574891452e-06,
"loss": 0.2306,
"step": 1000
},
{
"epoch": 1.230956733698964,
"grad_norm": 1.1993497610092163,
"learning_rate": 4.054584521369603e-06,
"loss": 0.2294,
"step": 1010
},
{
"epoch": 1.2431444241316272,
"grad_norm": 1.2832852602005005,
"learning_rate": 4.03517283851761e-06,
"loss": 0.2222,
"step": 1020
},
{
"epoch": 1.2553321145642902,
"grad_norm": 1.370401382446289,
"learning_rate": 4.01561141969053e-06,
"loss": 0.2636,
"step": 1030
},
{
"epoch": 1.2675198049969532,
"grad_norm": 1.5473796129226685,
"learning_rate": 3.995902172848205e-06,
"loss": 0.2509,
"step": 1040
},
{
"epoch": 1.2797074954296161,
"grad_norm": 1.3431543111801147,
"learning_rate": 3.976047020369155e-06,
"loss": 0.2165,
"step": 1050
},
{
"epoch": 1.2918951858622791,
"grad_norm": 1.580149531364441,
"learning_rate": 3.9560478988630866e-06,
"loss": 0.22,
"step": 1060
},
{
"epoch": 1.3040828762949421,
"grad_norm": 1.5631303787231445,
"learning_rate": 3.935906758981993e-06,
"loss": 0.2258,
"step": 1070
},
{
"epoch": 1.3162705667276051,
"grad_norm": 1.7426769733428955,
"learning_rate": 3.9156255652299005e-06,
"loss": 0.2378,
"step": 1080
},
{
"epoch": 1.328458257160268,
"grad_norm": 1.5809776782989502,
"learning_rate": 3.89520629577125e-06,
"loss": 0.2477,
"step": 1090
},
{
"epoch": 1.340645947592931,
"grad_norm": 1.4274479150772095,
"learning_rate": 3.8746509422379575e-06,
"loss": 0.2073,
"step": 1100
},
{
"epoch": 1.352833638025594,
"grad_norm": 1.4454461336135864,
"learning_rate": 3.853961509535159e-06,
"loss": 0.2199,
"step": 1110
},
{
"epoch": 1.365021328458257,
"grad_norm": 1.6029589176177979,
"learning_rate": 3.83314001564565e-06,
"loss": 0.2284,
"step": 1120
},
{
"epoch": 1.37720901889092,
"grad_norm": 1.6308578252792358,
"learning_rate": 3.8121884914330663e-06,
"loss": 0.2372,
"step": 1130
},
{
"epoch": 1.389396709323583,
"grad_norm": 1.8481136560440063,
"learning_rate": 3.791108980443794e-06,
"loss": 0.2565,
"step": 1140
},
{
"epoch": 1.4015843997562463,
"grad_norm": 1.325615644454956,
"learning_rate": 3.769903538707652e-06,
"loss": 0.2066,
"step": 1150
},
{
"epoch": 1.4137720901889093,
"grad_norm": 2.084979295730591,
"learning_rate": 3.7485742345373517e-06,
"loss": 0.2217,
"step": 1160
},
{
"epoch": 1.4259597806215722,
"grad_norm": 1.0964964628219604,
"learning_rate": 3.727123148326758e-06,
"loss": 0.2218,
"step": 1170
},
{
"epoch": 1.4381474710542352,
"grad_norm": 1.4757567644119263,
"learning_rate": 3.705552372347981e-06,
"loss": 0.225,
"step": 1180
},
{
"epoch": 1.4503351614868982,
"grad_norm": 1.6368629932403564,
"learning_rate": 3.683864010547294e-06,
"loss": 0.2346,
"step": 1190
},
{
"epoch": 1.4625228519195612,
"grad_norm": 1.3107318878173828,
"learning_rate": 3.662060178339927e-06,
"loss": 0.2393,
"step": 1200
},
{
"epoch": 1.4747105423522242,
"grad_norm": 1.5922001600265503,
"learning_rate": 3.6401430024037315e-06,
"loss": 0.2361,
"step": 1210
},
{
"epoch": 1.4868982327848872,
"grad_norm": 1.651711344718933,
"learning_rate": 3.618114620471756e-06,
"loss": 0.2198,
"step": 1220
},
{
"epoch": 1.4990859232175504,
"grad_norm": 1.4375849962234497,
"learning_rate": 3.5959771811237342e-06,
"loss": 0.2396,
"step": 1230
},
{
"epoch": 1.5112736136502134,
"grad_norm": 1.561681866645813,
"learning_rate": 3.573732843576519e-06,
"loss": 0.2308,
"step": 1240
},
{
"epoch": 1.5234613040828764,
"grad_norm": 1.8882336616516113,
"learning_rate": 3.5513837774734816e-06,
"loss": 0.2051,
"step": 1250
},
{
"epoch": 1.5356489945155394,
"grad_norm": 1.4128990173339844,
"learning_rate": 3.5289321626728912e-06,
"loss": 0.2526,
"step": 1260
},
{
"epoch": 1.5478366849482024,
"grad_norm": 1.9354671239852905,
"learning_rate": 3.5063801890352955e-06,
"loss": 0.2112,
"step": 1270
},
{
"epoch": 1.5600243753808654,
"grad_norm": 1.6497639417648315,
"learning_rate": 3.4837300562099324e-06,
"loss": 0.2199,
"step": 1280
},
{
"epoch": 1.5722120658135283,
"grad_norm": 1.467066764831543,
"learning_rate": 3.4609839734201793e-06,
"loss": 0.249,
"step": 1290
},
{
"epoch": 1.5843997562461913,
"grad_norm": 1.3116928339004517,
"learning_rate": 3.4381441592480756e-06,
"loss": 0.2634,
"step": 1300
},
{
"epoch": 1.5965874466788543,
"grad_norm": 1.9211691617965698,
"learning_rate": 3.4152128414179263e-06,
"loss": 0.2243,
"step": 1310
},
{
"epoch": 1.6087751371115173,
"grad_norm": 1.396285057067871,
"learning_rate": 3.3921922565790188e-06,
"loss": 0.2478,
"step": 1320
},
{
"epoch": 1.6209628275441803,
"grad_norm": 1.3925182819366455,
"learning_rate": 3.3690846500874664e-06,
"loss": 0.2005,
"step": 1330
},
{
"epoch": 1.6331505179768433,
"grad_norm": 1.3843952417373657,
"learning_rate": 3.345892275787204e-06,
"loss": 0.2517,
"step": 1340
},
{
"epoch": 1.6453382084095063,
"grad_norm": 1.3334903717041016,
"learning_rate": 3.3226173957901533e-06,
"loss": 0.2361,
"step": 1350
},
{
"epoch": 1.6575258988421693,
"grad_norm": 1.5368155241012573,
"learning_rate": 3.2992622802555844e-06,
"loss": 0.2228,
"step": 1360
},
{
"epoch": 1.6697135892748323,
"grad_norm": 1.563447117805481,
"learning_rate": 3.2758292071686928e-06,
"loss": 0.2347,
"step": 1370
},
{
"epoch": 1.6819012797074955,
"grad_norm": 1.3434542417526245,
"learning_rate": 3.2523204621184094e-06,
"loss": 0.2326,
"step": 1380
},
{
"epoch": 1.6940889701401585,
"grad_norm": 1.6988780498504639,
"learning_rate": 3.2287383380744746e-06,
"loss": 0.2281,
"step": 1390
},
{
"epoch": 1.7062766605728215,
"grad_norm": 1.9776115417480469,
"learning_rate": 3.2050851351637853e-06,
"loss": 0.2109,
"step": 1400
},
{
"epoch": 1.7184643510054844,
"grad_norm": 1.5286093950271606,
"learning_rate": 3.1813631604460504e-06,
"loss": 0.2387,
"step": 1410
},
{
"epoch": 1.7306520414381474,
"grad_norm": 1.8186579942703247,
"learning_rate": 3.1575747276887657e-06,
"loss": 0.2348,
"step": 1420
},
{
"epoch": 1.7428397318708104,
"grad_norm": 1.8635733127593994,
"learning_rate": 3.1337221571415388e-06,
"loss": 0.2323,
"step": 1430
},
{
"epoch": 1.7550274223034736,
"grad_norm": 1.9394007921218872,
"learning_rate": 3.1098077753097763e-06,
"loss": 0.238,
"step": 1440
},
{
"epoch": 1.7672151127361366,
"grad_norm": 1.6605966091156006,
"learning_rate": 3.085833914727765e-06,
"loss": 0.2223,
"step": 1450
},
{
"epoch": 1.7794028031687996,
"grad_norm": 1.7482880353927612,
"learning_rate": 3.0618029137311634e-06,
"loss": 0.2271,
"step": 1460
},
{
"epoch": 1.7915904936014626,
"grad_norm": 1.3232003450393677,
"learning_rate": 3.037717116228929e-06,
"loss": 0.2372,
"step": 1470
},
{
"epoch": 1.8037781840341256,
"grad_norm": 1.541633129119873,
"learning_rate": 3.013578871474699e-06,
"loss": 0.2397,
"step": 1480
},
{
"epoch": 1.8159658744667886,
"grad_norm": 1.5761010646820068,
"learning_rate": 2.9893905338376503e-06,
"loss": 0.2237,
"step": 1490
},
{
"epoch": 1.8281535648994516,
"grad_norm": 1.5969911813735962,
"learning_rate": 2.965154462572869e-06,
"loss": 0.2099,
"step": 1500
},
{
"epoch": 1.8403412553321146,
"grad_norm": 1.5702909231185913,
"learning_rate": 2.9408730215912247e-06,
"loss": 0.2205,
"step": 1510
},
{
"epoch": 1.8525289457647776,
"grad_norm": 1.5175156593322754,
"learning_rate": 2.91654857922881e-06,
"loss": 0.2149,
"step": 1520
},
{
"epoch": 1.8647166361974405,
"grad_norm": 1.2331769466400146,
"learning_rate": 2.892183508015939e-06,
"loss": 0.2309,
"step": 1530
},
{
"epoch": 1.8769043266301035,
"grad_norm": 1.3271231651306152,
"learning_rate": 2.867780184445735e-06,
"loss": 0.2254,
"step": 1540
},
{
"epoch": 1.8890920170627665,
"grad_norm": 1.648229956626892,
"learning_rate": 2.8433409887423397e-06,
"loss": 0.2251,
"step": 1550
},
{
"epoch": 1.9012797074954295,
"grad_norm": 1.3332548141479492,
"learning_rate": 2.8188683046287496e-06,
"loss": 0.2375,
"step": 1560
},
{
"epoch": 1.9134673979280925,
"grad_norm": 1.6312288045883179,
"learning_rate": 2.794364519094317e-06,
"loss": 0.2195,
"step": 1570
},
{
"epoch": 1.9256550883607555,
"grad_norm": 1.9400866031646729,
"learning_rate": 2.7698320221619278e-06,
"loss": 0.1939,
"step": 1580
},
{
"epoch": 1.9378427787934185,
"grad_norm": 1.6435078382492065,
"learning_rate": 2.7452732066548914e-06,
"loss": 0.2419,
"step": 1590
},
{
"epoch": 1.9500304692260817,
"grad_norm": 1.4376393556594849,
"learning_rate": 2.7206904679635465e-06,
"loss": 0.1974,
"step": 1600
},
{
"epoch": 1.9622181596587447,
"grad_norm": 1.4124246835708618,
"learning_rate": 2.6960862038116265e-06,
"loss": 0.251,
"step": 1610
},
{
"epoch": 1.9744058500914077,
"grad_norm": 1.6351146697998047,
"learning_rate": 2.6714628140223885e-06,
"loss": 0.2148,
"step": 1620
},
{
"epoch": 1.9865935405240707,
"grad_norm": 1.588615894317627,
"learning_rate": 2.6468227002845476e-06,
"loss": 0.2322,
"step": 1630
},
{
"epoch": 1.9987812309567337,
"grad_norm": 1.5556650161743164,
"learning_rate": 2.6221682659180186e-06,
"loss": 0.2059,
"step": 1640
},
{
"epoch": 2.010968921389397,
"grad_norm": 1.7614095211029053,
"learning_rate": 2.597501915639507e-06,
"loss": 0.1819,
"step": 1650
},
{
"epoch": 2.02315661182206,
"grad_norm": 1.7154064178466797,
"learning_rate": 2.5728260553279592e-06,
"loss": 0.1463,
"step": 1660
},
{
"epoch": 2.035344302254723,
"grad_norm": 1.9462904930114746,
"learning_rate": 2.5481430917899e-06,
"loss": 0.1563,
"step": 1670
},
{
"epoch": 2.047531992687386,
"grad_norm": 1.5392667055130005,
"learning_rate": 2.523455432524681e-06,
"loss": 0.1488,
"step": 1680
},
{
"epoch": 2.059719683120049,
"grad_norm": 1.2421252727508545,
"learning_rate": 2.4987654854896606e-06,
"loss": 0.1599,
"step": 1690
},
{
"epoch": 2.071907373552712,
"grad_norm": 1.9953041076660156,
"learning_rate": 2.4740756588653388e-06,
"loss": 0.1493,
"step": 1700
},
{
"epoch": 2.084095063985375,
"grad_norm": 1.3034099340438843,
"learning_rate": 2.4493883608204703e-06,
"loss": 0.161,
"step": 1710
},
{
"epoch": 2.096282754418038,
"grad_norm": 2.1974692344665527,
"learning_rate": 2.4247059992771836e-06,
"loss": 0.1705,
"step": 1720
},
{
"epoch": 2.108470444850701,
"grad_norm": 1.4389041662216187,
"learning_rate": 2.4000309816761105e-06,
"loss": 0.15,
"step": 1730
},
{
"epoch": 2.1206581352833638,
"grad_norm": 1.538169503211975,
"learning_rate": 2.375365714741584e-06,
"loss": 0.1556,
"step": 1740
},
{
"epoch": 2.1328458257160268,
"grad_norm": 1.5628312826156616,
"learning_rate": 2.3507126042468807e-06,
"loss": 0.1846,
"step": 1750
},
{
"epoch": 2.1450335161486898,
"grad_norm": 1.6945581436157227,
"learning_rate": 2.3260740547795818e-06,
"loss": 0.1691,
"step": 1760
},
{
"epoch": 2.1572212065813527,
"grad_norm": 1.927711009979248,
"learning_rate": 2.3014524695070277e-06,
"loss": 0.1617,
"step": 1770
},
{
"epoch": 2.1694088970140157,
"grad_norm": 2.3531131744384766,
"learning_rate": 2.276850249941927e-06,
"loss": 0.1547,
"step": 1780
},
{
"epoch": 2.1815965874466787,
"grad_norm": 1.3538850545883179,
"learning_rate": 2.2522697957081134e-06,
"loss": 0.149,
"step": 1790
},
{
"epoch": 2.1937842778793417,
"grad_norm": 1.6864837408065796,
"learning_rate": 2.2277135043065024e-06,
"loss": 0.1484,
"step": 1800
},
{
"epoch": 2.2059719683120047,
"grad_norm": 1.511353611946106,
"learning_rate": 2.203183770881239e-06,
"loss": 0.1681,
"step": 1810
},
{
"epoch": 2.2181596587446677,
"grad_norm": 1.9546992778778076,
"learning_rate": 2.178682987986088e-06,
"loss": 0.1605,
"step": 1820
},
{
"epoch": 2.2303473491773307,
"grad_norm": 1.924833059310913,
"learning_rate": 2.154213545351067e-06,
"loss": 0.1645,
"step": 1830
},
{
"epoch": 2.2425350396099937,
"grad_norm": 1.4835309982299805,
"learning_rate": 2.129777829649367e-06,
"loss": 0.1544,
"step": 1840
},
{
"epoch": 2.254722730042657,
"grad_norm": 2.8786847591400146,
"learning_rate": 2.1053782242645534e-06,
"loss": 0.1518,
"step": 1850
},
{
"epoch": 2.26691042047532,
"grad_norm": 1.7257006168365479,
"learning_rate": 2.081017109058108e-06,
"loss": 0.1896,
"step": 1860
},
{
"epoch": 2.279098110907983,
"grad_norm": 1.3914209604263306,
"learning_rate": 2.056696860137298e-06,
"loss": 0.1319,
"step": 1870
},
{
"epoch": 2.291285801340646,
"grad_norm": 1.4881747961044312,
"learning_rate": 2.0324198496234227e-06,
"loss": 0.1425,
"step": 1880
},
{
"epoch": 2.303473491773309,
"grad_norm": 1.2742137908935547,
"learning_rate": 2.0081884454204396e-06,
"loss": 0.1517,
"step": 1890
},
{
"epoch": 2.315661182205972,
"grad_norm": 2.283926486968994,
"learning_rate": 1.984005010984011e-06,
"loss": 0.146,
"step": 1900
},
{
"epoch": 2.327848872638635,
"grad_norm": 1.780540943145752,
"learning_rate": 1.9598719050909753e-06,
"loss": 0.1529,
"step": 1910
},
{
"epoch": 2.340036563071298,
"grad_norm": 1.6674730777740479,
"learning_rate": 1.935791481609283e-06,
"loss": 0.1791,
"step": 1920
},
{
"epoch": 2.352224253503961,
"grad_norm": 1.363561987876892,
"learning_rate": 1.9117660892684067e-06,
"loss": 0.1528,
"step": 1930
},
{
"epoch": 2.364411943936624,
"grad_norm": 1.843386173248291,
"learning_rate": 1.8877980714302532e-06,
"loss": 0.139,
"step": 1940
},
{
"epoch": 2.376599634369287,
"grad_norm": 2.3764960765838623,
"learning_rate": 1.8638897658605962e-06,
"loss": 0.1495,
"step": 1950
},
{
"epoch": 2.38878732480195,
"grad_norm": 1.8273096084594727,
"learning_rate": 1.840043504501065e-06,
"loss": 0.1412,
"step": 1960
},
{
"epoch": 2.400975015234613,
"grad_norm": 1.5394623279571533,
"learning_rate": 1.816261613241686e-06,
"loss": 0.1507,
"step": 1970
},
{
"epoch": 2.413162705667276,
"grad_norm": 1.75933837890625,
"learning_rate": 1.7925464116940299e-06,
"loss": 0.1725,
"step": 1980
},
{
"epoch": 2.425350396099939,
"grad_norm": 2.1179471015930176,
"learning_rate": 1.7689002129649584e-06,
"loss": 0.1605,
"step": 1990
},
{
"epoch": 2.437538086532602,
"grad_norm": 1.8330628871917725,
"learning_rate": 1.7453253234310164e-06,
"loss": 0.1599,
"step": 2000
},
{
"epoch": 2.449725776965265,
"grad_norm": 1.8222659826278687,
"learning_rate": 1.7218240425134669e-06,
"loss": 0.1312,
"step": 2010
},
{
"epoch": 2.461913467397928,
"grad_norm": 2.930623769760132,
"learning_rate": 1.6983986624540227e-06,
"loss": 0.1627,
"step": 2020
},
{
"epoch": 2.474101157830591,
"grad_norm": 2.1240384578704834,
"learning_rate": 1.6750514680912606e-06,
"loss": 0.1685,
"step": 2030
},
{
"epoch": 2.4862888482632544,
"grad_norm": 1.4878756999969482,
"learning_rate": 1.6517847366377693e-06,
"loss": 0.1704,
"step": 2040
},
{
"epoch": 2.4984765386959173,
"grad_norm": 1.8569579124450684,
"learning_rate": 1.628600737458037e-06,
"loss": 0.1598,
"step": 2050
},
{
"epoch": 2.5106642291285803,
"grad_norm": 1.5510886907577515,
"learning_rate": 1.605501731847101e-06,
"loss": 0.169,
"step": 2060
},
{
"epoch": 2.5228519195612433,
"grad_norm": 2.6488049030303955,
"learning_rate": 1.5824899728099934e-06,
"loss": 0.1509,
"step": 2070
},
{
"epoch": 2.5350396099939063,
"grad_norm": 2.071225166320801,
"learning_rate": 1.5595677048419855e-06,
"loss": 0.153,
"step": 2080
},
{
"epoch": 2.5472273004265693,
"grad_norm": 2.2100391387939453,
"learning_rate": 1.5367371637096705e-06,
"loss": 0.165,
"step": 2090
},
{
"epoch": 2.5594149908592323,
"grad_norm": 1.6370184421539307,
"learning_rate": 1.5140005762328892e-06,
"loss": 0.1773,
"step": 2100
},
{
"epoch": 2.5716026812918953,
"grad_norm": 1.585856318473816,
"learning_rate": 1.4913601600675387e-06,
"loss": 0.1439,
"step": 2110
},
{
"epoch": 2.5837903717245583,
"grad_norm": 1.8817006349563599,
"learning_rate": 1.468818123489263e-06,
"loss": 0.1378,
"step": 2120
},
{
"epoch": 2.5959780621572213,
"grad_norm": 1.554551124572754,
"learning_rate": 1.4463766651780698e-06,
"loss": 0.1614,
"step": 2130
},
{
"epoch": 2.6081657525898843,
"grad_norm": 2.056910753250122,
"learning_rate": 1.4240379740038758e-06,
"loss": 0.1639,
"step": 2140
},
{
"epoch": 2.6203534430225472,
"grad_norm": 1.518649935722351,
"learning_rate": 1.4018042288130101e-06,
"loss": 0.154,
"step": 2150
},
{
"epoch": 2.6325411334552102,
"grad_norm": 1.8066517114639282,
"learning_rate": 1.3796775982156984e-06,
"loss": 0.1558,
"step": 2160
},
{
"epoch": 2.6447288238878732,
"grad_norm": 1.8393446207046509,
"learning_rate": 1.3576602403745456e-06,
"loss": 0.1618,
"step": 2170
},
{
"epoch": 2.656916514320536,
"grad_norm": 1.4357062578201294,
"learning_rate": 1.3357543027940254e-06,
"loss": 0.1502,
"step": 2180
},
{
"epoch": 2.669104204753199,
"grad_norm": 1.953147053718567,
"learning_rate": 1.3139619221110348e-06,
"loss": 0.161,
"step": 2190
},
{
"epoch": 2.681291895185862,
"grad_norm": 1.988788366317749,
"learning_rate": 1.2922852238864767e-06,
"loss": 0.169,
"step": 2200
},
{
"epoch": 2.693479585618525,
"grad_norm": 1.5082643032073975,
"learning_rate": 1.2707263223979544e-06,
"loss": 0.1466,
"step": 2210
},
{
"epoch": 2.705667276051188,
"grad_norm": 1.587957501411438,
"learning_rate": 1.2492873204335415e-06,
"loss": 0.1594,
"step": 2220
},
{
"epoch": 2.717854966483851,
"grad_norm": 4.279134750366211,
"learning_rate": 1.227970309086685e-06,
"loss": 0.1431,
"step": 2230
},
{
"epoch": 2.730042656916514,
"grad_norm": 2.2551496028900146,
"learning_rate": 1.2067773675522487e-06,
"loss": 0.162,
"step": 2240
},
{
"epoch": 2.742230347349177,
"grad_norm": 1.6612194776535034,
"learning_rate": 1.1857105629237126e-06,
"loss": 0.1597,
"step": 2250
},
{
"epoch": 2.75441803778184,
"grad_norm": 1.6561633348464966,
"learning_rate": 1.164771949991556e-06,
"loss": 0.14,
"step": 2260
},
{
"epoch": 2.766605728214503,
"grad_norm": 2.2157464027404785,
"learning_rate": 1.1439635710428405e-06,
"loss": 0.1363,
"step": 2270
},
{
"epoch": 2.778793418647166,
"grad_norm": 1.709621548652649,
"learning_rate": 1.1232874556620086e-06,
"loss": 0.1849,
"step": 2280
},
{
"epoch": 2.790981109079829,
"grad_norm": 1.89210045337677,
"learning_rate": 1.1027456205329306e-06,
"loss": 0.1435,
"step": 2290
},
{
"epoch": 2.8031687995124925,
"grad_norm": 1.8157742023468018,
"learning_rate": 1.0823400692421938e-06,
"loss": 0.172,
"step": 2300
},
{
"epoch": 2.8153564899451555,
"grad_norm": 1.8622268438339233,
"learning_rate": 1.0620727920836906e-06,
"loss": 0.1562,
"step": 2310
},
{
"epoch": 2.8275441803778185,
"grad_norm": 1.6750710010528564,
"learning_rate": 1.04194576586448e-06,
"loss": 0.1685,
"step": 2320
},
{
"epoch": 2.8397318708104815,
"grad_norm": 1.5609955787658691,
"learning_rate": 1.0219609537119838e-06,
"loss": 0.1762,
"step": 2330
},
{
"epoch": 2.8519195612431445,
"grad_norm": 1.5106117725372314,
"learning_rate": 1.0021203048825095e-06,
"loss": 0.1601,
"step": 2340
},
{
"epoch": 2.8641072516758075,
"grad_norm": 1.2436853647232056,
"learning_rate": 9.824257545711172e-07,
"loss": 0.1451,
"step": 2350
},
{
"epoch": 2.8762949421084705,
"grad_norm": 2.1535840034484863,
"learning_rate": 9.628792237228787e-07,
"loss": 0.1703,
"step": 2360
},
{
"epoch": 2.8884826325411335,
"grad_norm": 1.6186459064483643,
"learning_rate": 9.434826188455056e-07,
"loss": 0.1607,
"step": 2370
},
{
"epoch": 2.9006703229737965,
"grad_norm": 2.03439998626709,
"learning_rate": 9.242378318233978e-07,
"loss": 0.1771,
"step": 2380
},
{
"epoch": 2.9128580134064594,
"grad_norm": 1.5456159114837646,
"learning_rate": 9.051467397331148e-07,
"loss": 0.1738,
"step": 2390
},
{
"epoch": 2.9250457038391224,
"grad_norm": 1.4887679815292358,
"learning_rate": 8.862112046602917e-07,
"loss": 0.167,
"step": 2400
},
{
"epoch": 2.9372333942717854,
"grad_norm": 2.0322256088256836,
"learning_rate": 8.674330735180164e-07,
"loss": 0.1561,
"step": 2410
},
{
"epoch": 2.9494210847044484,
"grad_norm": 1.4082711935043335,
"learning_rate": 8.488141778666878e-07,
"loss": 0.1586,
"step": 2420
},
{
"epoch": 2.9616087751371114,
"grad_norm": 1.7697067260742188,
"learning_rate": 8.303563337353713e-07,
"loss": 0.1435,
"step": 2430
},
{
"epoch": 2.9737964655697744,
"grad_norm": 1.3499540090560913,
"learning_rate": 8.120613414446707e-07,
"loss": 0.1395,
"step": 2440
},
{
"epoch": 2.9859841560024374,
"grad_norm": 1.8283402919769287,
"learning_rate": 7.939309854311242e-07,
"loss": 0.1637,
"step": 2450
},
{
"epoch": 2.998171846435101,
"grad_norm": 1.988641381263733,
"learning_rate": 7.759670340731662e-07,
"loss": 0.1819,
"step": 2460
},
{
"epoch": 3.0103595368677634,
"grad_norm": 1.3205652236938477,
"learning_rate": 7.581712395186341e-07,
"loss": 0.1143,
"step": 2470
},
{
"epoch": 3.0225472273004264,
"grad_norm": 1.1017987728118896,
"learning_rate": 7.405453375138794e-07,
"loss": 0.1116,
"step": 2480
},
{
"epoch": 3.03473491773309,
"grad_norm": 1.5111011266708374,
"learning_rate": 7.230910472344601e-07,
"loss": 0.1226,
"step": 2490
},
{
"epoch": 3.0469226081657528,
"grad_norm": 1.614180088043213,
"learning_rate": 7.058100711174637e-07,
"loss": 0.1304,
"step": 2500
},
{
"epoch": 3.0591102985984158,
"grad_norm": 1.8207646608352661,
"learning_rate": 6.887040946954524e-07,
"loss": 0.1221,
"step": 2510
},
{
"epoch": 3.0712979890310788,
"grad_norm": 1.3963004350662231,
"learning_rate": 6.717747864320648e-07,
"loss": 0.1154,
"step": 2520
},
{
"epoch": 3.0834856794637417,
"grad_norm": 1.460038185119629,
"learning_rate": 6.550237975592774e-07,
"loss": 0.1084,
"step": 2530
},
{
"epoch": 3.0956733698964047,
"grad_norm": 1.6919264793395996,
"learning_rate": 6.384527619163486e-07,
"loss": 0.1272,
"step": 2540
},
{
"epoch": 3.1078610603290677,
"grad_norm": 1.711581826210022,
"learning_rate": 6.220632957904593e-07,
"loss": 0.1126,
"step": 2550
},
{
"epoch": 3.1200487507617307,
"grad_norm": 1.6653200387954712,
"learning_rate": 6.058569977590683e-07,
"loss": 0.1334,
"step": 2560
},
{
"epoch": 3.1322364411943937,
"grad_norm": 1.6422685384750366,
"learning_rate": 5.898354485339839e-07,
"loss": 0.098,
"step": 2570
},
{
"epoch": 3.1444241316270567,
"grad_norm": 1.5357776880264282,
"learning_rate": 5.740002108071974e-07,
"loss": 0.1242,
"step": 2580
},
{
"epoch": 3.1566118220597197,
"grad_norm": 1.4460564851760864,
"learning_rate": 5.583528290984516e-07,
"loss": 0.1071,
"step": 2590
},
{
"epoch": 3.1687995124923827,
"grad_norm": 1.7668797969818115,
"learning_rate": 5.42894829604603e-07,
"loss": 0.114,
"step": 2600
},
{
"epoch": 3.1809872029250457,
"grad_norm": 1.7189100980758667,
"learning_rate": 5.276277200507549e-07,
"loss": 0.1066,
"step": 2610
},
{
"epoch": 3.1931748933577087,
"grad_norm": 1.6018712520599365,
"learning_rate": 5.125529895432008e-07,
"loss": 0.1237,
"step": 2620
},
{
"epoch": 3.2053625837903716,
"grad_norm": 2.137363910675049,
"learning_rate": 4.976721084241818e-07,
"loss": 0.1302,
"step": 2630
},
{
"epoch": 3.2175502742230346,
"grad_norm": 1.3726582527160645,
"learning_rate": 4.829865281284734e-07,
"loss": 0.1101,
"step": 2640
},
{
"epoch": 3.2297379646556976,
"grad_norm": 2.303097724914551,
"learning_rate": 4.684976810418179e-07,
"loss": 0.1057,
"step": 2650
},
{
"epoch": 3.2419256550883606,
"grad_norm": 2.5125813484191895,
"learning_rate": 4.5420698036121285e-07,
"loss": 0.113,
"step": 2660
},
{
"epoch": 3.2541133455210236,
"grad_norm": 1.2105039358139038,
"learning_rate": 4.4011581995707267e-07,
"loss": 0.1094,
"step": 2670
},
{
"epoch": 3.2663010359536866,
"grad_norm": 1.7184550762176514,
"learning_rate": 4.262255742372759e-07,
"loss": 0.1224,
"step": 2680
},
{
"epoch": 3.2784887263863496,
"grad_norm": 1.8564852476119995,
"learning_rate": 4.1253759801310745e-07,
"loss": 0.1038,
"step": 2690
},
{
"epoch": 3.2906764168190126,
"grad_norm": 1.9307692050933838,
"learning_rate": 3.9905322636711654e-07,
"loss": 0.115,
"step": 2700
},
{
"epoch": 3.3028641072516756,
"grad_norm": 2.4194962978363037,
"learning_rate": 3.8577377452289787e-07,
"loss": 0.1281,
"step": 2710
},
{
"epoch": 3.315051797684339,
"grad_norm": 1.7740304470062256,
"learning_rate": 3.727005377168036e-07,
"loss": 0.1246,
"step": 2720
},
{
"epoch": 3.327239488117002,
"grad_norm": 1.6812636852264404,
"learning_rate": 3.5983479107161793e-07,
"loss": 0.1173,
"step": 2730
},
{
"epoch": 3.339427178549665,
"grad_norm": 5.8957037925720215,
"learning_rate": 3.471777894721767e-07,
"loss": 0.1051,
"step": 2740
},
{
"epoch": 3.351614868982328,
"grad_norm": 2.193671226501465,
"learning_rate": 3.347307674429784e-07,
"loss": 0.126,
"step": 2750
},
{
"epoch": 3.363802559414991,
"grad_norm": 1.9564625024795532,
"learning_rate": 3.224949390277668e-07,
"loss": 0.0962,
"step": 2760
},
{
"epoch": 3.375990249847654,
"grad_norm": 2.2077689170837402,
"learning_rate": 3.1047149767111874e-07,
"loss": 0.096,
"step": 2770
},
{
"epoch": 3.388177940280317,
"grad_norm": 1.5315691232681274,
"learning_rate": 2.9866161610203866e-07,
"loss": 0.0954,
"step": 2780
},
{
"epoch": 3.40036563071298,
"grad_norm": 1.900850534439087,
"learning_rate": 2.8706644621957605e-07,
"loss": 0.1221,
"step": 2790
},
{
"epoch": 3.412553321145643,
"grad_norm": 1.5665889978408813,
"learning_rate": 2.756871189804705e-07,
"loss": 0.1054,
"step": 2800
},
{
"epoch": 3.424741011578306,
"grad_norm": 1.9411475658416748,
"learning_rate": 2.6452474428884294e-07,
"loss": 0.1255,
"step": 2810
},
{
"epoch": 3.436928702010969,
"grad_norm": 1.8464025259017944,
"learning_rate": 2.5358041088793863e-07,
"loss": 0.1157,
"step": 2820
},
{
"epoch": 3.449116392443632,
"grad_norm": 2.363825559616089,
"learning_rate": 2.428551862539366e-07,
"loss": 0.1246,
"step": 2830
},
{
"epoch": 3.461304082876295,
"grad_norm": 1.678460955619812,
"learning_rate": 2.323501164918257e-07,
"loss": 0.0985,
"step": 2840
},
{
"epoch": 3.473491773308958,
"grad_norm": 1.818058967590332,
"learning_rate": 2.2206622623337864e-07,
"loss": 0.1228,
"step": 2850
},
{
"epoch": 3.485679463741621,
"grad_norm": 1.9547301530838013,
"learning_rate": 2.1200451853720605e-07,
"loss": 0.1148,
"step": 2860
},
{
"epoch": 3.497867154174284,
"grad_norm": 2.967578172683716,
"learning_rate": 2.0216597479092437e-07,
"loss": 0.1408,
"step": 2870
},
{
"epoch": 3.510054844606947,
"grad_norm": 2.1174635887145996,
"learning_rate": 1.9255155461543385e-07,
"loss": 0.1124,
"step": 2880
},
{
"epoch": 3.5222425350396103,
"grad_norm": 1.7792600393295288,
"learning_rate": 1.8316219577132033e-07,
"loss": 0.1217,
"step": 2890
},
{
"epoch": 3.5344302254722733,
"grad_norm": 1.700875163078308,
"learning_rate": 1.7399881406738762e-07,
"loss": 0.1087,
"step": 2900
},
{
"epoch": 3.5466179159049362,
"grad_norm": 1.9662463665008545,
"learning_rate": 1.650623032713347e-07,
"loss": 0.1047,
"step": 2910
},
{
"epoch": 3.5588056063375992,
"grad_norm": 1.718908429145813,
"learning_rate": 1.5635353502257812e-07,
"loss": 0.1356,
"step": 2920
},
{
"epoch": 3.5709932967702622,
"grad_norm": 1.5018715858459473,
"learning_rate": 1.4787335874723724e-07,
"loss": 0.1148,
"step": 2930
},
{
"epoch": 3.583180987202925,
"grad_norm": 1.5846534967422485,
"learning_rate": 1.3962260157528052e-07,
"loss": 0.1197,
"step": 2940
},
{
"epoch": 3.595368677635588,
"grad_norm": 1.947350025177002,
"learning_rate": 1.3160206825985457e-07,
"loss": 0.1037,
"step": 2950
},
{
"epoch": 3.607556368068251,
"grad_norm": 1.5652320384979248,
"learning_rate": 1.2381254109878644e-07,
"loss": 0.1106,
"step": 2960
},
{
"epoch": 3.619744058500914,
"grad_norm": 2.34379243850708,
"learning_rate": 1.1625477985828276e-07,
"loss": 0.1358,
"step": 2970
},
{
"epoch": 3.631931748933577,
"grad_norm": 2.6311395168304443,
"learning_rate": 1.089295216988262e-07,
"loss": 0.1119,
"step": 2980
},
{
"epoch": 3.64411943936624,
"grad_norm": 1.8833256959915161,
"learning_rate": 1.0183748110327102e-07,
"loss": 0.1245,
"step": 2990
},
{
"epoch": 3.656307129798903,
"grad_norm": 1.1179873943328857,
"learning_rate": 9.497934980715939e-08,
"loss": 0.1097,
"step": 3000
},
{
"epoch": 3.668494820231566,
"grad_norm": 2.062716484069824,
"learning_rate": 8.835579673124677e-08,
"loss": 0.1009,
"step": 3010
},
{
"epoch": 3.680682510664229,
"grad_norm": 1.951429843902588,
"learning_rate": 8.196746791626243e-08,
"loss": 0.1167,
"step": 3020
},
{
"epoch": 3.692870201096892,
"grad_norm": 1.7196236848831177,
"learning_rate": 7.581498645989255e-08,
"loss": 0.1186,
"step": 3030
},
{
"epoch": 3.705057891529555,
"grad_norm": 2.037466049194336,
"learning_rate": 6.989895245600702e-08,
"loss": 0.1264,
"step": 3040
},
{
"epoch": 3.717245581962218,
"grad_norm": 1.9185491800308228,
"learning_rate": 6.421994293612871e-08,
"loss": 0.1304,
"step": 3050
},
{
"epoch": 3.729433272394881,
"grad_norm": 1.453826904296875,
"learning_rate": 5.8778511813150365e-08,
"loss": 0.1158,
"step": 3060
},
{
"epoch": 3.741620962827544,
"grad_norm": 1.3658956289291382,
"learning_rate": 5.357518982730792e-08,
"loss": 0.1045,
"step": 3070
},
{
"epoch": 3.753808653260207,
"grad_norm": 1.7934198379516602,
"learning_rate": 4.861048449441491e-08,
"loss": 0.1203,
"step": 3080
},
{
"epoch": 3.76599634369287,
"grad_norm": 1.9196701049804688,
"learning_rate": 4.3884880056359045e-08,
"loss": 0.1174,
"step": 3090
},
{
"epoch": 3.778184034125533,
"grad_norm": 1.391270399093628,
"learning_rate": 3.939883743387302e-08,
"loss": 0.103,
"step": 3100
},
{
"epoch": 3.790371724558196,
"grad_norm": 1.528946876525879,
"learning_rate": 3.515279418157463e-08,
"loss": 0.1146,
"step": 3110
},
{
"epoch": 3.802559414990859,
"grad_norm": 1.5788449048995972,
"learning_rate": 3.1147164445292923e-08,
"loss": 0.1162,
"step": 3120
},
{
"epoch": 3.814747105423522,
"grad_norm": 2.370635747909546,
"learning_rate": 2.7382338921670693e-08,
"loss": 0.1166,
"step": 3130
},
{
"epoch": 3.826934795856185,
"grad_norm": 1.730526328086853,
"learning_rate": 2.3858684820058376e-08,
"loss": 0.1123,
"step": 3140
},
{
"epoch": 3.839122486288848,
"grad_norm": 1.4985235929489136,
"learning_rate": 2.057654582669738e-08,
"loss": 0.1151,
"step": 3150
},
{
"epoch": 3.8513101767215114,
"grad_norm": 1.7269705533981323,
"learning_rate": 1.753624207119775e-08,
"loss": 0.112,
"step": 3160
},
{
"epoch": 3.8634978671541744,
"grad_norm": 2.0060977935791016,
"learning_rate": 1.4738070095314527e-08,
"loss": 0.1101,
"step": 3170
},
{
"epoch": 3.8756855575868374,
"grad_norm": 2.1134166717529297,
"learning_rate": 1.2182302824023107e-08,
"loss": 0.1212,
"step": 3180
},
{
"epoch": 3.8878732480195004,
"grad_norm": 1.5718306303024292,
"learning_rate": 9.869189538899149e-09,
"loss": 0.1042,
"step": 3190
},
{
"epoch": 3.9000609384521634,
"grad_norm": 1.7236390113830566,
"learning_rate": 7.798955853805245e-09,
"loss": 0.1231,
"step": 3200
},
{
"epoch": 3.9122486288848264,
"grad_norm": 2.0345866680145264,
"learning_rate": 5.971803692883804e-09,
"loss": 0.1181,
"step": 3210
},
{
"epoch": 3.9244363193174894,
"grad_norm": 1.8747888803482056,
"learning_rate": 4.387911270863632e-09,
"loss": 0.1309,
"step": 3220
},
{
"epoch": 3.9366240097501524,
"grad_norm": 2.027182102203369,
"learning_rate": 3.0474330756757874e-09,
"loss": 0.1027,
"step": 3230
},
{
"epoch": 3.9488117001828154,
"grad_norm": 1.62562096118927,
"learning_rate": 1.9504998533870223e-09,
"loss": 0.1265,
"step": 3240
},
{
"epoch": 3.9609993906154783,
"grad_norm": 2.0606415271759033,
"learning_rate": 1.0972185954452596e-09,
"loss": 0.1099,
"step": 3250
},
{
"epoch": 3.9731870810481413,
"grad_norm": 1.293489933013916,
"learning_rate": 4.876725282457195e-10,
"loss": 0.1164,
"step": 3260
},
{
"epoch": 3.9853747714808043,
"grad_norm": 1.6779310703277588,
"learning_rate": 1.2192110501269005e-10,
"loss": 0.139,
"step": 3270
},
{
"epoch": 3.9975624619134673,
"grad_norm": 2.0987327098846436,
"learning_rate": 0.0,
"loss": 0.1073,
"step": 3280
},
{
"epoch": 3.9975624619134673,
"step": 3280,
"total_flos": 3.38221904554623e+17,
"train_loss": 0.21007875238613385,
"train_runtime": 5418.5603,
"train_samples_per_second": 9.688,
"train_steps_per_second": 0.605
}
],
"logging_steps": 10,
"max_steps": 3280,
"num_input_tokens_seen": 0,
"num_train_epochs": 4,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 3.38221904554623e+17,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}