diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,18044 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.0, + "eval_steps": 200, + "global_step": 2558, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0007818608287724785, + "grad_norm": 0.11400622129440308, + "learning_rate": 3.90625e-07, + "loss": 0.5189, + "step": 1 + }, + { + "epoch": 0.001563721657544957, + "grad_norm": 0.28361040353775024, + "learning_rate": 7.8125e-07, + "loss": 0.7211, + "step": 2 + }, + { + "epoch": 0.0023455824863174357, + "grad_norm": 0.12066957354545593, + "learning_rate": 1.1718750000000001e-06, + "loss": 0.4869, + "step": 3 + }, + { + "epoch": 0.003127443315089914, + "grad_norm": 0.14368808269500732, + "learning_rate": 1.5625e-06, + "loss": 0.6068, + "step": 4 + }, + { + "epoch": 0.003909304143862392, + "grad_norm": 0.17054139077663422, + "learning_rate": 1.953125e-06, + "loss": 0.69, + "step": 5 + }, + { + "epoch": 0.004691164972634871, + "grad_norm": 0.13562728464603424, + "learning_rate": 2.3437500000000002e-06, + "loss": 0.5394, + "step": 6 + }, + { + "epoch": 0.00547302580140735, + "grad_norm": 0.1532646119594574, + "learning_rate": 2.734375e-06, + "loss": 0.5898, + "step": 7 + }, + { + "epoch": 0.006254886630179828, + "grad_norm": 0.1191975548863411, + "learning_rate": 3.125e-06, + "loss": 0.5587, + "step": 8 + }, + { + "epoch": 0.007036747458952306, + "grad_norm": 0.13896405696868896, + "learning_rate": 3.5156250000000003e-06, + "loss": 0.6037, + "step": 9 + }, + { + "epoch": 0.007818608287724784, + "grad_norm": 0.12083485722541809, + "learning_rate": 3.90625e-06, + "loss": 0.516, + "step": 10 + }, + { + "epoch": 0.008600469116497263, + "grad_norm": 0.2188330888748169, + "learning_rate": 4.296875e-06, + "loss": 0.746, + "step": 11 + }, + { + "epoch": 0.009382329945269743, + "grad_norm": 0.1677808314561844, + "learning_rate": 4.6875000000000004e-06, + "loss": 0.5886, + "step": 12 + }, + { + "epoch": 0.010164190774042221, + "grad_norm": 0.1499367654323578, + "learning_rate": 5.078125000000001e-06, + "loss": 0.6232, + "step": 13 + }, + { + "epoch": 0.0109460516028147, + "grad_norm": 0.14739449322223663, + "learning_rate": 5.46875e-06, + "loss": 0.5995, + "step": 14 + }, + { + "epoch": 0.011727912431587178, + "grad_norm": 0.15768533945083618, + "learning_rate": 5.859375e-06, + "loss": 0.6123, + "step": 15 + }, + { + "epoch": 0.012509773260359656, + "grad_norm": 0.1611998975276947, + "learning_rate": 6.25e-06, + "loss": 0.5923, + "step": 16 + }, + { + "epoch": 0.013291634089132134, + "grad_norm": 0.19843457639217377, + "learning_rate": 6.6406250000000005e-06, + "loss": 0.6369, + "step": 17 + }, + { + "epoch": 0.014073494917904612, + "grad_norm": 0.1513635814189911, + "learning_rate": 7.031250000000001e-06, + "loss": 0.6138, + "step": 18 + }, + { + "epoch": 0.014855355746677092, + "grad_norm": 0.15541641414165497, + "learning_rate": 7.421875e-06, + "loss": 0.5324, + "step": 19 + }, + { + "epoch": 0.01563721657544957, + "grad_norm": 0.2085491418838501, + "learning_rate": 7.8125e-06, + "loss": 0.602, + "step": 20 + }, + { + "epoch": 0.01641907740422205, + "grad_norm": 0.21879960596561432, + "learning_rate": 8.203125000000001e-06, + "loss": 0.5874, + "step": 21 + }, + { + "epoch": 0.017200938232994525, + "grad_norm": 0.21974720060825348, + "learning_rate": 8.59375e-06, + "loss": 0.7044, + "step": 22 + }, + { + "epoch": 
0.017982799061767005, + "grad_norm": 0.32434722781181335, + "learning_rate": 8.984375e-06, + "loss": 0.6598, + "step": 23 + }, + { + "epoch": 0.018764659890539485, + "grad_norm": 0.20202045142650604, + "learning_rate": 9.375000000000001e-06, + "loss": 0.5392, + "step": 24 + }, + { + "epoch": 0.019546520719311962, + "grad_norm": 0.14759372174739838, + "learning_rate": 9.765625e-06, + "loss": 0.453, + "step": 25 + }, + { + "epoch": 0.020328381548084442, + "grad_norm": 0.28198951482772827, + "learning_rate": 1.0156250000000001e-05, + "loss": 0.6349, + "step": 26 + }, + { + "epoch": 0.02111024237685692, + "grad_norm": 0.2123844176530838, + "learning_rate": 1.0546875e-05, + "loss": 0.6462, + "step": 27 + }, + { + "epoch": 0.0218921032056294, + "grad_norm": 0.19585418701171875, + "learning_rate": 1.09375e-05, + "loss": 0.4884, + "step": 28 + }, + { + "epoch": 0.022673964034401875, + "grad_norm": 0.2947320342063904, + "learning_rate": 1.1328125000000001e-05, + "loss": 0.6294, + "step": 29 + }, + { + "epoch": 0.023455824863174355, + "grad_norm": 0.24433963000774384, + "learning_rate": 1.171875e-05, + "loss": 0.6965, + "step": 30 + }, + { + "epoch": 0.024237685691946835, + "grad_norm": 0.2690712511539459, + "learning_rate": 1.2109375000000001e-05, + "loss": 0.5956, + "step": 31 + }, + { + "epoch": 0.02501954652071931, + "grad_norm": 0.28039294481277466, + "learning_rate": 1.25e-05, + "loss": 0.4375, + "step": 32 + }, + { + "epoch": 0.02580140734949179, + "grad_norm": 0.30447208881378174, + "learning_rate": 1.2890625e-05, + "loss": 0.5704, + "step": 33 + }, + { + "epoch": 0.026583268178264268, + "grad_norm": 0.33207401633262634, + "learning_rate": 1.3281250000000001e-05, + "loss": 0.6211, + "step": 34 + }, + { + "epoch": 0.027365129007036748, + "grad_norm": 0.25266459584236145, + "learning_rate": 1.3671875e-05, + "loss": 0.5197, + "step": 35 + }, + { + "epoch": 0.028146989835809225, + "grad_norm": 0.1736496388912201, + "learning_rate": 1.4062500000000001e-05, + "loss": 0.4186, + "step": 36 + }, + { + "epoch": 0.028928850664581705, + "grad_norm": 0.36728987097740173, + "learning_rate": 1.4453125e-05, + "loss": 0.5726, + "step": 37 + }, + { + "epoch": 0.029710711493354185, + "grad_norm": 0.22769613564014435, + "learning_rate": 1.484375e-05, + "loss": 0.4935, + "step": 38 + }, + { + "epoch": 0.03049257232212666, + "grad_norm": 0.41446453332901, + "learning_rate": 1.5234375000000001e-05, + "loss": 0.5832, + "step": 39 + }, + { + "epoch": 0.03127443315089914, + "grad_norm": 0.24155667424201965, + "learning_rate": 1.5625e-05, + "loss": 0.5682, + "step": 40 + }, + { + "epoch": 0.03205629397967162, + "grad_norm": 0.30262166261672974, + "learning_rate": 1.6015625e-05, + "loss": 0.5512, + "step": 41 + }, + { + "epoch": 0.0328381548084441, + "grad_norm": 0.21015244722366333, + "learning_rate": 1.6406250000000002e-05, + "loss": 0.4404, + "step": 42 + }, + { + "epoch": 0.033620015637216574, + "grad_norm": 0.3361029326915741, + "learning_rate": 1.6796875e-05, + "loss": 0.5782, + "step": 43 + }, + { + "epoch": 0.03440187646598905, + "grad_norm": 0.31037694215774536, + "learning_rate": 1.71875e-05, + "loss": 0.5791, + "step": 44 + }, + { + "epoch": 0.035183737294761534, + "grad_norm": 0.2708664834499359, + "learning_rate": 1.7578125000000002e-05, + "loss": 0.4308, + "step": 45 + }, + { + "epoch": 0.03596559812353401, + "grad_norm": 0.10701970010995865, + "learning_rate": 1.796875e-05, + "loss": 0.2914, + "step": 46 + }, + { + "epoch": 0.03674745895230649, + "grad_norm": 0.26695603132247925, + "learning_rate": 
1.8359375e-05, + "loss": 0.5574, + "step": 47 + }, + { + "epoch": 0.03752931978107897, + "grad_norm": 0.1568523496389389, + "learning_rate": 1.8750000000000002e-05, + "loss": 0.4056, + "step": 48 + }, + { + "epoch": 0.03831118060985145, + "grad_norm": 0.15718792378902435, + "learning_rate": 1.9140625e-05, + "loss": 0.3688, + "step": 49 + }, + { + "epoch": 0.039093041438623924, + "grad_norm": 0.2712760269641876, + "learning_rate": 1.953125e-05, + "loss": 0.5625, + "step": 50 + }, + { + "epoch": 0.0398749022673964, + "grad_norm": 0.23504135012626648, + "learning_rate": 1.9921875e-05, + "loss": 0.5107, + "step": 51 + }, + { + "epoch": 0.040656763096168884, + "grad_norm": 0.22949786484241486, + "learning_rate": 2.0312500000000002e-05, + "loss": 0.5666, + "step": 52 + }, + { + "epoch": 0.04143862392494136, + "grad_norm": 0.1631242036819458, + "learning_rate": 2.0703125e-05, + "loss": 0.4784, + "step": 53 + }, + { + "epoch": 0.04222048475371384, + "grad_norm": 0.17866574227809906, + "learning_rate": 2.109375e-05, + "loss": 0.407, + "step": 54 + }, + { + "epoch": 0.04300234558248632, + "grad_norm": 0.24938495457172394, + "learning_rate": 2.1484375000000002e-05, + "loss": 0.5188, + "step": 55 + }, + { + "epoch": 0.0437842064112588, + "grad_norm": 0.19512587785720825, + "learning_rate": 2.1875e-05, + "loss": 0.5769, + "step": 56 + }, + { + "epoch": 0.044566067240031274, + "grad_norm": 0.18683001399040222, + "learning_rate": 2.2265625e-05, + "loss": 0.3868, + "step": 57 + }, + { + "epoch": 0.04534792806880375, + "grad_norm": 0.17009177803993225, + "learning_rate": 2.2656250000000002e-05, + "loss": 0.4581, + "step": 58 + }, + { + "epoch": 0.046129788897576234, + "grad_norm": 0.1321457028388977, + "learning_rate": 2.3046875e-05, + "loss": 0.3902, + "step": 59 + }, + { + "epoch": 0.04691164972634871, + "grad_norm": 0.15425974130630493, + "learning_rate": 2.34375e-05, + "loss": 0.3024, + "step": 60 + }, + { + "epoch": 0.04769351055512119, + "grad_norm": 0.19503839313983917, + "learning_rate": 2.3828125e-05, + "loss": 0.4603, + "step": 61 + }, + { + "epoch": 0.04847537138389367, + "grad_norm": 0.1452198475599289, + "learning_rate": 2.4218750000000003e-05, + "loss": 0.4471, + "step": 62 + }, + { + "epoch": 0.04925723221266615, + "grad_norm": 0.18100936710834503, + "learning_rate": 2.4609375e-05, + "loss": 0.451, + "step": 63 + }, + { + "epoch": 0.05003909304143862, + "grad_norm": 0.15564265847206116, + "learning_rate": 2.5e-05, + "loss": 0.3201, + "step": 64 + }, + { + "epoch": 0.0508209538702111, + "grad_norm": 0.1226576417684555, + "learning_rate": 2.5390625000000002e-05, + "loss": 0.3894, + "step": 65 + }, + { + "epoch": 0.05160281469898358, + "grad_norm": 0.14897918701171875, + "learning_rate": 2.578125e-05, + "loss": 0.3756, + "step": 66 + }, + { + "epoch": 0.05238467552775606, + "grad_norm": 0.13855944573879242, + "learning_rate": 2.6171875e-05, + "loss": 0.3293, + "step": 67 + }, + { + "epoch": 0.053166536356528536, + "grad_norm": 0.21206879615783691, + "learning_rate": 2.6562500000000002e-05, + "loss": 0.4521, + "step": 68 + }, + { + "epoch": 0.05394839718530102, + "grad_norm": 0.15101028978824615, + "learning_rate": 2.6953125000000003e-05, + "loss": 0.3386, + "step": 69 + }, + { + "epoch": 0.054730258014073496, + "grad_norm": 0.22186748683452606, + "learning_rate": 2.734375e-05, + "loss": 0.5313, + "step": 70 + }, + { + "epoch": 0.05551211884284597, + "grad_norm": 0.22481444478034973, + "learning_rate": 2.7734375e-05, + "loss": 0.4278, + "step": 71 + }, + { + "epoch": 0.05629397967161845, + 
"grad_norm": 0.2308468520641327, + "learning_rate": 2.8125000000000003e-05, + "loss": 0.3959, + "step": 72 + }, + { + "epoch": 0.05707584050039093, + "grad_norm": 0.2079354226589203, + "learning_rate": 2.8515625e-05, + "loss": 0.3525, + "step": 73 + }, + { + "epoch": 0.05785770132916341, + "grad_norm": 0.18281999230384827, + "learning_rate": 2.890625e-05, + "loss": 0.3882, + "step": 74 + }, + { + "epoch": 0.058639562157935886, + "grad_norm": 0.17227689921855927, + "learning_rate": 2.9296875000000002e-05, + "loss": 0.286, + "step": 75 + }, + { + "epoch": 0.05942142298670837, + "grad_norm": 0.20047691464424133, + "learning_rate": 2.96875e-05, + "loss": 0.3934, + "step": 76 + }, + { + "epoch": 0.060203283815480846, + "grad_norm": 0.1591797173023224, + "learning_rate": 3.0078125e-05, + "loss": 0.3894, + "step": 77 + }, + { + "epoch": 0.06098514464425332, + "grad_norm": 0.16098585724830627, + "learning_rate": 3.0468750000000002e-05, + "loss": 0.3237, + "step": 78 + }, + { + "epoch": 0.0617670054730258, + "grad_norm": 0.2547154426574707, + "learning_rate": 3.0859375e-05, + "loss": 0.4338, + "step": 79 + }, + { + "epoch": 0.06254886630179828, + "grad_norm": 0.23184354603290558, + "learning_rate": 3.125e-05, + "loss": 0.3741, + "step": 80 + }, + { + "epoch": 0.06333072713057075, + "grad_norm": 0.17047226428985596, + "learning_rate": 3.1640625e-05, + "loss": 0.3753, + "step": 81 + }, + { + "epoch": 0.06411258795934324, + "grad_norm": 0.18740379810333252, + "learning_rate": 3.203125e-05, + "loss": 0.2879, + "step": 82 + }, + { + "epoch": 0.06489444878811572, + "grad_norm": 0.17314107716083527, + "learning_rate": 3.2421875e-05, + "loss": 0.3541, + "step": 83 + }, + { + "epoch": 0.0656763096168882, + "grad_norm": 0.1975407898426056, + "learning_rate": 3.2812500000000005e-05, + "loss": 0.3109, + "step": 84 + }, + { + "epoch": 0.06645817044566067, + "grad_norm": 0.16543574631214142, + "learning_rate": 3.3203125e-05, + "loss": 0.3602, + "step": 85 + }, + { + "epoch": 0.06724003127443315, + "grad_norm": 0.18290646374225616, + "learning_rate": 3.359375e-05, + "loss": 0.2981, + "step": 86 + }, + { + "epoch": 0.06802189210320563, + "grad_norm": 0.24966593086719513, + "learning_rate": 3.3984375000000004e-05, + "loss": 0.3733, + "step": 87 + }, + { + "epoch": 0.0688037529319781, + "grad_norm": 0.20844031870365143, + "learning_rate": 3.4375e-05, + "loss": 0.3166, + "step": 88 + }, + { + "epoch": 0.06958561376075059, + "grad_norm": 0.19846050441265106, + "learning_rate": 3.4765625e-05, + "loss": 0.3384, + "step": 89 + }, + { + "epoch": 0.07036747458952307, + "grad_norm": 0.1682157814502716, + "learning_rate": 3.5156250000000004e-05, + "loss": 0.3681, + "step": 90 + }, + { + "epoch": 0.07114933541829555, + "grad_norm": 0.14380459487438202, + "learning_rate": 3.5546875e-05, + "loss": 0.269, + "step": 91 + }, + { + "epoch": 0.07193119624706802, + "grad_norm": 0.1845722645521164, + "learning_rate": 3.59375e-05, + "loss": 0.3338, + "step": 92 + }, + { + "epoch": 0.0727130570758405, + "grad_norm": 0.20639048516750336, + "learning_rate": 3.6328125000000004e-05, + "loss": 0.3314, + "step": 93 + }, + { + "epoch": 0.07349491790461297, + "grad_norm": 0.17344796657562256, + "learning_rate": 3.671875e-05, + "loss": 0.2556, + "step": 94 + }, + { + "epoch": 0.07427677873338545, + "grad_norm": 0.2165909856557846, + "learning_rate": 3.7109375e-05, + "loss": 0.2964, + "step": 95 + }, + { + "epoch": 0.07505863956215794, + "grad_norm": 0.3408878445625305, + "learning_rate": 3.7500000000000003e-05, + "loss": 0.3842, + "step": 96 + 
}, + { + "epoch": 0.07584050039093042, + "grad_norm": 0.17121568322181702, + "learning_rate": 3.7890625e-05, + "loss": 0.3365, + "step": 97 + }, + { + "epoch": 0.0766223612197029, + "grad_norm": 0.21942703425884247, + "learning_rate": 3.828125e-05, + "loss": 0.3703, + "step": 98 + }, + { + "epoch": 0.07740422204847537, + "grad_norm": 0.22385479509830475, + "learning_rate": 3.8671875e-05, + "loss": 0.2629, + "step": 99 + }, + { + "epoch": 0.07818608287724785, + "grad_norm": 0.2156868726015091, + "learning_rate": 3.90625e-05, + "loss": 0.2524, + "step": 100 + }, + { + "epoch": 0.07896794370602032, + "grad_norm": 0.21917618811130524, + "learning_rate": 3.9453125000000005e-05, + "loss": 0.3201, + "step": 101 + }, + { + "epoch": 0.0797498045347928, + "grad_norm": 0.26034292578697205, + "learning_rate": 3.984375e-05, + "loss": 0.3169, + "step": 102 + }, + { + "epoch": 0.08053166536356529, + "grad_norm": 0.19670964777469635, + "learning_rate": 4.0234375e-05, + "loss": 0.2685, + "step": 103 + }, + { + "epoch": 0.08131352619233777, + "grad_norm": 0.18302002549171448, + "learning_rate": 4.0625000000000005e-05, + "loss": 0.2827, + "step": 104 + }, + { + "epoch": 0.08209538702111024, + "grad_norm": 0.2715568542480469, + "learning_rate": 4.1015625e-05, + "loss": 0.3265, + "step": 105 + }, + { + "epoch": 0.08287724784988272, + "grad_norm": 0.35596245527267456, + "learning_rate": 4.140625e-05, + "loss": 0.25, + "step": 106 + }, + { + "epoch": 0.0836591086786552, + "grad_norm": 0.24965617060661316, + "learning_rate": 4.1796875000000005e-05, + "loss": 0.2673, + "step": 107 + }, + { + "epoch": 0.08444096950742767, + "grad_norm": 0.2757260501384735, + "learning_rate": 4.21875e-05, + "loss": 0.279, + "step": 108 + }, + { + "epoch": 0.08522283033620015, + "grad_norm": 0.19423194229602814, + "learning_rate": 4.2578125e-05, + "loss": 0.2734, + "step": 109 + }, + { + "epoch": 0.08600469116497264, + "grad_norm": 0.25273609161376953, + "learning_rate": 4.2968750000000004e-05, + "loss": 0.2508, + "step": 110 + }, + { + "epoch": 0.08678655199374512, + "grad_norm": 0.26874634623527527, + "learning_rate": 4.3359375e-05, + "loss": 0.2911, + "step": 111 + }, + { + "epoch": 0.0875684128225176, + "grad_norm": 0.2321517914533615, + "learning_rate": 4.375e-05, + "loss": 0.2583, + "step": 112 + }, + { + "epoch": 0.08835027365129007, + "grad_norm": 0.23453573882579803, + "learning_rate": 4.4140625000000004e-05, + "loss": 0.2177, + "step": 113 + }, + { + "epoch": 0.08913213448006255, + "grad_norm": 0.23830734193325043, + "learning_rate": 4.453125e-05, + "loss": 0.3184, + "step": 114 + }, + { + "epoch": 0.08991399530883502, + "grad_norm": 0.28641262650489807, + "learning_rate": 4.4921875e-05, + "loss": 0.226, + "step": 115 + }, + { + "epoch": 0.0906958561376075, + "grad_norm": 0.30627748370170593, + "learning_rate": 4.5312500000000004e-05, + "loss": 0.2691, + "step": 116 + }, + { + "epoch": 0.09147771696637999, + "grad_norm": 0.25366127490997314, + "learning_rate": 4.5703125e-05, + "loss": 0.2986, + "step": 117 + }, + { + "epoch": 0.09225957779515247, + "grad_norm": 0.32924023270606995, + "learning_rate": 4.609375e-05, + "loss": 0.3162, + "step": 118 + }, + { + "epoch": 0.09304143862392494, + "grad_norm": 0.19363336265087128, + "learning_rate": 4.6484375e-05, + "loss": 0.2461, + "step": 119 + }, + { + "epoch": 0.09382329945269742, + "grad_norm": 0.23301272094249725, + "learning_rate": 4.6875e-05, + "loss": 0.2032, + "step": 120 + }, + { + "epoch": 0.0946051602814699, + "grad_norm": 0.2663356363773346, + "learning_rate": 
4.7265625000000005e-05, + "loss": 0.2863, + "step": 121 + }, + { + "epoch": 0.09538702111024237, + "grad_norm": 0.28720682859420776, + "learning_rate": 4.765625e-05, + "loss": 0.2427, + "step": 122 + }, + { + "epoch": 0.09616888193901485, + "grad_norm": 0.2564330995082855, + "learning_rate": 4.8046875e-05, + "loss": 0.1983, + "step": 123 + }, + { + "epoch": 0.09695074276778734, + "grad_norm": 0.3134055733680725, + "learning_rate": 4.8437500000000005e-05, + "loss": 0.3391, + "step": 124 + }, + { + "epoch": 0.09773260359655982, + "grad_norm": 0.25326237082481384, + "learning_rate": 4.8828125e-05, + "loss": 0.2411, + "step": 125 + }, + { + "epoch": 0.0985144644253323, + "grad_norm": 0.2458665817975998, + "learning_rate": 4.921875e-05, + "loss": 0.2508, + "step": 126 + }, + { + "epoch": 0.09929632525410477, + "grad_norm": 0.27264484763145447, + "learning_rate": 4.9609375000000005e-05, + "loss": 0.2081, + "step": 127 + }, + { + "epoch": 0.10007818608287725, + "grad_norm": 0.27833232283592224, + "learning_rate": 5e-05, + "loss": 0.2626, + "step": 128 + }, + { + "epoch": 0.10086004691164972, + "grad_norm": 0.26720625162124634, + "learning_rate": 5.0390625e-05, + "loss": 0.2484, + "step": 129 + }, + { + "epoch": 0.1016419077404222, + "grad_norm": 0.21806442737579346, + "learning_rate": 5.0781250000000004e-05, + "loss": 0.2157, + "step": 130 + }, + { + "epoch": 0.10242376856919469, + "grad_norm": 0.3490794599056244, + "learning_rate": 5.1171875e-05, + "loss": 0.2575, + "step": 131 + }, + { + "epoch": 0.10320562939796717, + "grad_norm": 0.2614622712135315, + "learning_rate": 5.15625e-05, + "loss": 0.2364, + "step": 132 + }, + { + "epoch": 0.10398749022673964, + "grad_norm": 0.3352520167827606, + "learning_rate": 5.1953125000000004e-05, + "loss": 0.2602, + "step": 133 + }, + { + "epoch": 0.10476935105551212, + "grad_norm": 0.28158560395240784, + "learning_rate": 5.234375e-05, + "loss": 0.2229, + "step": 134 + }, + { + "epoch": 0.1055512118842846, + "grad_norm": 0.19348321855068207, + "learning_rate": 5.2734375e-05, + "loss": 0.2265, + "step": 135 + }, + { + "epoch": 0.10633307271305707, + "grad_norm": 0.32511651515960693, + "learning_rate": 5.3125000000000004e-05, + "loss": 0.2831, + "step": 136 + }, + { + "epoch": 0.10711493354182955, + "grad_norm": 0.2327871322631836, + "learning_rate": 5.3515625e-05, + "loss": 0.2626, + "step": 137 + }, + { + "epoch": 0.10789679437060204, + "grad_norm": 0.3208716809749603, + "learning_rate": 5.3906250000000006e-05, + "loss": 0.246, + "step": 138 + }, + { + "epoch": 0.10867865519937452, + "grad_norm": 0.18219627439975739, + "learning_rate": 5.4296875000000004e-05, + "loss": 0.1884, + "step": 139 + }, + { + "epoch": 0.10946051602814699, + "grad_norm": 0.3777305483818054, + "learning_rate": 5.46875e-05, + "loss": 0.254, + "step": 140 + }, + { + "epoch": 0.11024237685691947, + "grad_norm": 0.23451992869377136, + "learning_rate": 5.5078125000000006e-05, + "loss": 0.1793, + "step": 141 + }, + { + "epoch": 0.11102423768569195, + "grad_norm": 0.3080081343650818, + "learning_rate": 5.546875e-05, + "loss": 0.2904, + "step": 142 + }, + { + "epoch": 0.11180609851446442, + "grad_norm": 0.20907637476921082, + "learning_rate": 5.5859375e-05, + "loss": 0.2452, + "step": 143 + }, + { + "epoch": 0.1125879593432369, + "grad_norm": 0.2892386317253113, + "learning_rate": 5.6250000000000005e-05, + "loss": 0.2651, + "step": 144 + }, + { + "epoch": 0.11336982017200939, + "grad_norm": 0.25630876421928406, + "learning_rate": 5.6640625e-05, + "loss": 0.264, + "step": 145 + }, + { + "epoch": 
0.11415168100078187, + "grad_norm": 0.2772256135940552, + "learning_rate": 5.703125e-05, + "loss": 0.2461, + "step": 146 + }, + { + "epoch": 0.11493354182955434, + "grad_norm": 0.28796228766441345, + "learning_rate": 5.7421875000000005e-05, + "loss": 0.2164, + "step": 147 + }, + { + "epoch": 0.11571540265832682, + "grad_norm": 0.272840678691864, + "learning_rate": 5.78125e-05, + "loss": 0.2057, + "step": 148 + }, + { + "epoch": 0.1164972634870993, + "grad_norm": 0.27350762486457825, + "learning_rate": 5.8203125e-05, + "loss": 0.2567, + "step": 149 + }, + { + "epoch": 0.11727912431587177, + "grad_norm": 0.2435077726840973, + "learning_rate": 5.8593750000000005e-05, + "loss": 0.181, + "step": 150 + }, + { + "epoch": 0.11806098514464425, + "grad_norm": 0.3440535068511963, + "learning_rate": 5.8984375e-05, + "loss": 0.2955, + "step": 151 + }, + { + "epoch": 0.11884284597341674, + "grad_norm": 0.30235132575035095, + "learning_rate": 5.9375e-05, + "loss": 0.249, + "step": 152 + }, + { + "epoch": 0.11962470680218922, + "grad_norm": 0.1905151903629303, + "learning_rate": 5.9765625000000004e-05, + "loss": 0.2057, + "step": 153 + }, + { + "epoch": 0.12040656763096169, + "grad_norm": 0.2980440855026245, + "learning_rate": 6.015625e-05, + "loss": 0.2824, + "step": 154 + }, + { + "epoch": 0.12118842845973417, + "grad_norm": 0.21735329926013947, + "learning_rate": 6.0546875e-05, + "loss": 0.187, + "step": 155 + }, + { + "epoch": 0.12197028928850664, + "grad_norm": 0.2793501317501068, + "learning_rate": 6.0937500000000004e-05, + "loss": 0.2543, + "step": 156 + }, + { + "epoch": 0.12275215011727912, + "grad_norm": 0.26220038533210754, + "learning_rate": 6.132812500000001e-05, + "loss": 0.2218, + "step": 157 + }, + { + "epoch": 0.1235340109460516, + "grad_norm": 0.31638094782829285, + "learning_rate": 6.171875e-05, + "loss": 0.2593, + "step": 158 + }, + { + "epoch": 0.12431587177482409, + "grad_norm": 0.2769337296485901, + "learning_rate": 6.2109375e-05, + "loss": 0.2016, + "step": 159 + }, + { + "epoch": 0.12509773260359655, + "grad_norm": 0.42148536443710327, + "learning_rate": 6.25e-05, + "loss": 0.2441, + "step": 160 + }, + { + "epoch": 0.12587959343236904, + "grad_norm": 0.30849018692970276, + "learning_rate": 6.2890625e-05, + "loss": 0.2421, + "step": 161 + }, + { + "epoch": 0.1266614542611415, + "grad_norm": 0.27945342659950256, + "learning_rate": 6.328125e-05, + "loss": 0.1959, + "step": 162 + }, + { + "epoch": 0.127443315089914, + "grad_norm": 0.23380780220031738, + "learning_rate": 6.367187500000001e-05, + "loss": 0.1951, + "step": 163 + }, + { + "epoch": 0.12822517591868648, + "grad_norm": 0.3297896087169647, + "learning_rate": 6.40625e-05, + "loss": 0.2182, + "step": 164 + }, + { + "epoch": 0.12900703674745895, + "grad_norm": 0.4288419783115387, + "learning_rate": 6.4453125e-05, + "loss": 0.2286, + "step": 165 + }, + { + "epoch": 0.12978889757623144, + "grad_norm": 0.4745989143848419, + "learning_rate": 6.484375e-05, + "loss": 0.1925, + "step": 166 + }, + { + "epoch": 0.1305707584050039, + "grad_norm": 0.4383123815059662, + "learning_rate": 6.5234375e-05, + "loss": 0.1877, + "step": 167 + }, + { + "epoch": 0.1313526192337764, + "grad_norm": 0.33036768436431885, + "learning_rate": 6.562500000000001e-05, + "loss": 0.2172, + "step": 168 + }, + { + "epoch": 0.13213448006254885, + "grad_norm": 0.2799517810344696, + "learning_rate": 6.601562500000001e-05, + "loss": 0.1766, + "step": 169 + }, + { + "epoch": 0.13291634089132134, + "grad_norm": 0.3703453242778778, + "learning_rate": 6.640625e-05, + 
"loss": 0.207, + "step": 170 + }, + { + "epoch": 0.13369820172009383, + "grad_norm": 0.29083386063575745, + "learning_rate": 6.6796875e-05, + "loss": 0.2675, + "step": 171 + }, + { + "epoch": 0.1344800625488663, + "grad_norm": 0.3545566499233246, + "learning_rate": 6.71875e-05, + "loss": 0.2692, + "step": 172 + }, + { + "epoch": 0.1352619233776388, + "grad_norm": 0.2916538119316101, + "learning_rate": 6.7578125e-05, + "loss": 0.1521, + "step": 173 + }, + { + "epoch": 0.13604378420641125, + "grad_norm": 0.3869919776916504, + "learning_rate": 6.796875000000001e-05, + "loss": 0.2785, + "step": 174 + }, + { + "epoch": 0.13682564503518374, + "grad_norm": 0.32206371426582336, + "learning_rate": 6.8359375e-05, + "loss": 0.2055, + "step": 175 + }, + { + "epoch": 0.1376075058639562, + "grad_norm": 0.47468236088752747, + "learning_rate": 6.875e-05, + "loss": 0.2493, + "step": 176 + }, + { + "epoch": 0.1383893666927287, + "grad_norm": 0.23824159801006317, + "learning_rate": 6.9140625e-05, + "loss": 0.1699, + "step": 177 + }, + { + "epoch": 0.13917122752150118, + "grad_norm": 0.5079713463783264, + "learning_rate": 6.953125e-05, + "loss": 0.2437, + "step": 178 + }, + { + "epoch": 0.13995308835027365, + "grad_norm": 0.3343154489994049, + "learning_rate": 6.9921875e-05, + "loss": 0.1674, + "step": 179 + }, + { + "epoch": 0.14073494917904614, + "grad_norm": 0.3303624093532562, + "learning_rate": 7.031250000000001e-05, + "loss": 0.1942, + "step": 180 + }, + { + "epoch": 0.1415168100078186, + "grad_norm": 0.48036229610443115, + "learning_rate": 7.0703125e-05, + "loss": 0.2186, + "step": 181 + }, + { + "epoch": 0.1422986708365911, + "grad_norm": 0.40403860807418823, + "learning_rate": 7.109375e-05, + "loss": 0.2192, + "step": 182 + }, + { + "epoch": 0.14308053166536355, + "grad_norm": 0.4421844184398651, + "learning_rate": 7.1484375e-05, + "loss": 0.1923, + "step": 183 + }, + { + "epoch": 0.14386239249413604, + "grad_norm": 0.39282283186912537, + "learning_rate": 7.1875e-05, + "loss": 0.2658, + "step": 184 + }, + { + "epoch": 0.14464425332290853, + "grad_norm": 0.29992353916168213, + "learning_rate": 7.226562500000001e-05, + "loss": 0.232, + "step": 185 + }, + { + "epoch": 0.145426114151681, + "grad_norm": 0.4302484095096588, + "learning_rate": 7.265625000000001e-05, + "loss": 0.2118, + "step": 186 + }, + { + "epoch": 0.1462079749804535, + "grad_norm": 0.3942473232746124, + "learning_rate": 7.3046875e-05, + "loss": 0.2664, + "step": 187 + }, + { + "epoch": 0.14698983580922595, + "grad_norm": 0.38469308614730835, + "learning_rate": 7.34375e-05, + "loss": 0.1307, + "step": 188 + }, + { + "epoch": 0.14777169663799844, + "grad_norm": 0.2901390790939331, + "learning_rate": 7.3828125e-05, + "loss": 0.2786, + "step": 189 + }, + { + "epoch": 0.1485535574667709, + "grad_norm": 0.28935694694519043, + "learning_rate": 7.421875e-05, + "loss": 0.1513, + "step": 190 + }, + { + "epoch": 0.1493354182955434, + "grad_norm": 0.28370222449302673, + "learning_rate": 7.460937500000001e-05, + "loss": 0.2446, + "step": 191 + }, + { + "epoch": 0.15011727912431588, + "grad_norm": 0.36691758036613464, + "learning_rate": 7.500000000000001e-05, + "loss": 0.2114, + "step": 192 + }, + { + "epoch": 0.15089913995308835, + "grad_norm": 0.41656333208084106, + "learning_rate": 7.5390625e-05, + "loss": 0.2144, + "step": 193 + }, + { + "epoch": 0.15168100078186084, + "grad_norm": 0.34387269616127014, + "learning_rate": 7.578125e-05, + "loss": 0.2113, + "step": 194 + }, + { + "epoch": 0.1524628616106333, + "grad_norm": 0.33604809641838074, + 
"learning_rate": 7.6171875e-05, + "loss": 0.1784, + "step": 195 + }, + { + "epoch": 0.1532447224394058, + "grad_norm": 0.26703089475631714, + "learning_rate": 7.65625e-05, + "loss": 0.1617, + "step": 196 + }, + { + "epoch": 0.15402658326817825, + "grad_norm": 0.35461607575416565, + "learning_rate": 7.695312500000001e-05, + "loss": 0.2431, + "step": 197 + }, + { + "epoch": 0.15480844409695074, + "grad_norm": 0.35807040333747864, + "learning_rate": 7.734375e-05, + "loss": 0.1861, + "step": 198 + }, + { + "epoch": 0.15559030492572323, + "grad_norm": 0.3112034499645233, + "learning_rate": 7.7734375e-05, + "loss": 0.2142, + "step": 199 + }, + { + "epoch": 0.1563721657544957, + "grad_norm": 0.4752049446105957, + "learning_rate": 7.8125e-05, + "loss": 0.2322, + "step": 200 + }, + { + "epoch": 0.1563721657544957, + "eval_loss": 0.21434074640274048, + "eval_runtime": 13.3101, + "eval_samples_per_second": 3.907, + "eval_steps_per_second": 0.977, + "step": 200 + }, + { + "epoch": 0.15715402658326819, + "grad_norm": 0.35235124826431274, + "learning_rate": 7.8515625e-05, + "loss": 0.1912, + "step": 201 + }, + { + "epoch": 0.15793588741204065, + "grad_norm": 0.26190364360809326, + "learning_rate": 7.890625000000001e-05, + "loss": 0.1864, + "step": 202 + }, + { + "epoch": 0.15871774824081314, + "grad_norm": 0.4348473846912384, + "learning_rate": 7.929687500000001e-05, + "loss": 0.2316, + "step": 203 + }, + { + "epoch": 0.1594996090695856, + "grad_norm": 0.3678869605064392, + "learning_rate": 7.96875e-05, + "loss": 0.2044, + "step": 204 + }, + { + "epoch": 0.1602814698983581, + "grad_norm": 0.3102744221687317, + "learning_rate": 8.0078125e-05, + "loss": 0.2061, + "step": 205 + }, + { + "epoch": 0.16106333072713058, + "grad_norm": 0.370443731546402, + "learning_rate": 8.046875e-05, + "loss": 0.2046, + "step": 206 + }, + { + "epoch": 0.16184519155590305, + "grad_norm": 0.4026085138320923, + "learning_rate": 8.0859375e-05, + "loss": 0.2202, + "step": 207 + }, + { + "epoch": 0.16262705238467554, + "grad_norm": 0.30296698212623596, + "learning_rate": 8.125000000000001e-05, + "loss": 0.1761, + "step": 208 + }, + { + "epoch": 0.163408913213448, + "grad_norm": 0.3474259674549103, + "learning_rate": 8.164062500000001e-05, + "loss": 0.1942, + "step": 209 + }, + { + "epoch": 0.1641907740422205, + "grad_norm": 0.32637667655944824, + "learning_rate": 8.203125e-05, + "loss": 0.1872, + "step": 210 + }, + { + "epoch": 0.16497263487099295, + "grad_norm": 0.3576767146587372, + "learning_rate": 8.2421875e-05, + "loss": 0.1869, + "step": 211 + }, + { + "epoch": 0.16575449569976544, + "grad_norm": 0.3988786041736603, + "learning_rate": 8.28125e-05, + "loss": 0.2351, + "step": 212 + }, + { + "epoch": 0.16653635652853793, + "grad_norm": 0.28785213828086853, + "learning_rate": 8.3203125e-05, + "loss": 0.1558, + "step": 213 + }, + { + "epoch": 0.1673182173573104, + "grad_norm": 0.527788519859314, + "learning_rate": 8.359375000000001e-05, + "loss": 0.2125, + "step": 214 + }, + { + "epoch": 0.16810007818608289, + "grad_norm": 0.3742121458053589, + "learning_rate": 8.398437500000001e-05, + "loss": 0.1884, + "step": 215 + }, + { + "epoch": 0.16888193901485535, + "grad_norm": 0.4711468517780304, + "learning_rate": 8.4375e-05, + "loss": 0.1743, + "step": 216 + }, + { + "epoch": 0.16966379984362784, + "grad_norm": 0.6251031756401062, + "learning_rate": 8.4765625e-05, + "loss": 0.2971, + "step": 217 + }, + { + "epoch": 0.1704456606724003, + "grad_norm": 0.4275197684764862, + "learning_rate": 8.515625e-05, + "loss": 0.1872, + "step": 218 
+ }, + { + "epoch": 0.1712275215011728, + "grad_norm": 0.313466340303421, + "learning_rate": 8.5546875e-05, + "loss": 0.1916, + "step": 219 + }, + { + "epoch": 0.17200938232994528, + "grad_norm": 0.3586588501930237, + "learning_rate": 8.593750000000001e-05, + "loss": 0.1848, + "step": 220 + }, + { + "epoch": 0.17279124315871774, + "grad_norm": 0.4977078139781952, + "learning_rate": 8.6328125e-05, + "loss": 0.2476, + "step": 221 + }, + { + "epoch": 0.17357310398749023, + "grad_norm": 0.35769525170326233, + "learning_rate": 8.671875e-05, + "loss": 0.199, + "step": 222 + }, + { + "epoch": 0.1743549648162627, + "grad_norm": 0.4167306423187256, + "learning_rate": 8.7109375e-05, + "loss": 0.1861, + "step": 223 + }, + { + "epoch": 0.1751368256450352, + "grad_norm": 0.3895793557167053, + "learning_rate": 8.75e-05, + "loss": 0.1772, + "step": 224 + }, + { + "epoch": 0.17591868647380765, + "grad_norm": 0.3502638339996338, + "learning_rate": 8.789062500000001e-05, + "loss": 0.2047, + "step": 225 + }, + { + "epoch": 0.17670054730258014, + "grad_norm": 0.3090845048427582, + "learning_rate": 8.828125000000001e-05, + "loss": 0.1812, + "step": 226 + }, + { + "epoch": 0.17748240813135263, + "grad_norm": 0.40732601284980774, + "learning_rate": 8.8671875e-05, + "loss": 0.2121, + "step": 227 + }, + { + "epoch": 0.1782642689601251, + "grad_norm": 0.42500749230384827, + "learning_rate": 8.90625e-05, + "loss": 0.19, + "step": 228 + }, + { + "epoch": 0.17904612978889758, + "grad_norm": 0.2970224916934967, + "learning_rate": 8.9453125e-05, + "loss": 0.1517, + "step": 229 + }, + { + "epoch": 0.17982799061767005, + "grad_norm": 0.3494272530078888, + "learning_rate": 8.984375e-05, + "loss": 0.2368, + "step": 230 + }, + { + "epoch": 0.18060985144644254, + "grad_norm": 0.4563659727573395, + "learning_rate": 9.023437500000001e-05, + "loss": 0.1987, + "step": 231 + }, + { + "epoch": 0.181391712275215, + "grad_norm": 0.4436132311820984, + "learning_rate": 9.062500000000001e-05, + "loss": 0.2369, + "step": 232 + }, + { + "epoch": 0.1821735731039875, + "grad_norm": 0.4118313789367676, + "learning_rate": 9.1015625e-05, + "loss": 0.1623, + "step": 233 + }, + { + "epoch": 0.18295543393275998, + "grad_norm": 0.535983681678772, + "learning_rate": 9.140625e-05, + "loss": 0.1718, + "step": 234 + }, + { + "epoch": 0.18373729476153244, + "grad_norm": 0.4556559920310974, + "learning_rate": 9.1796875e-05, + "loss": 0.209, + "step": 235 + }, + { + "epoch": 0.18451915559030493, + "grad_norm": 0.43632426857948303, + "learning_rate": 9.21875e-05, + "loss": 0.1622, + "step": 236 + }, + { + "epoch": 0.1853010164190774, + "grad_norm": 0.34906676411628723, + "learning_rate": 9.257812500000001e-05, + "loss": 0.2026, + "step": 237 + }, + { + "epoch": 0.1860828772478499, + "grad_norm": 0.3979455828666687, + "learning_rate": 9.296875e-05, + "loss": 0.1893, + "step": 238 + }, + { + "epoch": 0.18686473807662235, + "grad_norm": 0.31991952657699585, + "learning_rate": 9.3359375e-05, + "loss": 0.1957, + "step": 239 + }, + { + "epoch": 0.18764659890539484, + "grad_norm": 0.3094921112060547, + "learning_rate": 9.375e-05, + "loss": 0.1545, + "step": 240 + }, + { + "epoch": 0.18842845973416733, + "grad_norm": 0.3545514941215515, + "learning_rate": 9.4140625e-05, + "loss": 0.2365, + "step": 241 + }, + { + "epoch": 0.1892103205629398, + "grad_norm": 0.3044324815273285, + "learning_rate": 9.453125000000001e-05, + "loss": 0.1804, + "step": 242 + }, + { + "epoch": 0.18999218139171228, + "grad_norm": 0.3272997736930847, + "learning_rate": 9.492187500000001e-05, 
+ "loss": 0.1591, + "step": 243 + }, + { + "epoch": 0.19077404222048475, + "grad_norm": 0.4019821286201477, + "learning_rate": 9.53125e-05, + "loss": 0.1935, + "step": 244 + }, + { + "epoch": 0.19155590304925724, + "grad_norm": 0.3890402615070343, + "learning_rate": 9.5703125e-05, + "loss": 0.1816, + "step": 245 + }, + { + "epoch": 0.1923377638780297, + "grad_norm": 0.39056506752967834, + "learning_rate": 9.609375e-05, + "loss": 0.1788, + "step": 246 + }, + { + "epoch": 0.1931196247068022, + "grad_norm": 0.47024020552635193, + "learning_rate": 9.6484375e-05, + "loss": 0.1949, + "step": 247 + }, + { + "epoch": 0.19390148553557468, + "grad_norm": 0.31551235914230347, + "learning_rate": 9.687500000000001e-05, + "loss": 0.1469, + "step": 248 + }, + { + "epoch": 0.19468334636434714, + "grad_norm": 0.4270898103713989, + "learning_rate": 9.726562500000001e-05, + "loss": 0.1873, + "step": 249 + }, + { + "epoch": 0.19546520719311963, + "grad_norm": 0.34933722019195557, + "learning_rate": 9.765625e-05, + "loss": 0.2114, + "step": 250 + }, + { + "epoch": 0.1962470680218921, + "grad_norm": 0.32482168078422546, + "learning_rate": 9.8046875e-05, + "loss": 0.1765, + "step": 251 + }, + { + "epoch": 0.1970289288506646, + "grad_norm": 0.36024120450019836, + "learning_rate": 9.84375e-05, + "loss": 0.185, + "step": 252 + }, + { + "epoch": 0.19781078967943705, + "grad_norm": 0.36847880482673645, + "learning_rate": 9.8828125e-05, + "loss": 0.1791, + "step": 253 + }, + { + "epoch": 0.19859265050820954, + "grad_norm": 0.487062007188797, + "learning_rate": 9.921875000000001e-05, + "loss": 0.2098, + "step": 254 + }, + { + "epoch": 0.19937451133698203, + "grad_norm": 0.4240575134754181, + "learning_rate": 9.960937500000001e-05, + "loss": 0.1558, + "step": 255 + }, + { + "epoch": 0.2001563721657545, + "grad_norm": 0.4850272238254547, + "learning_rate": 0.0001, + "loss": 0.2066, + "step": 256 + }, + { + "epoch": 0.20093823299452698, + "grad_norm": 0.2975417375564575, + "learning_rate": 9.999995343827644e-05, + "loss": 0.1519, + "step": 257 + }, + { + "epoch": 0.20172009382329945, + "grad_norm": 0.4383419454097748, + "learning_rate": 9.999981375319249e-05, + "loss": 0.2041, + "step": 258 + }, + { + "epoch": 0.20250195465207194, + "grad_norm": 0.4741787314414978, + "learning_rate": 9.99995809450083e-05, + "loss": 0.186, + "step": 259 + }, + { + "epoch": 0.2032838154808444, + "grad_norm": 0.41490674018859863, + "learning_rate": 9.999925501415746e-05, + "loss": 0.1978, + "step": 260 + }, + { + "epoch": 0.2040656763096169, + "grad_norm": 0.5051758885383606, + "learning_rate": 9.9998835961247e-05, + "loss": 0.2547, + "step": 261 + }, + { + "epoch": 0.20484753713838938, + "grad_norm": 0.2550017535686493, + "learning_rate": 9.999832378705743e-05, + "loss": 0.1447, + "step": 262 + }, + { + "epoch": 0.20562939796716184, + "grad_norm": 0.4408182203769684, + "learning_rate": 9.999771849254263e-05, + "loss": 0.1724, + "step": 263 + }, + { + "epoch": 0.20641125879593433, + "grad_norm": 0.45843225717544556, + "learning_rate": 9.999702007882995e-05, + "loss": 0.198, + "step": 264 + }, + { + "epoch": 0.2071931196247068, + "grad_norm": 0.3172323703765869, + "learning_rate": 9.999622854722017e-05, + "loss": 0.2108, + "step": 265 + }, + { + "epoch": 0.20797498045347929, + "grad_norm": 0.403096467256546, + "learning_rate": 9.999534389918747e-05, + "loss": 0.1631, + "step": 266 + }, + { + "epoch": 0.20875684128225175, + "grad_norm": 0.28646552562713623, + "learning_rate": 9.99943661363795e-05, + "loss": 0.1384, + "step": 267 + }, + { + 
"epoch": 0.20953870211102424, + "grad_norm": 0.5032647252082825, + "learning_rate": 9.999329526061731e-05, + "loss": 0.2253, + "step": 268 + }, + { + "epoch": 0.21032056293979673, + "grad_norm": 0.4821566045284271, + "learning_rate": 9.999213127389536e-05, + "loss": 0.2096, + "step": 269 + }, + { + "epoch": 0.2111024237685692, + "grad_norm": 0.36564865708351135, + "learning_rate": 9.999087417838156e-05, + "loss": 0.1496, + "step": 270 + }, + { + "epoch": 0.21188428459734168, + "grad_norm": 0.3803997039794922, + "learning_rate": 9.99895239764172e-05, + "loss": 0.1445, + "step": 271 + }, + { + "epoch": 0.21266614542611414, + "grad_norm": 0.3326912522315979, + "learning_rate": 9.9988080670517e-05, + "loss": 0.1422, + "step": 272 + }, + { + "epoch": 0.21344800625488664, + "grad_norm": 0.44342145323753357, + "learning_rate": 9.998654426336905e-05, + "loss": 0.1902, + "step": 273 + }, + { + "epoch": 0.2142298670836591, + "grad_norm": 0.3668600618839264, + "learning_rate": 9.998491475783487e-05, + "loss": 0.2203, + "step": 274 + }, + { + "epoch": 0.2150117279124316, + "grad_norm": 0.49768349528312683, + "learning_rate": 9.998319215694936e-05, + "loss": 0.1857, + "step": 275 + }, + { + "epoch": 0.21579358874120408, + "grad_norm": 0.4042830765247345, + "learning_rate": 9.998137646392083e-05, + "loss": 0.1858, + "step": 276 + }, + { + "epoch": 0.21657544956997654, + "grad_norm": 0.36947476863861084, + "learning_rate": 9.997946768213095e-05, + "loss": 0.2113, + "step": 277 + }, + { + "epoch": 0.21735731039874903, + "grad_norm": 0.32007378339767456, + "learning_rate": 9.997746581513474e-05, + "loss": 0.2095, + "step": 278 + }, + { + "epoch": 0.2181391712275215, + "grad_norm": 0.3947068452835083, + "learning_rate": 9.997537086666063e-05, + "loss": 0.1986, + "step": 279 + }, + { + "epoch": 0.21892103205629398, + "grad_norm": 0.31826138496398926, + "learning_rate": 9.997318284061041e-05, + "loss": 0.1518, + "step": 280 + }, + { + "epoch": 0.21970289288506645, + "grad_norm": 0.5408663749694824, + "learning_rate": 9.997090174105919e-05, + "loss": 0.1945, + "step": 281 + }, + { + "epoch": 0.22048475371383894, + "grad_norm": 0.36804401874542236, + "learning_rate": 9.996852757225546e-05, + "loss": 0.16, + "step": 282 + }, + { + "epoch": 0.22126661454261143, + "grad_norm": 0.3286680579185486, + "learning_rate": 9.996606033862102e-05, + "loss": 0.1548, + "step": 283 + }, + { + "epoch": 0.2220484753713839, + "grad_norm": 0.36132723093032837, + "learning_rate": 9.996350004475104e-05, + "loss": 0.2245, + "step": 284 + }, + { + "epoch": 0.22283033620015638, + "grad_norm": 0.38861459493637085, + "learning_rate": 9.996084669541397e-05, + "loss": 0.2044, + "step": 285 + }, + { + "epoch": 0.22361219702892884, + "grad_norm": 0.5624184608459473, + "learning_rate": 9.995810029555159e-05, + "loss": 0.1935, + "step": 286 + }, + { + "epoch": 0.22439405785770133, + "grad_norm": 0.4298049807548523, + "learning_rate": 9.9955260850279e-05, + "loss": 0.1332, + "step": 287 + }, + { + "epoch": 0.2251759186864738, + "grad_norm": 0.36704859137535095, + "learning_rate": 9.995232836488455e-05, + "loss": 0.1673, + "step": 288 + }, + { + "epoch": 0.2259577795152463, + "grad_norm": 0.2979743182659149, + "learning_rate": 9.994930284482993e-05, + "loss": 0.1472, + "step": 289 + }, + { + "epoch": 0.22673964034401878, + "grad_norm": 0.36498042941093445, + "learning_rate": 9.994618429575008e-05, + "loss": 0.1409, + "step": 290 + }, + { + "epoch": 0.22752150117279124, + "grad_norm": 0.2840629816055298, + "learning_rate": 9.994297272345319e-05, 
+ "loss": 0.126, + "step": 291 + }, + { + "epoch": 0.22830336200156373, + "grad_norm": 0.4682474732398987, + "learning_rate": 9.99396681339207e-05, + "loss": 0.2315, + "step": 292 + }, + { + "epoch": 0.2290852228303362, + "grad_norm": 0.4364871084690094, + "learning_rate": 9.993627053330732e-05, + "loss": 0.1534, + "step": 293 + }, + { + "epoch": 0.22986708365910868, + "grad_norm": 0.4619438350200653, + "learning_rate": 9.993277992794096e-05, + "loss": 0.186, + "step": 294 + }, + { + "epoch": 0.23064894448788115, + "grad_norm": 0.47760748863220215, + "learning_rate": 9.99291963243228e-05, + "loss": 0.182, + "step": 295 + }, + { + "epoch": 0.23143080531665364, + "grad_norm": 0.3896167576313019, + "learning_rate": 9.992551972912718e-05, + "loss": 0.1883, + "step": 296 + }, + { + "epoch": 0.23221266614542613, + "grad_norm": 0.4022518992424011, + "learning_rate": 9.992175014920161e-05, + "loss": 0.1948, + "step": 297 + }, + { + "epoch": 0.2329945269741986, + "grad_norm": 0.382075697183609, + "learning_rate": 9.991788759156684e-05, + "loss": 0.1836, + "step": 298 + }, + { + "epoch": 0.23377638780297108, + "grad_norm": 0.35182997584342957, + "learning_rate": 9.991393206341677e-05, + "loss": 0.1215, + "step": 299 + }, + { + "epoch": 0.23455824863174354, + "grad_norm": 0.2927306890487671, + "learning_rate": 9.990988357211843e-05, + "loss": 0.127, + "step": 300 + }, + { + "epoch": 0.23534010946051603, + "grad_norm": 0.31669214367866516, + "learning_rate": 9.990574212521205e-05, + "loss": 0.1553, + "step": 301 + }, + { + "epoch": 0.2361219702892885, + "grad_norm": 0.39152827858924866, + "learning_rate": 9.990150773041089e-05, + "loss": 0.1672, + "step": 302 + }, + { + "epoch": 0.236903831118061, + "grad_norm": 0.4005658030509949, + "learning_rate": 9.98971803956014e-05, + "loss": 0.2034, + "step": 303 + }, + { + "epoch": 0.23768569194683348, + "grad_norm": 0.3976832330226898, + "learning_rate": 9.989276012884313e-05, + "loss": 0.1498, + "step": 304 + }, + { + "epoch": 0.23846755277560594, + "grad_norm": 0.36635416746139526, + "learning_rate": 9.988824693836864e-05, + "loss": 0.1765, + "step": 305 + }, + { + "epoch": 0.23924941360437843, + "grad_norm": 0.44994375109672546, + "learning_rate": 9.988364083258367e-05, + "loss": 0.201, + "step": 306 + }, + { + "epoch": 0.2400312744331509, + "grad_norm": 0.35641130805015564, + "learning_rate": 9.98789418200669e-05, + "loss": 0.1594, + "step": 307 + }, + { + "epoch": 0.24081313526192338, + "grad_norm": 0.5840378999710083, + "learning_rate": 9.987414990957012e-05, + "loss": 0.2061, + "step": 308 + }, + { + "epoch": 0.24159499609069585, + "grad_norm": 0.3115840256214142, + "learning_rate": 9.98692651100181e-05, + "loss": 0.1272, + "step": 309 + }, + { + "epoch": 0.24237685691946834, + "grad_norm": 0.30972468852996826, + "learning_rate": 9.986428743050864e-05, + "loss": 0.1711, + "step": 310 + }, + { + "epoch": 0.24315871774824083, + "grad_norm": 0.34734436869621277, + "learning_rate": 9.985921688031252e-05, + "loss": 0.1629, + "step": 311 + }, + { + "epoch": 0.2439405785770133, + "grad_norm": 0.3847326338291168, + "learning_rate": 9.985405346887345e-05, + "loss": 0.2059, + "step": 312 + }, + { + "epoch": 0.24472243940578578, + "grad_norm": 0.43368545174598694, + "learning_rate": 9.984879720580816e-05, + "loss": 0.221, + "step": 313 + }, + { + "epoch": 0.24550430023455824, + "grad_norm": 0.3668220043182373, + "learning_rate": 9.984344810090623e-05, + "loss": 0.1626, + "step": 314 + }, + { + "epoch": 0.24628616106333073, + "grad_norm": 0.3731725215911865, + 
"learning_rate": 9.983800616413026e-05, + "loss": 0.142, + "step": 315 + }, + { + "epoch": 0.2470680218921032, + "grad_norm": 0.4757682681083679, + "learning_rate": 9.983247140561565e-05, + "loss": 0.1747, + "step": 316 + }, + { + "epoch": 0.24784988272087569, + "grad_norm": 0.3016088306903839, + "learning_rate": 9.982684383567071e-05, + "loss": 0.1177, + "step": 317 + }, + { + "epoch": 0.24863174354964818, + "grad_norm": 0.32493770122528076, + "learning_rate": 9.982112346477666e-05, + "loss": 0.1315, + "step": 318 + }, + { + "epoch": 0.24941360437842064, + "grad_norm": 0.2876770794391632, + "learning_rate": 9.981531030358746e-05, + "loss": 0.1167, + "step": 319 + }, + { + "epoch": 0.2501954652071931, + "grad_norm": 0.45965278148651123, + "learning_rate": 9.980940436292999e-05, + "loss": 0.1888, + "step": 320 + }, + { + "epoch": 0.2509773260359656, + "grad_norm": 0.4690196216106415, + "learning_rate": 9.980340565380382e-05, + "loss": 0.1482, + "step": 321 + }, + { + "epoch": 0.2517591868647381, + "grad_norm": 0.4229646623134613, + "learning_rate": 9.979731418738143e-05, + "loss": 0.1665, + "step": 322 + }, + { + "epoch": 0.25254104769351055, + "grad_norm": 0.33682507276535034, + "learning_rate": 9.979112997500792e-05, + "loss": 0.1573, + "step": 323 + }, + { + "epoch": 0.253322908522283, + "grad_norm": 0.44713473320007324, + "learning_rate": 9.978485302820125e-05, + "loss": 0.1575, + "step": 324 + }, + { + "epoch": 0.2541047693510555, + "grad_norm": 0.38661685585975647, + "learning_rate": 9.9778483358652e-05, + "loss": 0.123, + "step": 325 + }, + { + "epoch": 0.254886630179828, + "grad_norm": 0.41950613260269165, + "learning_rate": 9.977202097822352e-05, + "loss": 0.1819, + "step": 326 + }, + { + "epoch": 0.25566849100860045, + "grad_norm": 0.3908550441265106, + "learning_rate": 9.976546589895175e-05, + "loss": 0.164, + "step": 327 + }, + { + "epoch": 0.25645035183737297, + "grad_norm": 0.481545627117157, + "learning_rate": 9.975881813304535e-05, + "loss": 0.1764, + "step": 328 + }, + { + "epoch": 0.25723221266614543, + "grad_norm": 0.4087935984134674, + "learning_rate": 9.975207769288556e-05, + "loss": 0.1559, + "step": 329 + }, + { + "epoch": 0.2580140734949179, + "grad_norm": 0.285412073135376, + "learning_rate": 9.974524459102625e-05, + "loss": 0.1612, + "step": 330 + }, + { + "epoch": 0.25879593432369036, + "grad_norm": 0.413352906703949, + "learning_rate": 9.973831884019387e-05, + "loss": 0.1504, + "step": 331 + }, + { + "epoch": 0.2595777951524629, + "grad_norm": 0.48298683762550354, + "learning_rate": 9.97313004532874e-05, + "loss": 0.2061, + "step": 332 + }, + { + "epoch": 0.26035965598123534, + "grad_norm": 0.4352918863296509, + "learning_rate": 9.972418944337835e-05, + "loss": 0.2174, + "step": 333 + }, + { + "epoch": 0.2611415168100078, + "grad_norm": 0.3502805233001709, + "learning_rate": 9.971698582371081e-05, + "loss": 0.1882, + "step": 334 + }, + { + "epoch": 0.2619233776387803, + "grad_norm": 0.3264360725879669, + "learning_rate": 9.970968960770124e-05, + "loss": 0.1216, + "step": 335 + }, + { + "epoch": 0.2627052384675528, + "grad_norm": 0.42405977845191956, + "learning_rate": 9.970230080893866e-05, + "loss": 0.1781, + "step": 336 + }, + { + "epoch": 0.26348709929632524, + "grad_norm": 0.30436626076698303, + "learning_rate": 9.969481944118443e-05, + "loss": 0.1665, + "step": 337 + }, + { + "epoch": 0.2642689601250977, + "grad_norm": 0.3381073474884033, + "learning_rate": 9.968724551837243e-05, + "loss": 0.1661, + "step": 338 + }, + { + "epoch": 0.2650508209538702, + 
"grad_norm": 0.3266483545303345, + "learning_rate": 9.96795790546088e-05, + "loss": 0.1219, + "step": 339 + }, + { + "epoch": 0.2658326817826427, + "grad_norm": 0.31733569502830505, + "learning_rate": 9.967182006417212e-05, + "loss": 0.1555, + "step": 340 + }, + { + "epoch": 0.26661454261141515, + "grad_norm": 0.41691651940345764, + "learning_rate": 9.966396856151326e-05, + "loss": 0.192, + "step": 341 + }, + { + "epoch": 0.26739640344018767, + "grad_norm": 0.4389062225818634, + "learning_rate": 9.965602456125538e-05, + "loss": 0.2015, + "step": 342 + }, + { + "epoch": 0.26817826426896013, + "grad_norm": 0.3703081011772156, + "learning_rate": 9.964798807819397e-05, + "loss": 0.1676, + "step": 343 + }, + { + "epoch": 0.2689601250977326, + "grad_norm": 0.4071272909641266, + "learning_rate": 9.963985912729671e-05, + "loss": 0.1548, + "step": 344 + }, + { + "epoch": 0.26974198592650506, + "grad_norm": 0.3323352634906769, + "learning_rate": 9.963163772370352e-05, + "loss": 0.1208, + "step": 345 + }, + { + "epoch": 0.2705238467552776, + "grad_norm": 0.40458884835243225, + "learning_rate": 9.962332388272652e-05, + "loss": 0.1631, + "step": 346 + }, + { + "epoch": 0.27130570758405004, + "grad_norm": 0.3851662576198578, + "learning_rate": 9.961491761984996e-05, + "loss": 0.1305, + "step": 347 + }, + { + "epoch": 0.2720875684128225, + "grad_norm": 0.3678821325302124, + "learning_rate": 9.960641895073026e-05, + "loss": 0.1621, + "step": 348 + }, + { + "epoch": 0.272869429241595, + "grad_norm": 0.37880757451057434, + "learning_rate": 9.959782789119592e-05, + "loss": 0.1966, + "step": 349 + }, + { + "epoch": 0.2736512900703675, + "grad_norm": 0.43384692072868347, + "learning_rate": 9.958914445724754e-05, + "loss": 0.1812, + "step": 350 + }, + { + "epoch": 0.27443315089913994, + "grad_norm": 0.3404906094074249, + "learning_rate": 9.958036866505772e-05, + "loss": 0.1585, + "step": 351 + }, + { + "epoch": 0.2752150117279124, + "grad_norm": 0.2821943759918213, + "learning_rate": 9.95715005309711e-05, + "loss": 0.1368, + "step": 352 + }, + { + "epoch": 0.2759968725566849, + "grad_norm": 0.37595340609550476, + "learning_rate": 9.956254007150432e-05, + "loss": 0.1723, + "step": 353 + }, + { + "epoch": 0.2767787333854574, + "grad_norm": 0.41832900047302246, + "learning_rate": 9.955348730334595e-05, + "loss": 0.1956, + "step": 354 + }, + { + "epoch": 0.27756059421422985, + "grad_norm": 0.40022769570350647, + "learning_rate": 9.954434224335649e-05, + "loss": 0.1455, + "step": 355 + }, + { + "epoch": 0.27834245504300237, + "grad_norm": 0.2701699137687683, + "learning_rate": 9.953510490856834e-05, + "loss": 0.1547, + "step": 356 + }, + { + "epoch": 0.27912431587177483, + "grad_norm": 0.39053916931152344, + "learning_rate": 9.952577531618574e-05, + "loss": 0.1557, + "step": 357 + }, + { + "epoch": 0.2799061767005473, + "grad_norm": 0.3268284797668457, + "learning_rate": 9.951635348358476e-05, + "loss": 0.1425, + "step": 358 + }, + { + "epoch": 0.28068803752931976, + "grad_norm": 0.3364890217781067, + "learning_rate": 9.950683942831328e-05, + "loss": 0.1423, + "step": 359 + }, + { + "epoch": 0.2814698983580923, + "grad_norm": 0.4927947521209717, + "learning_rate": 9.949723316809093e-05, + "loss": 0.2059, + "step": 360 + }, + { + "epoch": 0.28225175918686474, + "grad_norm": 0.40654006600379944, + "learning_rate": 9.948753472080907e-05, + "loss": 0.1614, + "step": 361 + }, + { + "epoch": 0.2830336200156372, + "grad_norm": 0.37723296880722046, + "learning_rate": 9.947774410453077e-05, + "loss": 0.1289, + "step": 362 + 
}, + { + "epoch": 0.2838154808444097, + "grad_norm": 0.3067987263202667, + "learning_rate": 9.946786133749071e-05, + "loss": 0.1634, + "step": 363 + }, + { + "epoch": 0.2845973416731822, + "grad_norm": 0.49132147431373596, + "learning_rate": 9.94578864380953e-05, + "loss": 0.15, + "step": 364 + }, + { + "epoch": 0.28537920250195464, + "grad_norm": 0.35463565587997437, + "learning_rate": 9.944781942492242e-05, + "loss": 0.162, + "step": 365 + }, + { + "epoch": 0.2861610633307271, + "grad_norm": 0.4066864848136902, + "learning_rate": 9.943766031672159e-05, + "loss": 0.1448, + "step": 366 + }, + { + "epoch": 0.2869429241594996, + "grad_norm": 0.36959394812583923, + "learning_rate": 9.942740913241386e-05, + "loss": 0.1403, + "step": 367 + }, + { + "epoch": 0.2877247849882721, + "grad_norm": 0.3130083382129669, + "learning_rate": 9.94170658910917e-05, + "loss": 0.1549, + "step": 368 + }, + { + "epoch": 0.28850664581704455, + "grad_norm": 0.31939226388931274, + "learning_rate": 9.94066306120191e-05, + "loss": 0.1266, + "step": 369 + }, + { + "epoch": 0.28928850664581707, + "grad_norm": 0.34432169795036316, + "learning_rate": 9.939610331463142e-05, + "loss": 0.1326, + "step": 370 + }, + { + "epoch": 0.29007036747458953, + "grad_norm": 0.6027820706367493, + "learning_rate": 9.938548401853547e-05, + "loss": 0.1911, + "step": 371 + }, + { + "epoch": 0.290852228303362, + "grad_norm": 0.40641260147094727, + "learning_rate": 9.937477274350933e-05, + "loss": 0.1696, + "step": 372 + }, + { + "epoch": 0.29163408913213446, + "grad_norm": 0.4032672643661499, + "learning_rate": 9.93639695095024e-05, + "loss": 0.1581, + "step": 373 + }, + { + "epoch": 0.292415949960907, + "grad_norm": 0.40551677346229553, + "learning_rate": 9.93530743366354e-05, + "loss": 0.1522, + "step": 374 + }, + { + "epoch": 0.29319781078967944, + "grad_norm": 0.41328316926956177, + "learning_rate": 9.934208724520024e-05, + "loss": 0.1404, + "step": 375 + }, + { + "epoch": 0.2939796716184519, + "grad_norm": 0.3744495213031769, + "learning_rate": 9.933100825566002e-05, + "loss": 0.161, + "step": 376 + }, + { + "epoch": 0.2947615324472244, + "grad_norm": 0.3357921242713928, + "learning_rate": 9.931983738864904e-05, + "loss": 0.1528, + "step": 377 + }, + { + "epoch": 0.2955433932759969, + "grad_norm": 0.5647091269493103, + "learning_rate": 9.930857466497268e-05, + "loss": 0.1388, + "step": 378 + }, + { + "epoch": 0.29632525410476934, + "grad_norm": 0.4130406975746155, + "learning_rate": 9.92972201056074e-05, + "loss": 0.1372, + "step": 379 + }, + { + "epoch": 0.2971071149335418, + "grad_norm": 0.4709077775478363, + "learning_rate": 9.928577373170075e-05, + "loss": 0.197, + "step": 380 + }, + { + "epoch": 0.2978889757623143, + "grad_norm": 0.48450398445129395, + "learning_rate": 9.927423556457121e-05, + "loss": 0.1608, + "step": 381 + }, + { + "epoch": 0.2986708365910868, + "grad_norm": 0.35905659198760986, + "learning_rate": 9.926260562570829e-05, + "loss": 0.2213, + "step": 382 + }, + { + "epoch": 0.29945269741985925, + "grad_norm": 0.41346147656440735, + "learning_rate": 9.925088393677236e-05, + "loss": 0.1616, + "step": 383 + }, + { + "epoch": 0.30023455824863177, + "grad_norm": 0.41525787115097046, + "learning_rate": 9.92390705195947e-05, + "loss": 0.1985, + "step": 384 + }, + { + "epoch": 0.30101641907740423, + "grad_norm": 0.34878870844841003, + "learning_rate": 9.922716539617746e-05, + "loss": 0.1704, + "step": 385 + }, + { + "epoch": 0.3017982799061767, + "grad_norm": 0.4187116026878357, + "learning_rate": 9.921516858869355e-05, + 
"loss": 0.176, + "step": 386 + }, + { + "epoch": 0.30258014073494915, + "grad_norm": 0.35063982009887695, + "learning_rate": 9.920308011948665e-05, + "loss": 0.1408, + "step": 387 + }, + { + "epoch": 0.3033620015637217, + "grad_norm": 0.4096844494342804, + "learning_rate": 9.919090001107114e-05, + "loss": 0.1191, + "step": 388 + }, + { + "epoch": 0.30414386239249414, + "grad_norm": 0.3294246792793274, + "learning_rate": 9.917862828613214e-05, + "loss": 0.1618, + "step": 389 + }, + { + "epoch": 0.3049257232212666, + "grad_norm": 0.32012560963630676, + "learning_rate": 9.916626496752532e-05, + "loss": 0.1109, + "step": 390 + }, + { + "epoch": 0.3057075840500391, + "grad_norm": 0.46081307530403137, + "learning_rate": 9.915381007827698e-05, + "loss": 0.1631, + "step": 391 + }, + { + "epoch": 0.3064894448788116, + "grad_norm": 0.4402189552783966, + "learning_rate": 9.914126364158397e-05, + "loss": 0.13, + "step": 392 + }, + { + "epoch": 0.30727130570758404, + "grad_norm": 0.37083446979522705, + "learning_rate": 9.912862568081364e-05, + "loss": 0.1409, + "step": 393 + }, + { + "epoch": 0.3080531665363565, + "grad_norm": 0.42801108956336975, + "learning_rate": 9.91158962195038e-05, + "loss": 0.1682, + "step": 394 + }, + { + "epoch": 0.308835027365129, + "grad_norm": 0.3493399918079376, + "learning_rate": 9.910307528136266e-05, + "loss": 0.192, + "step": 395 + }, + { + "epoch": 0.3096168881939015, + "grad_norm": 0.42288097739219666, + "learning_rate": 9.909016289026886e-05, + "loss": 0.1677, + "step": 396 + }, + { + "epoch": 0.31039874902267395, + "grad_norm": 0.3308151960372925, + "learning_rate": 9.907715907027129e-05, + "loss": 0.1634, + "step": 397 + }, + { + "epoch": 0.31118060985144647, + "grad_norm": 0.3515646755695343, + "learning_rate": 9.906406384558917e-05, + "loss": 0.1285, + "step": 398 + }, + { + "epoch": 0.31196247068021893, + "grad_norm": 0.41209307312965393, + "learning_rate": 9.905087724061195e-05, + "loss": 0.1758, + "step": 399 + }, + { + "epoch": 0.3127443315089914, + "grad_norm": 0.3443009853363037, + "learning_rate": 9.903759927989929e-05, + "loss": 0.1419, + "step": 400 + }, + { + "epoch": 0.3127443315089914, + "eval_loss": 0.16743157804012299, + "eval_runtime": 13.3792, + "eval_samples_per_second": 3.887, + "eval_steps_per_second": 0.972, + "step": 400 + }, + { + "epoch": 0.31352619233776385, + "grad_norm": 0.2995968461036682, + "learning_rate": 9.902422998818094e-05, + "loss": 0.1303, + "step": 401 + }, + { + "epoch": 0.31430805316653637, + "grad_norm": 0.3437633812427521, + "learning_rate": 9.901076939035683e-05, + "loss": 0.1631, + "step": 402 + }, + { + "epoch": 0.31508991399530883, + "grad_norm": 0.3030770421028137, + "learning_rate": 9.899721751149688e-05, + "loss": 0.1429, + "step": 403 + }, + { + "epoch": 0.3158717748240813, + "grad_norm": 0.3427619934082031, + "learning_rate": 9.898357437684105e-05, + "loss": 0.1401, + "step": 404 + }, + { + "epoch": 0.3166536356528538, + "grad_norm": 0.3895941376686096, + "learning_rate": 9.896984001179925e-05, + "loss": 0.1392, + "step": 405 + }, + { + "epoch": 0.3174354964816263, + "grad_norm": 0.40631383657455444, + "learning_rate": 9.895601444195133e-05, + "loss": 0.1745, + "step": 406 + }, + { + "epoch": 0.31821735731039874, + "grad_norm": 0.3306792080402374, + "learning_rate": 9.894209769304696e-05, + "loss": 0.176, + "step": 407 + }, + { + "epoch": 0.3189992181391712, + "grad_norm": 0.27432066202163696, + "learning_rate": 9.892808979100567e-05, + "loss": 0.1236, + "step": 408 + }, + { + "epoch": 0.3197810789679437, + 
"grad_norm": 0.32954710721969604, + "learning_rate": 9.891399076191674e-05, + "loss": 0.1191, + "step": 409 + }, + { + "epoch": 0.3205629397967162, + "grad_norm": 0.3939608037471771, + "learning_rate": 9.889980063203916e-05, + "loss": 0.2076, + "step": 410 + }, + { + "epoch": 0.32134480062548865, + "grad_norm": 0.4199574291706085, + "learning_rate": 9.888551942780162e-05, + "loss": 0.1603, + "step": 411 + }, + { + "epoch": 0.32212666145426117, + "grad_norm": 0.35778728127479553, + "learning_rate": 9.887114717580241e-05, + "loss": 0.1764, + "step": 412 + }, + { + "epoch": 0.3229085222830336, + "grad_norm": 0.4640763998031616, + "learning_rate": 9.885668390280941e-05, + "loss": 0.184, + "step": 413 + }, + { + "epoch": 0.3236903831118061, + "grad_norm": 0.4196508824825287, + "learning_rate": 9.884212963576001e-05, + "loss": 0.1607, + "step": 414 + }, + { + "epoch": 0.32447224394057855, + "grad_norm": 0.3575003147125244, + "learning_rate": 9.882748440176109e-05, + "loss": 0.1732, + "step": 415 + }, + { + "epoch": 0.32525410476935107, + "grad_norm": 0.34807106852531433, + "learning_rate": 9.881274822808893e-05, + "loss": 0.1678, + "step": 416 + }, + { + "epoch": 0.32603596559812353, + "grad_norm": 0.25956061482429504, + "learning_rate": 9.879792114218921e-05, + "loss": 0.1139, + "step": 417 + }, + { + "epoch": 0.326817826426896, + "grad_norm": 0.2868465781211853, + "learning_rate": 9.87830031716769e-05, + "loss": 0.1103, + "step": 418 + }, + { + "epoch": 0.3275996872556685, + "grad_norm": 0.2630326747894287, + "learning_rate": 9.876799434433628e-05, + "loss": 0.1424, + "step": 419 + }, + { + "epoch": 0.328381548084441, + "grad_norm": 0.30072352290153503, + "learning_rate": 9.875289468812081e-05, + "loss": 0.1615, + "step": 420 + }, + { + "epoch": 0.32916340891321344, + "grad_norm": 0.33478954434394836, + "learning_rate": 9.873770423115314e-05, + "loss": 0.1799, + "step": 421 + }, + { + "epoch": 0.3299452697419859, + "grad_norm": 0.41718190908432007, + "learning_rate": 9.8722423001725e-05, + "loss": 0.1556, + "step": 422 + }, + { + "epoch": 0.3307271305707584, + "grad_norm": 0.3917146623134613, + "learning_rate": 9.870705102829723e-05, + "loss": 0.1786, + "step": 423 + }, + { + "epoch": 0.3315089913995309, + "grad_norm": 0.47362616658210754, + "learning_rate": 9.869158833949966e-05, + "loss": 0.1621, + "step": 424 + }, + { + "epoch": 0.33229085222830335, + "grad_norm": 0.4118567109107971, + "learning_rate": 9.867603496413103e-05, + "loss": 0.1599, + "step": 425 + }, + { + "epoch": 0.33307271305707586, + "grad_norm": 0.421069860458374, + "learning_rate": 9.866039093115905e-05, + "loss": 0.21, + "step": 426 + }, + { + "epoch": 0.3338545738858483, + "grad_norm": 0.33168917894363403, + "learning_rate": 9.864465626972023e-05, + "loss": 0.1465, + "step": 427 + }, + { + "epoch": 0.3346364347146208, + "grad_norm": 0.510787844657898, + "learning_rate": 9.862883100911991e-05, + "loss": 0.183, + "step": 428 + }, + { + "epoch": 0.33541829554339325, + "grad_norm": 0.27205660939216614, + "learning_rate": 9.861291517883213e-05, + "loss": 0.114, + "step": 429 + }, + { + "epoch": 0.33620015637216577, + "grad_norm": 0.38495227694511414, + "learning_rate": 9.859690880849962e-05, + "loss": 0.1453, + "step": 430 + }, + { + "epoch": 0.33698201720093823, + "grad_norm": 0.33769121766090393, + "learning_rate": 9.858081192793378e-05, + "loss": 0.1366, + "step": 431 + }, + { + "epoch": 0.3377638780297107, + "grad_norm": 0.3808351457118988, + "learning_rate": 9.856462456711451e-05, + "loss": 0.2269, + "step": 432 + }, + { 
+ "epoch": 0.3385457388584832, + "grad_norm": 0.33304157853126526, + "learning_rate": 9.85483467561903e-05, + "loss": 0.1566, + "step": 433 + }, + { + "epoch": 0.3393275996872557, + "grad_norm": 0.34193313121795654, + "learning_rate": 9.853197852547802e-05, + "loss": 0.1855, + "step": 434 + }, + { + "epoch": 0.34010946051602814, + "grad_norm": 0.3733983337879181, + "learning_rate": 9.851551990546306e-05, + "loss": 0.1662, + "step": 435 + }, + { + "epoch": 0.3408913213448006, + "grad_norm": 0.3454480469226837, + "learning_rate": 9.849897092679903e-05, + "loss": 0.1305, + "step": 436 + }, + { + "epoch": 0.3416731821735731, + "grad_norm": 0.3219950199127197, + "learning_rate": 9.848233162030794e-05, + "loss": 0.1215, + "step": 437 + }, + { + "epoch": 0.3424550430023456, + "grad_norm": 0.3298224210739136, + "learning_rate": 9.846560201697993e-05, + "loss": 0.1481, + "step": 438 + }, + { + "epoch": 0.34323690383111805, + "grad_norm": 0.4454590976238251, + "learning_rate": 9.84487821479734e-05, + "loss": 0.116, + "step": 439 + }, + { + "epoch": 0.34401876465989056, + "grad_norm": 0.43617334961891174, + "learning_rate": 9.843187204461482e-05, + "loss": 0.1323, + "step": 440 + }, + { + "epoch": 0.344800625488663, + "grad_norm": 0.36367228627204895, + "learning_rate": 9.841487173839873e-05, + "loss": 0.1167, + "step": 441 + }, + { + "epoch": 0.3455824863174355, + "grad_norm": 0.4517843425273895, + "learning_rate": 9.839778126098769e-05, + "loss": 0.1517, + "step": 442 + }, + { + "epoch": 0.34636434714620795, + "grad_norm": 0.39846858382225037, + "learning_rate": 9.838060064421217e-05, + "loss": 0.147, + "step": 443 + }, + { + "epoch": 0.34714620797498047, + "grad_norm": 0.3574373126029968, + "learning_rate": 9.836332992007053e-05, + "loss": 0.1441, + "step": 444 + }, + { + "epoch": 0.34792806880375293, + "grad_norm": 0.3796740770339966, + "learning_rate": 9.834596912072897e-05, + "loss": 0.1642, + "step": 445 + }, + { + "epoch": 0.3487099296325254, + "grad_norm": 0.5189517736434937, + "learning_rate": 9.832851827852146e-05, + "loss": 0.1503, + "step": 446 + }, + { + "epoch": 0.3494917904612979, + "grad_norm": 0.4127172827720642, + "learning_rate": 9.831097742594958e-05, + "loss": 0.1651, + "step": 447 + }, + { + "epoch": 0.3502736512900704, + "grad_norm": 0.4301576316356659, + "learning_rate": 9.82933465956827e-05, + "loss": 0.1491, + "step": 448 + }, + { + "epoch": 0.35105551211884284, + "grad_norm": 0.3040635287761688, + "learning_rate": 9.827562582055765e-05, + "loss": 0.1265, + "step": 449 + }, + { + "epoch": 0.3518373729476153, + "grad_norm": 0.2885948121547699, + "learning_rate": 9.825781513357883e-05, + "loss": 0.1044, + "step": 450 + }, + { + "epoch": 0.3526192337763878, + "grad_norm": 0.43714582920074463, + "learning_rate": 9.823991456791811e-05, + "loss": 0.1536, + "step": 451 + }, + { + "epoch": 0.3534010946051603, + "grad_norm": 0.3937210142612457, + "learning_rate": 9.822192415691471e-05, + "loss": 0.1558, + "step": 452 + }, + { + "epoch": 0.35418295543393274, + "grad_norm": 0.4825761914253235, + "learning_rate": 9.820384393407525e-05, + "loss": 0.188, + "step": 453 + }, + { + "epoch": 0.35496481626270526, + "grad_norm": 0.47098153829574585, + "learning_rate": 9.818567393307354e-05, + "loss": 0.1939, + "step": 454 + }, + { + "epoch": 0.3557466770914777, + "grad_norm": 0.364969402551651, + "learning_rate": 9.816741418775066e-05, + "loss": 0.1361, + "step": 455 + }, + { + "epoch": 0.3565285379202502, + "grad_norm": 0.3509956896305084, + "learning_rate": 9.814906473211482e-05, + "loss": 
0.1622, + "step": 456 + }, + { + "epoch": 0.35731039874902265, + "grad_norm": 0.4199284315109253, + "learning_rate": 9.813062560034134e-05, + "loss": 0.1774, + "step": 457 + }, + { + "epoch": 0.35809225957779517, + "grad_norm": 0.44852301478385925, + "learning_rate": 9.811209682677247e-05, + "loss": 0.1528, + "step": 458 + }, + { + "epoch": 0.35887412040656763, + "grad_norm": 0.2535669207572937, + "learning_rate": 9.809347844591753e-05, + "loss": 0.1465, + "step": 459 + }, + { + "epoch": 0.3596559812353401, + "grad_norm": 0.45635074377059937, + "learning_rate": 9.807477049245263e-05, + "loss": 0.1892, + "step": 460 + }, + { + "epoch": 0.3604378420641126, + "grad_norm": 0.45861974358558655, + "learning_rate": 9.805597300122081e-05, + "loss": 0.1636, + "step": 461 + }, + { + "epoch": 0.3612197028928851, + "grad_norm": 0.38760921359062195, + "learning_rate": 9.803708600723176e-05, + "loss": 0.1571, + "step": 462 + }, + { + "epoch": 0.36200156372165754, + "grad_norm": 0.34623679518699646, + "learning_rate": 9.801810954566195e-05, + "loss": 0.1432, + "step": 463 + }, + { + "epoch": 0.36278342455043, + "grad_norm": 0.4060858190059662, + "learning_rate": 9.799904365185442e-05, + "loss": 0.148, + "step": 464 + }, + { + "epoch": 0.3635652853792025, + "grad_norm": 0.3664781451225281, + "learning_rate": 9.797988836131884e-05, + "loss": 0.1386, + "step": 465 + }, + { + "epoch": 0.364347146207975, + "grad_norm": 0.3110964894294739, + "learning_rate": 9.796064370973133e-05, + "loss": 0.1201, + "step": 466 + }, + { + "epoch": 0.36512900703674744, + "grad_norm": 0.31303268671035767, + "learning_rate": 9.794130973293445e-05, + "loss": 0.1855, + "step": 467 + }, + { + "epoch": 0.36591086786551996, + "grad_norm": 0.27726083993911743, + "learning_rate": 9.792188646693714e-05, + "loss": 0.1037, + "step": 468 + }, + { + "epoch": 0.3666927286942924, + "grad_norm": 0.4889364540576935, + "learning_rate": 9.790237394791461e-05, + "loss": 0.1706, + "step": 469 + }, + { + "epoch": 0.3674745895230649, + "grad_norm": 0.41288602352142334, + "learning_rate": 9.788277221220836e-05, + "loss": 0.114, + "step": 470 + }, + { + "epoch": 0.36825645035183735, + "grad_norm": 0.42090147733688354, + "learning_rate": 9.786308129632598e-05, + "loss": 0.1901, + "step": 471 + }, + { + "epoch": 0.36903831118060987, + "grad_norm": 0.511559247970581, + "learning_rate": 9.78433012369412e-05, + "loss": 0.1699, + "step": 472 + }, + { + "epoch": 0.36982017200938233, + "grad_norm": 0.37128859758377075, + "learning_rate": 9.782343207089377e-05, + "loss": 0.1374, + "step": 473 + }, + { + "epoch": 0.3706020328381548, + "grad_norm": 0.34879276156425476, + "learning_rate": 9.780347383518937e-05, + "loss": 0.1421, + "step": 474 + }, + { + "epoch": 0.3713838936669273, + "grad_norm": 0.3927097022533417, + "learning_rate": 9.778342656699964e-05, + "loss": 0.1767, + "step": 475 + }, + { + "epoch": 0.3721657544956998, + "grad_norm": 0.35084983706474304, + "learning_rate": 9.776329030366196e-05, + "loss": 0.1329, + "step": 476 + }, + { + "epoch": 0.37294761532447224, + "grad_norm": 0.3951479494571686, + "learning_rate": 9.77430650826795e-05, + "loss": 0.1595, + "step": 477 + }, + { + "epoch": 0.3737294761532447, + "grad_norm": 0.3984255790710449, + "learning_rate": 9.77227509417211e-05, + "loss": 0.1838, + "step": 478 + }, + { + "epoch": 0.3745113369820172, + "grad_norm": 0.42549511790275574, + "learning_rate": 9.770234791862125e-05, + "loss": 0.1813, + "step": 479 + }, + { + "epoch": 0.3752931978107897, + "grad_norm": 0.35434046387672424, + 
"learning_rate": 9.768185605137992e-05, + "loss": 0.1598, + "step": 480 + }, + { + "epoch": 0.37607505863956214, + "grad_norm": 0.4132823348045349, + "learning_rate": 9.766127537816256e-05, + "loss": 0.1416, + "step": 481 + }, + { + "epoch": 0.37685691946833466, + "grad_norm": 0.37142178416252136, + "learning_rate": 9.764060593730007e-05, + "loss": 0.1558, + "step": 482 + }, + { + "epoch": 0.3776387802971071, + "grad_norm": 0.23793669044971466, + "learning_rate": 9.761984776728864e-05, + "loss": 0.0921, + "step": 483 + }, + { + "epoch": 0.3784206411258796, + "grad_norm": 0.3711697459220886, + "learning_rate": 9.759900090678967e-05, + "loss": 0.1345, + "step": 484 + }, + { + "epoch": 0.37920250195465205, + "grad_norm": 0.31402236223220825, + "learning_rate": 9.757806539462985e-05, + "loss": 0.118, + "step": 485 + }, + { + "epoch": 0.37998436278342457, + "grad_norm": 0.38717904686927795, + "learning_rate": 9.755704126980088e-05, + "loss": 0.1424, + "step": 486 + }, + { + "epoch": 0.38076622361219703, + "grad_norm": 0.4142398238182068, + "learning_rate": 9.753592857145957e-05, + "loss": 0.1651, + "step": 487 + }, + { + "epoch": 0.3815480844409695, + "grad_norm": 0.433275043964386, + "learning_rate": 9.751472733892763e-05, + "loss": 0.138, + "step": 488 + }, + { + "epoch": 0.382329945269742, + "grad_norm": 0.4147091507911682, + "learning_rate": 9.749343761169171e-05, + "loss": 0.1581, + "step": 489 + }, + { + "epoch": 0.3831118060985145, + "grad_norm": 0.37260568141937256, + "learning_rate": 9.74720594294033e-05, + "loss": 0.1656, + "step": 490 + }, + { + "epoch": 0.38389366692728694, + "grad_norm": 0.42818203568458557, + "learning_rate": 9.745059283187857e-05, + "loss": 0.1356, + "step": 491 + }, + { + "epoch": 0.3846755277560594, + "grad_norm": 0.35563212633132935, + "learning_rate": 9.742903785909838e-05, + "loss": 0.1773, + "step": 492 + }, + { + "epoch": 0.3854573885848319, + "grad_norm": 0.4813738167285919, + "learning_rate": 9.74073945512082e-05, + "loss": 0.1547, + "step": 493 + }, + { + "epoch": 0.3862392494136044, + "grad_norm": 0.29549703001976013, + "learning_rate": 9.738566294851805e-05, + "loss": 0.1249, + "step": 494 + }, + { + "epoch": 0.38702111024237684, + "grad_norm": 0.34138891100883484, + "learning_rate": 9.736384309150233e-05, + "loss": 0.1532, + "step": 495 + }, + { + "epoch": 0.38780297107114936, + "grad_norm": 0.37980690598487854, + "learning_rate": 9.734193502079987e-05, + "loss": 0.1146, + "step": 496 + }, + { + "epoch": 0.3885848318999218, + "grad_norm": 0.2760559916496277, + "learning_rate": 9.731993877721377e-05, + "loss": 0.1394, + "step": 497 + }, + { + "epoch": 0.3893666927286943, + "grad_norm": 0.3500741422176361, + "learning_rate": 9.729785440171133e-05, + "loss": 0.1481, + "step": 498 + }, + { + "epoch": 0.39014855355746675, + "grad_norm": 0.3756244480609894, + "learning_rate": 9.727568193542403e-05, + "loss": 0.1704, + "step": 499 + }, + { + "epoch": 0.39093041438623927, + "grad_norm": 0.36916160583496094, + "learning_rate": 9.72534214196474e-05, + "loss": 0.15, + "step": 500 + }, + { + "epoch": 0.39171227521501173, + "grad_norm": 0.33896347880363464, + "learning_rate": 9.723107289584095e-05, + "loss": 0.1379, + "step": 501 + }, + { + "epoch": 0.3924941360437842, + "grad_norm": 0.3901592791080475, + "learning_rate": 9.720863640562812e-05, + "loss": 0.1439, + "step": 502 + }, + { + "epoch": 0.3932759968725567, + "grad_norm": 0.43926864862442017, + "learning_rate": 9.718611199079617e-05, + "loss": 0.2371, + "step": 503 + }, + { + "epoch": 0.3940578577013292, 
+ "grad_norm": 0.38037753105163574, + "learning_rate": 9.716349969329612e-05, + "loss": 0.1354, + "step": 504 + }, + { + "epoch": 0.39483971853010164, + "grad_norm": 0.37031248211860657, + "learning_rate": 9.714079955524269e-05, + "loss": 0.1233, + "step": 505 + }, + { + "epoch": 0.3956215793588741, + "grad_norm": 0.340681791305542, + "learning_rate": 9.711801161891417e-05, + "loss": 0.1247, + "step": 506 + }, + { + "epoch": 0.3964034401876466, + "grad_norm": 0.47795501351356506, + "learning_rate": 9.709513592675236e-05, + "loss": 0.1425, + "step": 507 + }, + { + "epoch": 0.3971853010164191, + "grad_norm": 0.4588595926761627, + "learning_rate": 9.707217252136257e-05, + "loss": 0.16, + "step": 508 + }, + { + "epoch": 0.39796716184519154, + "grad_norm": 0.41790610551834106, + "learning_rate": 9.704912144551341e-05, + "loss": 0.1514, + "step": 509 + }, + { + "epoch": 0.39874902267396406, + "grad_norm": 0.34573647379875183, + "learning_rate": 9.70259827421368e-05, + "loss": 0.1614, + "step": 510 + }, + { + "epoch": 0.3995308835027365, + "grad_norm": 0.5089054107666016, + "learning_rate": 9.700275645432784e-05, + "loss": 0.2109, + "step": 511 + }, + { + "epoch": 0.400312744331509, + "grad_norm": 0.38729050755500793, + "learning_rate": 9.697944262534478e-05, + "loss": 0.1341, + "step": 512 + }, + { + "epoch": 0.40109460516028145, + "grad_norm": 0.42094919085502625, + "learning_rate": 9.695604129860889e-05, + "loss": 0.12, + "step": 513 + }, + { + "epoch": 0.40187646598905397, + "grad_norm": 0.3481001555919647, + "learning_rate": 9.693255251770444e-05, + "loss": 0.1285, + "step": 514 + }, + { + "epoch": 0.40265832681782643, + "grad_norm": 0.37856054306030273, + "learning_rate": 9.690897632637852e-05, + "loss": 0.1127, + "step": 515 + }, + { + "epoch": 0.4034401876465989, + "grad_norm": 0.4085037112236023, + "learning_rate": 9.688531276854109e-05, + "loss": 0.1315, + "step": 516 + }, + { + "epoch": 0.4042220484753714, + "grad_norm": 0.34440815448760986, + "learning_rate": 9.686156188826478e-05, + "loss": 0.158, + "step": 517 + }, + { + "epoch": 0.40500390930414387, + "grad_norm": 0.4735249876976013, + "learning_rate": 9.683772372978485e-05, + "loss": 0.1583, + "step": 518 + }, + { + "epoch": 0.40578577013291633, + "grad_norm": 0.33127447962760925, + "learning_rate": 9.681379833749915e-05, + "loss": 0.1002, + "step": 519 + }, + { + "epoch": 0.4065676309616888, + "grad_norm": 0.3883433938026428, + "learning_rate": 9.678978575596795e-05, + "loss": 0.1379, + "step": 520 + }, + { + "epoch": 0.4073494917904613, + "grad_norm": 0.3916020691394806, + "learning_rate": 9.676568602991399e-05, + "loss": 0.1487, + "step": 521 + }, + { + "epoch": 0.4081313526192338, + "grad_norm": 0.34118443727493286, + "learning_rate": 9.674149920422222e-05, + "loss": 0.1419, + "step": 522 + }, + { + "epoch": 0.40891321344800624, + "grad_norm": 0.43779224157333374, + "learning_rate": 9.671722532393985e-05, + "loss": 0.1595, + "step": 523 + }, + { + "epoch": 0.40969507427677876, + "grad_norm": 0.43785735964775085, + "learning_rate": 9.669286443427625e-05, + "loss": 0.1651, + "step": 524 + }, + { + "epoch": 0.4104769351055512, + "grad_norm": 0.5933069586753845, + "learning_rate": 9.666841658060282e-05, + "loss": 0.1758, + "step": 525 + }, + { + "epoch": 0.4112587959343237, + "grad_norm": 0.4479697644710541, + "learning_rate": 9.66438818084529e-05, + "loss": 0.1534, + "step": 526 + }, + { + "epoch": 0.41204065676309615, + "grad_norm": 0.3687957525253296, + "learning_rate": 9.661926016352178e-05, + "loss": 0.1255, + "step": 527 + 
}, + { + "epoch": 0.41282251759186867, + "grad_norm": 0.3272929787635803, + "learning_rate": 9.659455169166648e-05, + "loss": 0.1403, + "step": 528 + }, + { + "epoch": 0.41360437842064113, + "grad_norm": 0.3177148103713989, + "learning_rate": 9.656975643890578e-05, + "loss": 0.1255, + "step": 529 + }, + { + "epoch": 0.4143862392494136, + "grad_norm": 0.44476059079170227, + "learning_rate": 9.654487445142004e-05, + "loss": 0.1435, + "step": 530 + }, + { + "epoch": 0.4151681000781861, + "grad_norm": 0.38959184288978577, + "learning_rate": 9.651990577555122e-05, + "loss": 0.1252, + "step": 531 + }, + { + "epoch": 0.41594996090695857, + "grad_norm": 0.5031436085700989, + "learning_rate": 9.64948504578027e-05, + "loss": 0.1476, + "step": 532 + }, + { + "epoch": 0.41673182173573103, + "grad_norm": 0.41138991713523865, + "learning_rate": 9.64697085448392e-05, + "loss": 0.1536, + "step": 533 + }, + { + "epoch": 0.4175136825645035, + "grad_norm": 0.3746177852153778, + "learning_rate": 9.644448008348679e-05, + "loss": 0.1369, + "step": 534 + }, + { + "epoch": 0.418295543393276, + "grad_norm": 0.29382434487342834, + "learning_rate": 9.641916512073268e-05, + "loss": 0.1398, + "step": 535 + }, + { + "epoch": 0.4190774042220485, + "grad_norm": 0.3424062132835388, + "learning_rate": 9.639376370372519e-05, + "loss": 0.1229, + "step": 536 + }, + { + "epoch": 0.41985926505082094, + "grad_norm": 0.4400944709777832, + "learning_rate": 9.636827587977368e-05, + "loss": 0.1298, + "step": 537 + }, + { + "epoch": 0.42064112587959346, + "grad_norm": 0.36449581384658813, + "learning_rate": 9.634270169634845e-05, + "loss": 0.1332, + "step": 538 + }, + { + "epoch": 0.4214229867083659, + "grad_norm": 0.405576229095459, + "learning_rate": 9.63170412010806e-05, + "loss": 0.1447, + "step": 539 + }, + { + "epoch": 0.4222048475371384, + "grad_norm": 0.35496416687965393, + "learning_rate": 9.629129444176202e-05, + "loss": 0.1537, + "step": 540 + }, + { + "epoch": 0.42298670836591085, + "grad_norm": 0.26653626561164856, + "learning_rate": 9.626546146634523e-05, + "loss": 0.1199, + "step": 541 + }, + { + "epoch": 0.42376856919468336, + "grad_norm": 0.3852273225784302, + "learning_rate": 9.623954232294335e-05, + "loss": 0.1065, + "step": 542 + }, + { + "epoch": 0.4245504300234558, + "grad_norm": 0.30202388763427734, + "learning_rate": 9.621353705982998e-05, + "loss": 0.1459, + "step": 543 + }, + { + "epoch": 0.4253322908522283, + "grad_norm": 0.555022120475769, + "learning_rate": 9.618744572543912e-05, + "loss": 0.1564, + "step": 544 + }, + { + "epoch": 0.4261141516810008, + "grad_norm": 0.3162443935871124, + "learning_rate": 9.616126836836508e-05, + "loss": 0.1132, + "step": 545 + }, + { + "epoch": 0.42689601250977327, + "grad_norm": 0.348391056060791, + "learning_rate": 9.613500503736237e-05, + "loss": 0.1762, + "step": 546 + }, + { + "epoch": 0.42767787333854573, + "grad_norm": 0.34790390729904175, + "learning_rate": 9.61086557813456e-05, + "loss": 0.1524, + "step": 547 + }, + { + "epoch": 0.4284597341673182, + "grad_norm": 0.3102438449859619, + "learning_rate": 9.608222064938947e-05, + "loss": 0.1189, + "step": 548 + }, + { + "epoch": 0.4292415949960907, + "grad_norm": 0.3740433156490326, + "learning_rate": 9.60556996907286e-05, + "loss": 0.1382, + "step": 549 + }, + { + "epoch": 0.4300234558248632, + "grad_norm": 0.3209870755672455, + "learning_rate": 9.602909295475743e-05, + "loss": 0.1858, + "step": 550 + }, + { + "epoch": 0.43080531665363564, + "grad_norm": 0.3138205409049988, + "learning_rate": 9.600240049103017e-05, + 
"loss": 0.135, + "step": 551 + }, + { + "epoch": 0.43158717748240816, + "grad_norm": 0.2969190180301666, + "learning_rate": 9.597562234926074e-05, + "loss": 0.1124, + "step": 552 + }, + { + "epoch": 0.4323690383111806, + "grad_norm": 0.4288076162338257, + "learning_rate": 9.594875857932258e-05, + "loss": 0.17, + "step": 553 + }, + { + "epoch": 0.4331508991399531, + "grad_norm": 0.36456018686294556, + "learning_rate": 9.592180923124861e-05, + "loss": 0.1732, + "step": 554 + }, + { + "epoch": 0.43393275996872555, + "grad_norm": 0.41333866119384766, + "learning_rate": 9.589477435523118e-05, + "loss": 0.1762, + "step": 555 + }, + { + "epoch": 0.43471462079749806, + "grad_norm": 0.3368997871875763, + "learning_rate": 9.58676540016219e-05, + "loss": 0.112, + "step": 556 + }, + { + "epoch": 0.4354964816262705, + "grad_norm": 0.29240164160728455, + "learning_rate": 9.584044822093157e-05, + "loss": 0.1659, + "step": 557 + }, + { + "epoch": 0.436278342455043, + "grad_norm": 0.29775160551071167, + "learning_rate": 9.581315706383013e-05, + "loss": 0.1055, + "step": 558 + }, + { + "epoch": 0.4370602032838155, + "grad_norm": 0.42036205530166626, + "learning_rate": 9.57857805811465e-05, + "loss": 0.1232, + "step": 559 + }, + { + "epoch": 0.43784206411258797, + "grad_norm": 0.36852604150772095, + "learning_rate": 9.575831882386855e-05, + "loss": 0.1359, + "step": 560 + }, + { + "epoch": 0.43862392494136043, + "grad_norm": 0.3674640357494354, + "learning_rate": 9.573077184314294e-05, + "loss": 0.1351, + "step": 561 + }, + { + "epoch": 0.4394057857701329, + "grad_norm": 0.37468084692955017, + "learning_rate": 9.570313969027504e-05, + "loss": 0.1353, + "step": 562 + }, + { + "epoch": 0.4401876465989054, + "grad_norm": 0.3501920998096466, + "learning_rate": 9.567542241672891e-05, + "loss": 0.1373, + "step": 563 + }, + { + "epoch": 0.4409695074276779, + "grad_norm": 0.5198925137519836, + "learning_rate": 9.564762007412711e-05, + "loss": 0.1635, + "step": 564 + }, + { + "epoch": 0.44175136825645034, + "grad_norm": 0.33644333481788635, + "learning_rate": 9.561973271425061e-05, + "loss": 0.1384, + "step": 565 + }, + { + "epoch": 0.44253322908522286, + "grad_norm": 0.2597680389881134, + "learning_rate": 9.559176038903879e-05, + "loss": 0.1368, + "step": 566 + }, + { + "epoch": 0.4433150899139953, + "grad_norm": 0.38457760214805603, + "learning_rate": 9.55637031505892e-05, + "loss": 0.1596, + "step": 567 + }, + { + "epoch": 0.4440969507427678, + "grad_norm": 0.33996647596359253, + "learning_rate": 9.55355610511576e-05, + "loss": 0.1253, + "step": 568 + }, + { + "epoch": 0.44487881157154024, + "grad_norm": 0.4009721279144287, + "learning_rate": 9.550733414315776e-05, + "loss": 0.1715, + "step": 569 + }, + { + "epoch": 0.44566067240031276, + "grad_norm": 0.4111771285533905, + "learning_rate": 9.547902247916143e-05, + "loss": 0.1479, + "step": 570 + }, + { + "epoch": 0.4464425332290852, + "grad_norm": 0.35021543502807617, + "learning_rate": 9.545062611189821e-05, + "loss": 0.118, + "step": 571 + }, + { + "epoch": 0.4472243940578577, + "grad_norm": 0.41540324687957764, + "learning_rate": 9.542214509425544e-05, + "loss": 0.1326, + "step": 572 + }, + { + "epoch": 0.4480062548866302, + "grad_norm": 0.4157445430755615, + "learning_rate": 9.539357947927815e-05, + "loss": 0.1728, + "step": 573 + }, + { + "epoch": 0.44878811571540267, + "grad_norm": 0.49535229802131653, + "learning_rate": 9.536492932016889e-05, + "loss": 0.134, + "step": 574 + }, + { + "epoch": 0.44956997654417513, + "grad_norm": 0.3090532720088959, + 
"learning_rate": 9.53361946702877e-05, + "loss": 0.1258, + "step": 575 + }, + { + "epoch": 0.4503518373729476, + "grad_norm": 0.32857659459114075, + "learning_rate": 9.530737558315196e-05, + "loss": 0.155, + "step": 576 + }, + { + "epoch": 0.4511336982017201, + "grad_norm": 0.43488988280296326, + "learning_rate": 9.527847211243635e-05, + "loss": 0.1295, + "step": 577 + }, + { + "epoch": 0.4519155590304926, + "grad_norm": 0.44877803325653076, + "learning_rate": 9.524948431197267e-05, + "loss": 0.1721, + "step": 578 + }, + { + "epoch": 0.45269741985926504, + "grad_norm": 0.32422056794166565, + "learning_rate": 9.52204122357498e-05, + "loss": 0.1148, + "step": 579 + }, + { + "epoch": 0.45347928068803756, + "grad_norm": 0.2760488986968994, + "learning_rate": 9.519125593791355e-05, + "loss": 0.1013, + "step": 580 + }, + { + "epoch": 0.45426114151681, + "grad_norm": 0.41548505425453186, + "learning_rate": 9.516201547276668e-05, + "loss": 0.1418, + "step": 581 + }, + { + "epoch": 0.4550430023455825, + "grad_norm": 0.3416723310947418, + "learning_rate": 9.513269089476862e-05, + "loss": 0.1955, + "step": 582 + }, + { + "epoch": 0.45582486317435494, + "grad_norm": 0.38667765259742737, + "learning_rate": 9.510328225853549e-05, + "loss": 0.1675, + "step": 583 + }, + { + "epoch": 0.45660672400312746, + "grad_norm": 0.3269575834274292, + "learning_rate": 9.507378961883992e-05, + "loss": 0.1565, + "step": 584 + }, + { + "epoch": 0.4573885848318999, + "grad_norm": 0.4353930652141571, + "learning_rate": 9.50442130306111e-05, + "loss": 0.1567, + "step": 585 + }, + { + "epoch": 0.4581704456606724, + "grad_norm": 0.3163772225379944, + "learning_rate": 9.501455254893447e-05, + "loss": 0.1271, + "step": 586 + }, + { + "epoch": 0.4589523064894449, + "grad_norm": 0.36551064252853394, + "learning_rate": 9.498480822905176e-05, + "loss": 0.1502, + "step": 587 + }, + { + "epoch": 0.45973416731821737, + "grad_norm": 0.3503546416759491, + "learning_rate": 9.495498012636085e-05, + "loss": 0.1323, + "step": 588 + }, + { + "epoch": 0.46051602814698983, + "grad_norm": 0.34271863102912903, + "learning_rate": 9.492506829641566e-05, + "loss": 0.1161, + "step": 589 + }, + { + "epoch": 0.4612978889757623, + "grad_norm": 0.3517410159111023, + "learning_rate": 9.4895072794926e-05, + "loss": 0.138, + "step": 590 + }, + { + "epoch": 0.4620797498045348, + "grad_norm": 0.4492360055446625, + "learning_rate": 9.486499367775764e-05, + "loss": 0.1446, + "step": 591 + }, + { + "epoch": 0.4628616106333073, + "grad_norm": 0.4240667521953583, + "learning_rate": 9.48348310009319e-05, + "loss": 0.1273, + "step": 592 + }, + { + "epoch": 0.46364347146207974, + "grad_norm": 0.3540504276752472, + "learning_rate": 9.480458482062594e-05, + "loss": 0.1645, + "step": 593 + }, + { + "epoch": 0.46442533229085226, + "grad_norm": 0.38456183671951294, + "learning_rate": 9.477425519317224e-05, + "loss": 0.1369, + "step": 594 + }, + { + "epoch": 0.4652071931196247, + "grad_norm": 0.36017024517059326, + "learning_rate": 9.474384217505883e-05, + "loss": 0.1233, + "step": 595 + }, + { + "epoch": 0.4659890539483972, + "grad_norm": 0.32265275716781616, + "learning_rate": 9.471334582292901e-05, + "loss": 0.1219, + "step": 596 + }, + { + "epoch": 0.46677091477716964, + "grad_norm": 0.36679038405418396, + "learning_rate": 9.468276619358129e-05, + "loss": 0.159, + "step": 597 + }, + { + "epoch": 0.46755277560594216, + "grad_norm": 0.36844977736473083, + "learning_rate": 9.465210334396927e-05, + "loss": 0.1405, + "step": 598 + }, + { + "epoch": 0.4683346364347146, + 
"grad_norm": 0.5122119188308716, + "learning_rate": 9.462135733120156e-05, + "loss": 0.1428, + "step": 599 + }, + { + "epoch": 0.4691164972634871, + "grad_norm": 0.35265064239501953, + "learning_rate": 9.459052821254166e-05, + "loss": 0.1137, + "step": 600 + }, + { + "epoch": 0.4691164972634871, + "eval_loss": 0.15098431706428528, + "eval_runtime": 13.3435, + "eval_samples_per_second": 3.897, + "eval_steps_per_second": 0.974, + "step": 600 + }, + { + "epoch": 0.4698983580922596, + "grad_norm": 0.38564780354499817, + "learning_rate": 9.455961604540784e-05, + "loss": 0.1325, + "step": 601 + }, + { + "epoch": 0.47068021892103207, + "grad_norm": 0.42470040917396545, + "learning_rate": 9.452862088737306e-05, + "loss": 0.1747, + "step": 602 + }, + { + "epoch": 0.47146207974980453, + "grad_norm": 0.5298458337783813, + "learning_rate": 9.449754279616481e-05, + "loss": 0.1574, + "step": 603 + }, + { + "epoch": 0.472243940578577, + "grad_norm": 0.4499131441116333, + "learning_rate": 9.446638182966511e-05, + "loss": 0.147, + "step": 604 + }, + { + "epoch": 0.4730258014073495, + "grad_norm": 0.3690560460090637, + "learning_rate": 9.443513804591026e-05, + "loss": 0.1243, + "step": 605 + }, + { + "epoch": 0.473807662236122, + "grad_norm": 0.29506915807724, + "learning_rate": 9.440381150309085e-05, + "loss": 0.1397, + "step": 606 + }, + { + "epoch": 0.47458952306489444, + "grad_norm": 0.347539484500885, + "learning_rate": 9.43724022595516e-05, + "loss": 0.1444, + "step": 607 + }, + { + "epoch": 0.47537138389366695, + "grad_norm": 0.3493543863296509, + "learning_rate": 9.434091037379125e-05, + "loss": 0.1533, + "step": 608 + }, + { + "epoch": 0.4761532447224394, + "grad_norm": 0.43107107281684875, + "learning_rate": 9.430933590446244e-05, + "loss": 0.1394, + "step": 609 + }, + { + "epoch": 0.4769351055512119, + "grad_norm": 0.38342204689979553, + "learning_rate": 9.427767891037165e-05, + "loss": 0.1304, + "step": 610 + }, + { + "epoch": 0.47771696637998434, + "grad_norm": 0.44626930356025696, + "learning_rate": 9.424593945047906e-05, + "loss": 0.1435, + "step": 611 + }, + { + "epoch": 0.47849882720875686, + "grad_norm": 0.3243478834629059, + "learning_rate": 9.42141175838984e-05, + "loss": 0.1265, + "step": 612 + }, + { + "epoch": 0.4792806880375293, + "grad_norm": 0.3624386191368103, + "learning_rate": 9.418221336989695e-05, + "loss": 0.1165, + "step": 613 + }, + { + "epoch": 0.4800625488663018, + "grad_norm": 0.5078319907188416, + "learning_rate": 9.415022686789528e-05, + "loss": 0.1526, + "step": 614 + }, + { + "epoch": 0.4808444096950743, + "grad_norm": 0.3112984299659729, + "learning_rate": 9.411815813746726e-05, + "loss": 0.1167, + "step": 615 + }, + { + "epoch": 0.48162627052384677, + "grad_norm": 0.41424402594566345, + "learning_rate": 9.408600723833993e-05, + "loss": 0.1527, + "step": 616 + }, + { + "epoch": 0.48240813135261923, + "grad_norm": 0.5095820426940918, + "learning_rate": 9.405377423039331e-05, + "loss": 0.1354, + "step": 617 + }, + { + "epoch": 0.4831899921813917, + "grad_norm": 0.3398195505142212, + "learning_rate": 9.402145917366041e-05, + "loss": 0.1319, + "step": 618 + }, + { + "epoch": 0.4839718530101642, + "grad_norm": 0.3965277671813965, + "learning_rate": 9.398906212832699e-05, + "loss": 0.1279, + "step": 619 + }, + { + "epoch": 0.4847537138389367, + "grad_norm": 0.38243114948272705, + "learning_rate": 9.395658315473154e-05, + "loss": 0.1689, + "step": 620 + }, + { + "epoch": 0.48553557466770914, + "grad_norm": 0.3739413917064667, + "learning_rate": 9.392402231336518e-05, + 
"loss": 0.1593, + "step": 621 + }, + { + "epoch": 0.48631743549648165, + "grad_norm": 0.40374016761779785, + "learning_rate": 9.389137966487143e-05, + "loss": 0.1493, + "step": 622 + }, + { + "epoch": 0.4870992963252541, + "grad_norm": 0.3396798074245453, + "learning_rate": 9.38586552700462e-05, + "loss": 0.1698, + "step": 623 + }, + { + "epoch": 0.4878811571540266, + "grad_norm": 0.34499019384384155, + "learning_rate": 9.38258491898377e-05, + "loss": 0.1315, + "step": 624 + }, + { + "epoch": 0.48866301798279904, + "grad_norm": 0.4252553880214691, + "learning_rate": 9.379296148534619e-05, + "loss": 0.1666, + "step": 625 + }, + { + "epoch": 0.48944487881157156, + "grad_norm": 0.3969322144985199, + "learning_rate": 9.375999221782402e-05, + "loss": 0.1067, + "step": 626 + }, + { + "epoch": 0.490226739640344, + "grad_norm": 0.3985759913921356, + "learning_rate": 9.372694144867544e-05, + "loss": 0.1974, + "step": 627 + }, + { + "epoch": 0.4910086004691165, + "grad_norm": 0.3676362931728363, + "learning_rate": 9.369380923945645e-05, + "loss": 0.1118, + "step": 628 + }, + { + "epoch": 0.491790461297889, + "grad_norm": 0.35326021909713745, + "learning_rate": 9.36605956518748e-05, + "loss": 0.1567, + "step": 629 + }, + { + "epoch": 0.49257232212666147, + "grad_norm": 0.3549090623855591, + "learning_rate": 9.362730074778973e-05, + "loss": 0.1293, + "step": 630 + }, + { + "epoch": 0.49335418295543393, + "grad_norm": 0.378653347492218, + "learning_rate": 9.359392458921198e-05, + "loss": 0.1272, + "step": 631 + }, + { + "epoch": 0.4941360437842064, + "grad_norm": 0.32727909088134766, + "learning_rate": 9.356046723830361e-05, + "loss": 0.1499, + "step": 632 + }, + { + "epoch": 0.4949179046129789, + "grad_norm": 0.30692726373672485, + "learning_rate": 9.352692875737787e-05, + "loss": 0.1832, + "step": 633 + }, + { + "epoch": 0.49569976544175137, + "grad_norm": 0.43790024518966675, + "learning_rate": 9.349330920889919e-05, + "loss": 0.1534, + "step": 634 + }, + { + "epoch": 0.49648162627052383, + "grad_norm": 0.2864377200603485, + "learning_rate": 9.34596086554829e-05, + "loss": 0.131, + "step": 635 + }, + { + "epoch": 0.49726348709929635, + "grad_norm": 0.2927132844924927, + "learning_rate": 9.342582715989525e-05, + "loss": 0.1064, + "step": 636 + }, + { + "epoch": 0.4980453479280688, + "grad_norm": 0.2664753794670105, + "learning_rate": 9.339196478505321e-05, + "loss": 0.0869, + "step": 637 + }, + { + "epoch": 0.4988272087568413, + "grad_norm": 0.3081669509410858, + "learning_rate": 9.33580215940244e-05, + "loss": 0.1209, + "step": 638 + }, + { + "epoch": 0.49960906958561374, + "grad_norm": 0.33297160267829895, + "learning_rate": 9.332399765002698e-05, + "loss": 0.1227, + "step": 639 + }, + { + "epoch": 0.5003909304143862, + "grad_norm": 0.4005429148674011, + "learning_rate": 9.328989301642947e-05, + "loss": 0.1268, + "step": 640 + }, + { + "epoch": 0.5011727912431587, + "grad_norm": 0.33951929211616516, + "learning_rate": 9.32557077567507e-05, + "loss": 0.1343, + "step": 641 + }, + { + "epoch": 0.5019546520719312, + "grad_norm": 0.3339715898036957, + "learning_rate": 9.322144193465966e-05, + "loss": 0.1252, + "step": 642 + }, + { + "epoch": 0.5027365129007036, + "grad_norm": 0.300782710313797, + "learning_rate": 9.318709561397537e-05, + "loss": 0.1014, + "step": 643 + }, + { + "epoch": 0.5035183737294762, + "grad_norm": 0.32149744033813477, + "learning_rate": 9.315266885866678e-05, + "loss": 0.1622, + "step": 644 + }, + { + "epoch": 0.5043002345582487, + "grad_norm": 0.3509116470813751, + 
"learning_rate": 9.311816173285268e-05, + "loss": 0.1517, + "step": 645 + }, + { + "epoch": 0.5050820953870211, + "grad_norm": 0.36596521735191345, + "learning_rate": 9.308357430080148e-05, + "loss": 0.1331, + "step": 646 + }, + { + "epoch": 0.5058639562157936, + "grad_norm": 0.43650323152542114, + "learning_rate": 9.304890662693123e-05, + "loss": 0.1469, + "step": 647 + }, + { + "epoch": 0.506645817044566, + "grad_norm": 0.48330414295196533, + "learning_rate": 9.301415877580938e-05, + "loss": 0.1492, + "step": 648 + }, + { + "epoch": 0.5074276778733385, + "grad_norm": 0.3281579315662384, + "learning_rate": 9.297933081215273e-05, + "loss": 0.1025, + "step": 649 + }, + { + "epoch": 0.508209538702111, + "grad_norm": 0.41272157430648804, + "learning_rate": 9.294442280082726e-05, + "loss": 0.1362, + "step": 650 + }, + { + "epoch": 0.5089913995308835, + "grad_norm": 0.3451278805732727, + "learning_rate": 9.29094348068481e-05, + "loss": 0.1411, + "step": 651 + }, + { + "epoch": 0.509773260359656, + "grad_norm": 0.45731282234191895, + "learning_rate": 9.287436689537928e-05, + "loss": 0.147, + "step": 652 + }, + { + "epoch": 0.5105551211884285, + "grad_norm": 0.47909867763519287, + "learning_rate": 9.283921913173368e-05, + "loss": 0.157, + "step": 653 + }, + { + "epoch": 0.5113369820172009, + "grad_norm": 0.3986799716949463, + "learning_rate": 9.280399158137295e-05, + "loss": 0.1244, + "step": 654 + }, + { + "epoch": 0.5121188428459734, + "grad_norm": 0.41379353404045105, + "learning_rate": 9.276868430990726e-05, + "loss": 0.1102, + "step": 655 + }, + { + "epoch": 0.5129007036747459, + "grad_norm": 0.414865642786026, + "learning_rate": 9.273329738309536e-05, + "loss": 0.1186, + "step": 656 + }, + { + "epoch": 0.5136825645035183, + "grad_norm": 0.2847290337085724, + "learning_rate": 9.269783086684428e-05, + "loss": 0.1188, + "step": 657 + }, + { + "epoch": 0.5144644253322909, + "grad_norm": 0.357957124710083, + "learning_rate": 9.266228482720929e-05, + "loss": 0.1381, + "step": 658 + }, + { + "epoch": 0.5152462861610634, + "grad_norm": 0.46672505140304565, + "learning_rate": 9.262665933039381e-05, + "loss": 0.1521, + "step": 659 + }, + { + "epoch": 0.5160281469898358, + "grad_norm": 0.3336876630783081, + "learning_rate": 9.25909544427492e-05, + "loss": 0.1222, + "step": 660 + }, + { + "epoch": 0.5168100078186083, + "grad_norm": 0.4090306758880615, + "learning_rate": 9.255517023077472e-05, + "loss": 0.1269, + "step": 661 + }, + { + "epoch": 0.5175918686473807, + "grad_norm": 0.42170941829681396, + "learning_rate": 9.251930676111735e-05, + "loss": 0.1365, + "step": 662 + }, + { + "epoch": 0.5183737294761532, + "grad_norm": 0.28856468200683594, + "learning_rate": 9.248336410057168e-05, + "loss": 0.1134, + "step": 663 + }, + { + "epoch": 0.5191555903049258, + "grad_norm": 0.3271030783653259, + "learning_rate": 9.244734231607981e-05, + "loss": 0.1565, + "step": 664 + }, + { + "epoch": 0.5199374511336982, + "grad_norm": 0.43632957339286804, + "learning_rate": 9.24112414747312e-05, + "loss": 0.1448, + "step": 665 + }, + { + "epoch": 0.5207193119624707, + "grad_norm": 0.32105788588523865, + "learning_rate": 9.237506164376252e-05, + "loss": 0.1092, + "step": 666 + }, + { + "epoch": 0.5215011727912432, + "grad_norm": 0.3537161350250244, + "learning_rate": 9.233880289055761e-05, + "loss": 0.1269, + "step": 667 + }, + { + "epoch": 0.5222830336200156, + "grad_norm": 0.37390899658203125, + "learning_rate": 9.230246528264726e-05, + "loss": 0.1456, + "step": 668 + }, + { + "epoch": 0.5230648944487881, + 
"grad_norm": 0.43096497654914856, + "learning_rate": 9.226604888770911e-05, + "loss": 0.1593, + "step": 669 + }, + { + "epoch": 0.5238467552775606, + "grad_norm": 0.34627631306648254, + "learning_rate": 9.222955377356761e-05, + "loss": 0.1423, + "step": 670 + }, + { + "epoch": 0.524628616106333, + "grad_norm": 0.3825629949569702, + "learning_rate": 9.219298000819376e-05, + "loss": 0.142, + "step": 671 + }, + { + "epoch": 0.5254104769351056, + "grad_norm": 0.338356614112854, + "learning_rate": 9.215632765970505e-05, + "loss": 0.1156, + "step": 672 + }, + { + "epoch": 0.5261923377638781, + "grad_norm": 0.3668399155139923, + "learning_rate": 9.211959679636535e-05, + "loss": 0.1801, + "step": 673 + }, + { + "epoch": 0.5269741985926505, + "grad_norm": 0.365161657333374, + "learning_rate": 9.208278748658476e-05, + "loss": 0.1168, + "step": 674 + }, + { + "epoch": 0.527756059421423, + "grad_norm": 0.4602578580379486, + "learning_rate": 9.204589979891946e-05, + "loss": 0.1381, + "step": 675 + }, + { + "epoch": 0.5285379202501954, + "grad_norm": 0.30687180161476135, + "learning_rate": 9.200893380207164e-05, + "loss": 0.1067, + "step": 676 + }, + { + "epoch": 0.5293197810789679, + "grad_norm": 0.2944718301296234, + "learning_rate": 9.197188956488931e-05, + "loss": 0.1255, + "step": 677 + }, + { + "epoch": 0.5301016419077405, + "grad_norm": 0.42425209283828735, + "learning_rate": 9.19347671563662e-05, + "loss": 0.1514, + "step": 678 + }, + { + "epoch": 0.5308835027365129, + "grad_norm": 0.33718201518058777, + "learning_rate": 9.189756664564167e-05, + "loss": 0.1645, + "step": 679 + }, + { + "epoch": 0.5316653635652854, + "grad_norm": 0.31876128911972046, + "learning_rate": 9.186028810200049e-05, + "loss": 0.1216, + "step": 680 + }, + { + "epoch": 0.5324472243940579, + "grad_norm": 0.3542174994945526, + "learning_rate": 9.182293159487281e-05, + "loss": 0.1186, + "step": 681 + }, + { + "epoch": 0.5332290852228303, + "grad_norm": 0.3566511571407318, + "learning_rate": 9.178549719383396e-05, + "loss": 0.1061, + "step": 682 + }, + { + "epoch": 0.5340109460516028, + "grad_norm": 0.3575066030025482, + "learning_rate": 9.174798496860433e-05, + "loss": 0.0926, + "step": 683 + }, + { + "epoch": 0.5347928068803753, + "grad_norm": 0.3495314121246338, + "learning_rate": 9.17103949890493e-05, + "loss": 0.1454, + "step": 684 + }, + { + "epoch": 0.5355746677091477, + "grad_norm": 0.37512341141700745, + "learning_rate": 9.167272732517903e-05, + "loss": 0.1329, + "step": 685 + }, + { + "epoch": 0.5363565285379203, + "grad_norm": 0.3389855921268463, + "learning_rate": 9.163498204714838e-05, + "loss": 0.1467, + "step": 686 + }, + { + "epoch": 0.5371383893666928, + "grad_norm": 0.40289631485939026, + "learning_rate": 9.159715922525673e-05, + "loss": 0.126, + "step": 687 + }, + { + "epoch": 0.5379202501954652, + "grad_norm": 0.31818968057632446, + "learning_rate": 9.155925892994794e-05, + "loss": 0.1571, + "step": 688 + }, + { + "epoch": 0.5387021110242377, + "grad_norm": 0.34707212448120117, + "learning_rate": 9.152128123181013e-05, + "loss": 0.1359, + "step": 689 + }, + { + "epoch": 0.5394839718530101, + "grad_norm": 0.46505051851272583, + "learning_rate": 9.148322620157558e-05, + "loss": 0.1432, + "step": 690 + }, + { + "epoch": 0.5402658326817826, + "grad_norm": 0.4175328016281128, + "learning_rate": 9.14450939101206e-05, + "loss": 0.1469, + "step": 691 + }, + { + "epoch": 0.5410476935105551, + "grad_norm": 0.3285068869590759, + "learning_rate": 9.140688442846539e-05, + "loss": 0.1264, + "step": 692 + }, + { + 
"epoch": 0.5418295543393276, + "grad_norm": 0.2715577483177185, + "learning_rate": 9.136859782777394e-05, + "loss": 0.1005, + "step": 693 + }, + { + "epoch": 0.5426114151681001, + "grad_norm": 0.33066704869270325, + "learning_rate": 9.133023417935384e-05, + "loss": 0.1466, + "step": 694 + }, + { + "epoch": 0.5433932759968726, + "grad_norm": 0.3862379491329193, + "learning_rate": 9.129179355465621e-05, + "loss": 0.156, + "step": 695 + }, + { + "epoch": 0.544175136825645, + "grad_norm": 0.5101954340934753, + "learning_rate": 9.125327602527551e-05, + "loss": 0.1619, + "step": 696 + }, + { + "epoch": 0.5449569976544175, + "grad_norm": 0.41626206040382385, + "learning_rate": 9.121468166294945e-05, + "loss": 0.1543, + "step": 697 + }, + { + "epoch": 0.54573885848319, + "grad_norm": 0.48254093527793884, + "learning_rate": 9.11760105395588e-05, + "loss": 0.1771, + "step": 698 + }, + { + "epoch": 0.5465207193119624, + "grad_norm": 0.44880276918411255, + "learning_rate": 9.113726272712734e-05, + "loss": 0.1401, + "step": 699 + }, + { + "epoch": 0.547302580140735, + "grad_norm": 0.3268358111381531, + "learning_rate": 9.10984382978217e-05, + "loss": 0.1429, + "step": 700 + }, + { + "epoch": 0.5480844409695075, + "grad_norm": 0.41424036026000977, + "learning_rate": 9.105953732395116e-05, + "loss": 0.1199, + "step": 701 + }, + { + "epoch": 0.5488663017982799, + "grad_norm": 0.5000854134559631, + "learning_rate": 9.102055987796755e-05, + "loss": 0.1374, + "step": 702 + }, + { + "epoch": 0.5496481626270524, + "grad_norm": 0.3157268464565277, + "learning_rate": 9.098150603246517e-05, + "loss": 0.1651, + "step": 703 + }, + { + "epoch": 0.5504300234558248, + "grad_norm": 0.45736557245254517, + "learning_rate": 9.09423758601806e-05, + "loss": 0.1301, + "step": 704 + }, + { + "epoch": 0.5512118842845973, + "grad_norm": 0.43551793694496155, + "learning_rate": 9.090316943399255e-05, + "loss": 0.1556, + "step": 705 + }, + { + "epoch": 0.5519937451133698, + "grad_norm": 0.3782845139503479, + "learning_rate": 9.08638868269218e-05, + "loss": 0.1131, + "step": 706 + }, + { + "epoch": 0.5527756059421423, + "grad_norm": 0.5090707540512085, + "learning_rate": 9.082452811213095e-05, + "loss": 0.1539, + "step": 707 + }, + { + "epoch": 0.5535574667709148, + "grad_norm": 0.33386969566345215, + "learning_rate": 9.078509336292443e-05, + "loss": 0.1171, + "step": 708 + }, + { + "epoch": 0.5543393275996873, + "grad_norm": 0.3908303678035736, + "learning_rate": 9.074558265274819e-05, + "loss": 0.1643, + "step": 709 + }, + { + "epoch": 0.5551211884284597, + "grad_norm": 0.28425249457359314, + "learning_rate": 9.070599605518974e-05, + "loss": 0.0953, + "step": 710 + }, + { + "epoch": 0.5559030492572322, + "grad_norm": 0.327131062746048, + "learning_rate": 9.066633364397786e-05, + "loss": 0.0948, + "step": 711 + }, + { + "epoch": 0.5566849100860047, + "grad_norm": 0.37248674035072327, + "learning_rate": 9.062659549298256e-05, + "loss": 0.1285, + "step": 712 + }, + { + "epoch": 0.5574667709147771, + "grad_norm": 0.3073717951774597, + "learning_rate": 9.058678167621493e-05, + "loss": 0.1424, + "step": 713 + }, + { + "epoch": 0.5582486317435497, + "grad_norm": 0.44110536575317383, + "learning_rate": 9.054689226782695e-05, + "loss": 0.1259, + "step": 714 + }, + { + "epoch": 0.5590304925723222, + "grad_norm": 0.3249659240245819, + "learning_rate": 9.050692734211142e-05, + "loss": 0.1455, + "step": 715 + }, + { + "epoch": 0.5598123534010946, + "grad_norm": 0.5413174629211426, + "learning_rate": 9.046688697350174e-05, + "loss": 0.1458, + 
"step": 716 + }, + { + "epoch": 0.5605942142298671, + "grad_norm": 0.3194544017314911, + "learning_rate": 9.042677123657191e-05, + "loss": 0.0932, + "step": 717 + }, + { + "epoch": 0.5613760750586395, + "grad_norm": 0.3347417712211609, + "learning_rate": 9.03865802060362e-05, + "loss": 0.1552, + "step": 718 + }, + { + "epoch": 0.562157935887412, + "grad_norm": 0.32833972573280334, + "learning_rate": 9.034631395674917e-05, + "loss": 0.1381, + "step": 719 + }, + { + "epoch": 0.5629397967161845, + "grad_norm": 0.46047019958496094, + "learning_rate": 9.030597256370544e-05, + "loss": 0.1887, + "step": 720 + }, + { + "epoch": 0.563721657544957, + "grad_norm": 0.48033323884010315, + "learning_rate": 9.026555610203964e-05, + "loss": 0.1406, + "step": 721 + }, + { + "epoch": 0.5645035183737295, + "grad_norm": 0.3175593316555023, + "learning_rate": 9.022506464702613e-05, + "loss": 0.1145, + "step": 722 + }, + { + "epoch": 0.565285379202502, + "grad_norm": 0.37334251403808594, + "learning_rate": 9.018449827407905e-05, + "loss": 0.1546, + "step": 723 + }, + { + "epoch": 0.5660672400312744, + "grad_norm": 0.4068865180015564, + "learning_rate": 9.014385705875192e-05, + "loss": 0.1292, + "step": 724 + }, + { + "epoch": 0.5668491008600469, + "grad_norm": 0.5138534903526306, + "learning_rate": 9.010314107673783e-05, + "loss": 0.1815, + "step": 725 + }, + { + "epoch": 0.5676309616888194, + "grad_norm": 0.4154388904571533, + "learning_rate": 9.006235040386897e-05, + "loss": 0.1455, + "step": 726 + }, + { + "epoch": 0.5684128225175918, + "grad_norm": 0.40405139327049255, + "learning_rate": 9.002148511611675e-05, + "loss": 0.1154, + "step": 727 + }, + { + "epoch": 0.5691946833463644, + "grad_norm": 0.30438432097435, + "learning_rate": 8.998054528959145e-05, + "loss": 0.1383, + "step": 728 + }, + { + "epoch": 0.5699765441751369, + "grad_norm": 0.31808093190193176, + "learning_rate": 8.993953100054224e-05, + "loss": 0.148, + "step": 729 + }, + { + "epoch": 0.5707584050039093, + "grad_norm": 0.4289185702800751, + "learning_rate": 8.989844232535699e-05, + "loss": 0.1508, + "step": 730 + }, + { + "epoch": 0.5715402658326818, + "grad_norm": 0.4019654393196106, + "learning_rate": 8.985727934056207e-05, + "loss": 0.114, + "step": 731 + }, + { + "epoch": 0.5723221266614542, + "grad_norm": 0.4127427339553833, + "learning_rate": 8.981604212282222e-05, + "loss": 0.1474, + "step": 732 + }, + { + "epoch": 0.5731039874902267, + "grad_norm": 0.45876824855804443, + "learning_rate": 8.977473074894052e-05, + "loss": 0.1567, + "step": 733 + }, + { + "epoch": 0.5738858483189992, + "grad_norm": 0.32999297976493835, + "learning_rate": 8.973334529585813e-05, + "loss": 0.1318, + "step": 734 + }, + { + "epoch": 0.5746677091477717, + "grad_norm": 0.378089964389801, + "learning_rate": 8.969188584065412e-05, + "loss": 0.1084, + "step": 735 + }, + { + "epoch": 0.5754495699765442, + "grad_norm": 0.32786768674850464, + "learning_rate": 8.965035246054549e-05, + "loss": 0.0991, + "step": 736 + }, + { + "epoch": 0.5762314308053167, + "grad_norm": 0.31518277525901794, + "learning_rate": 8.960874523288683e-05, + "loss": 0.1247, + "step": 737 + }, + { + "epoch": 0.5770132916340891, + "grad_norm": 0.2880820035934448, + "learning_rate": 8.956706423517034e-05, + "loss": 0.0881, + "step": 738 + }, + { + "epoch": 0.5777951524628616, + "grad_norm": 0.4550432860851288, + "learning_rate": 8.952530954502557e-05, + "loss": 0.1425, + "step": 739 + }, + { + "epoch": 0.5785770132916341, + "grad_norm": 0.29489225149154663, + "learning_rate": 
8.948348124021933e-05, + "loss": 0.0933, + "step": 740 + }, + { + "epoch": 0.5793588741204065, + "grad_norm": 0.4262470304965973, + "learning_rate": 8.944157939865556e-05, + "loss": 0.127, + "step": 741 + }, + { + "epoch": 0.5801407349491791, + "grad_norm": 0.32060596346855164, + "learning_rate": 8.93996040983751e-05, + "loss": 0.1457, + "step": 742 + }, + { + "epoch": 0.5809225957779516, + "grad_norm": 0.325571209192276, + "learning_rate": 8.935755541755569e-05, + "loss": 0.1188, + "step": 743 + }, + { + "epoch": 0.581704456606724, + "grad_norm": 0.3205192983150482, + "learning_rate": 8.931543343451162e-05, + "loss": 0.114, + "step": 744 + }, + { + "epoch": 0.5824863174354965, + "grad_norm": 0.36551687121391296, + "learning_rate": 8.927323822769386e-05, + "loss": 0.1435, + "step": 745 + }, + { + "epoch": 0.5832681782642689, + "grad_norm": 0.4025515615940094, + "learning_rate": 8.923096987568965e-05, + "loss": 0.1171, + "step": 746 + }, + { + "epoch": 0.5840500390930414, + "grad_norm": 0.46686384081840515, + "learning_rate": 8.918862845722243e-05, + "loss": 0.1771, + "step": 747 + }, + { + "epoch": 0.584831899921814, + "grad_norm": 0.3949642777442932, + "learning_rate": 8.914621405115185e-05, + "loss": 0.1385, + "step": 748 + }, + { + "epoch": 0.5856137607505864, + "grad_norm": 0.37240198254585266, + "learning_rate": 8.910372673647336e-05, + "loss": 0.1211, + "step": 749 + }, + { + "epoch": 0.5863956215793589, + "grad_norm": 0.4359165132045746, + "learning_rate": 8.906116659231829e-05, + "loss": 0.1488, + "step": 750 + }, + { + "epoch": 0.5871774824081314, + "grad_norm": 0.4202546775341034, + "learning_rate": 8.901853369795361e-05, + "loss": 0.1468, + "step": 751 + }, + { + "epoch": 0.5879593432369038, + "grad_norm": 0.4482746124267578, + "learning_rate": 8.897582813278173e-05, + "loss": 0.1645, + "step": 752 + }, + { + "epoch": 0.5887412040656763, + "grad_norm": 0.4343931972980499, + "learning_rate": 8.893304997634045e-05, + "loss": 0.1803, + "step": 753 + }, + { + "epoch": 0.5895230648944488, + "grad_norm": 0.3664369285106659, + "learning_rate": 8.889019930830276e-05, + "loss": 0.122, + "step": 754 + }, + { + "epoch": 0.5903049257232212, + "grad_norm": 0.3301883637905121, + "learning_rate": 8.884727620847669e-05, + "loss": 0.1462, + "step": 755 + }, + { + "epoch": 0.5910867865519938, + "grad_norm": 0.4846368432044983, + "learning_rate": 8.88042807568052e-05, + "loss": 0.1361, + "step": 756 + }, + { + "epoch": 0.5918686473807663, + "grad_norm": 0.42507967352867126, + "learning_rate": 8.876121303336596e-05, + "loss": 0.1647, + "step": 757 + }, + { + "epoch": 0.5926505082095387, + "grad_norm": 0.2991779148578644, + "learning_rate": 8.871807311837128e-05, + "loss": 0.1248, + "step": 758 + }, + { + "epoch": 0.5934323690383112, + "grad_norm": 0.28716185688972473, + "learning_rate": 8.867486109216789e-05, + "loss": 0.097, + "step": 759 + }, + { + "epoch": 0.5942142298670836, + "grad_norm": 0.3456084430217743, + "learning_rate": 8.863157703523689e-05, + "loss": 0.124, + "step": 760 + }, + { + "epoch": 0.5949960906958561, + "grad_norm": 0.3598391115665436, + "learning_rate": 8.858822102819347e-05, + "loss": 0.1193, + "step": 761 + }, + { + "epoch": 0.5957779515246286, + "grad_norm": 0.42036497592926025, + "learning_rate": 8.854479315178681e-05, + "loss": 0.1636, + "step": 762 + }, + { + "epoch": 0.596559812353401, + "grad_norm": 0.3865693211555481, + "learning_rate": 8.850129348690004e-05, + "loss": 0.1576, + "step": 763 + }, + { + "epoch": 0.5973416731821736, + "grad_norm": 0.425310343503952, + 
"learning_rate": 8.845772211454992e-05, + "loss": 0.1141, + "step": 764 + }, + { + "epoch": 0.5981235340109461, + "grad_norm": 0.37581977248191833, + "learning_rate": 8.841407911588675e-05, + "loss": 0.1325, + "step": 765 + }, + { + "epoch": 0.5989053948397185, + "grad_norm": 0.2963871955871582, + "learning_rate": 8.837036457219427e-05, + "loss": 0.1279, + "step": 766 + }, + { + "epoch": 0.599687255668491, + "grad_norm": 0.4006364643573761, + "learning_rate": 8.832657856488949e-05, + "loss": 0.1468, + "step": 767 + }, + { + "epoch": 0.6004691164972635, + "grad_norm": 0.381542831659317, + "learning_rate": 8.828272117552245e-05, + "loss": 0.1094, + "step": 768 + }, + { + "epoch": 0.6012509773260359, + "grad_norm": 0.3772754371166229, + "learning_rate": 8.82387924857762e-05, + "loss": 0.1445, + "step": 769 + }, + { + "epoch": 0.6020328381548085, + "grad_norm": 0.34870976209640503, + "learning_rate": 8.819479257746655e-05, + "loss": 0.1258, + "step": 770 + }, + { + "epoch": 0.602814698983581, + "grad_norm": 0.28418296575546265, + "learning_rate": 8.815072153254195e-05, + "loss": 0.1116, + "step": 771 + }, + { + "epoch": 0.6035965598123534, + "grad_norm": 0.36682528257369995, + "learning_rate": 8.810657943308338e-05, + "loss": 0.1149, + "step": 772 + }, + { + "epoch": 0.6043784206411259, + "grad_norm": 0.3400062918663025, + "learning_rate": 8.806236636130411e-05, + "loss": 0.1356, + "step": 773 + }, + { + "epoch": 0.6051602814698983, + "grad_norm": 0.35574647784233093, + "learning_rate": 8.801808239954962e-05, + "loss": 0.1257, + "step": 774 + }, + { + "epoch": 0.6059421422986708, + "grad_norm": 0.3617613911628723, + "learning_rate": 8.797372763029742e-05, + "loss": 0.1146, + "step": 775 + }, + { + "epoch": 0.6067240031274433, + "grad_norm": 0.3895418345928192, + "learning_rate": 8.792930213615688e-05, + "loss": 0.0915, + "step": 776 + }, + { + "epoch": 0.6075058639562158, + "grad_norm": 0.42234644293785095, + "learning_rate": 8.78848059998691e-05, + "loss": 0.1145, + "step": 777 + }, + { + "epoch": 0.6082877247849883, + "grad_norm": 0.37372446060180664, + "learning_rate": 8.784023930430676e-05, + "loss": 0.1108, + "step": 778 + }, + { + "epoch": 0.6090695856137608, + "grad_norm": 0.4088364243507385, + "learning_rate": 8.779560213247395e-05, + "loss": 0.1355, + "step": 779 + }, + { + "epoch": 0.6098514464425332, + "grad_norm": 0.4049277603626251, + "learning_rate": 8.7750894567506e-05, + "loss": 0.142, + "step": 780 + }, + { + "epoch": 0.6106333072713057, + "grad_norm": 0.3075539469718933, + "learning_rate": 8.770611669266938e-05, + "loss": 0.1148, + "step": 781 + }, + { + "epoch": 0.6114151681000782, + "grad_norm": 0.399598091840744, + "learning_rate": 8.766126859136147e-05, + "loss": 0.129, + "step": 782 + }, + { + "epoch": 0.6121970289288506, + "grad_norm": 0.3626682758331299, + "learning_rate": 8.76163503471105e-05, + "loss": 0.1815, + "step": 783 + }, + { + "epoch": 0.6129788897576232, + "grad_norm": 0.3200612962245941, + "learning_rate": 8.757136204357527e-05, + "loss": 0.1188, + "step": 784 + }, + { + "epoch": 0.6137607505863957, + "grad_norm": 0.37934356927871704, + "learning_rate": 8.752630376454511e-05, + "loss": 0.0982, + "step": 785 + }, + { + "epoch": 0.6145426114151681, + "grad_norm": 0.3180946707725525, + "learning_rate": 8.748117559393967e-05, + "loss": 0.1988, + "step": 786 + }, + { + "epoch": 0.6153244722439406, + "grad_norm": 0.33251526951789856, + "learning_rate": 8.743597761580877e-05, + "loss": 0.1322, + "step": 787 + }, + { + "epoch": 0.616106333072713, + "grad_norm": 
0.38982832431793213, + "learning_rate": 8.739070991433222e-05, + "loss": 0.0903, + "step": 788 + }, + { + "epoch": 0.6168881939014855, + "grad_norm": 0.3586364984512329, + "learning_rate": 8.734537257381973e-05, + "loss": 0.1362, + "step": 789 + }, + { + "epoch": 0.617670054730258, + "grad_norm": 0.42748382687568665, + "learning_rate": 8.729996567871068e-05, + "loss": 0.1566, + "step": 790 + }, + { + "epoch": 0.6184519155590305, + "grad_norm": 0.29496240615844727, + "learning_rate": 8.7254489313574e-05, + "loss": 0.1388, + "step": 791 + }, + { + "epoch": 0.619233776387803, + "grad_norm": 0.35175201296806335, + "learning_rate": 8.7208943563108e-05, + "loss": 0.1613, + "step": 792 + }, + { + "epoch": 0.6200156372165755, + "grad_norm": 0.3887656629085541, + "learning_rate": 8.716332851214024e-05, + "loss": 0.1116, + "step": 793 + }, + { + "epoch": 0.6207974980453479, + "grad_norm": 0.32240673899650574, + "learning_rate": 8.711764424562735e-05, + "loss": 0.1502, + "step": 794 + }, + { + "epoch": 0.6215793588741204, + "grad_norm": 0.3740167021751404, + "learning_rate": 8.707189084865481e-05, + "loss": 0.1332, + "step": 795 + }, + { + "epoch": 0.6223612197028929, + "grad_norm": 0.35816818475723267, + "learning_rate": 8.702606840643694e-05, + "loss": 0.1112, + "step": 796 + }, + { + "epoch": 0.6231430805316653, + "grad_norm": 0.33102670311927795, + "learning_rate": 8.698017700431662e-05, + "loss": 0.1437, + "step": 797 + }, + { + "epoch": 0.6239249413604379, + "grad_norm": 0.42163562774658203, + "learning_rate": 8.693421672776513e-05, + "loss": 0.1285, + "step": 798 + }, + { + "epoch": 0.6247068021892104, + "grad_norm": 0.2662132680416107, + "learning_rate": 8.688818766238208e-05, + "loss": 0.1039, + "step": 799 + }, + { + "epoch": 0.6254886630179828, + "grad_norm": 0.3629731237888336, + "learning_rate": 8.684208989389518e-05, + "loss": 0.1457, + "step": 800 + }, + { + "epoch": 0.6254886630179828, + "eval_loss": 0.14057856798171997, + "eval_runtime": 13.4351, + "eval_samples_per_second": 3.87, + "eval_steps_per_second": 0.968, + "step": 800 + }, + { + "epoch": 0.6262705238467553, + "grad_norm": 0.3074587285518646, + "learning_rate": 8.679592350816007e-05, + "loss": 0.1022, + "step": 801 + }, + { + "epoch": 0.6270523846755277, + "grad_norm": 0.3629996180534363, + "learning_rate": 8.67496885911602e-05, + "loss": 0.1427, + "step": 802 + }, + { + "epoch": 0.6278342455043002, + "grad_norm": 0.3123893439769745, + "learning_rate": 8.670338522900672e-05, + "loss": 0.1133, + "step": 803 + }, + { + "epoch": 0.6286161063330727, + "grad_norm": 0.3146839141845703, + "learning_rate": 8.665701350793816e-05, + "loss": 0.123, + "step": 804 + }, + { + "epoch": 0.6293979671618452, + "grad_norm": 0.40664559602737427, + "learning_rate": 8.66105735143204e-05, + "loss": 0.1416, + "step": 805 + }, + { + "epoch": 0.6301798279906177, + "grad_norm": 0.528860867023468, + "learning_rate": 8.656406533464654e-05, + "loss": 0.149, + "step": 806 + }, + { + "epoch": 0.6309616888193902, + "grad_norm": 0.317931592464447, + "learning_rate": 8.651748905553656e-05, + "loss": 0.1103, + "step": 807 + }, + { + "epoch": 0.6317435496481626, + "grad_norm": 0.40084466338157654, + "learning_rate": 8.647084476373737e-05, + "loss": 0.1439, + "step": 808 + }, + { + "epoch": 0.6325254104769351, + "grad_norm": 0.4050627052783966, + "learning_rate": 8.642413254612251e-05, + "loss": 0.1535, + "step": 809 + }, + { + "epoch": 0.6333072713057076, + "grad_norm": 0.39978843927383423, + "learning_rate": 8.637735248969203e-05, + "loss": 0.1434, + "step": 
810 + }, + { + "epoch": 0.63408913213448, + "grad_norm": 0.41895702481269836, + "learning_rate": 8.633050468157234e-05, + "loss": 0.1349, + "step": 811 + }, + { + "epoch": 0.6348709929632526, + "grad_norm": 0.3688557744026184, + "learning_rate": 8.628358920901601e-05, + "loss": 0.1796, + "step": 812 + }, + { + "epoch": 0.6356528537920251, + "grad_norm": 0.3450753390789032, + "learning_rate": 8.623660615940165e-05, + "loss": 0.104, + "step": 813 + }, + { + "epoch": 0.6364347146207975, + "grad_norm": 0.39041540026664734, + "learning_rate": 8.618955562023378e-05, + "loss": 0.1186, + "step": 814 + }, + { + "epoch": 0.63721657544957, + "grad_norm": 0.3215588629245758, + "learning_rate": 8.61424376791425e-05, + "loss": 0.1145, + "step": 815 + }, + { + "epoch": 0.6379984362783424, + "grad_norm": 0.3149644732475281, + "learning_rate": 8.609525242388355e-05, + "loss": 0.1103, + "step": 816 + }, + { + "epoch": 0.6387802971071149, + "grad_norm": 0.3476389944553375, + "learning_rate": 8.604799994233798e-05, + "loss": 0.1299, + "step": 817 + }, + { + "epoch": 0.6395621579358874, + "grad_norm": 0.36129891872406006, + "learning_rate": 8.60006803225121e-05, + "loss": 0.1372, + "step": 818 + }, + { + "epoch": 0.6403440187646599, + "grad_norm": 0.3266359865665436, + "learning_rate": 8.595329365253719e-05, + "loss": 0.0957, + "step": 819 + }, + { + "epoch": 0.6411258795934324, + "grad_norm": 0.4800041913986206, + "learning_rate": 8.590584002066949e-05, + "loss": 0.1409, + "step": 820 + }, + { + "epoch": 0.6419077404222049, + "grad_norm": 0.34764230251312256, + "learning_rate": 8.585831951528991e-05, + "loss": 0.1204, + "step": 821 + }, + { + "epoch": 0.6426896012509773, + "grad_norm": 0.45669716596603394, + "learning_rate": 8.58107322249039e-05, + "loss": 0.1792, + "step": 822 + }, + { + "epoch": 0.6434714620797498, + "grad_norm": 0.3573746681213379, + "learning_rate": 8.576307823814132e-05, + "loss": 0.1175, + "step": 823 + }, + { + "epoch": 0.6442533229085223, + "grad_norm": 0.4184928238391876, + "learning_rate": 8.571535764375625e-05, + "loss": 0.1392, + "step": 824 + }, + { + "epoch": 0.6450351837372947, + "grad_norm": 0.4077909588813782, + "learning_rate": 8.566757053062678e-05, + "loss": 0.1322, + "step": 825 + }, + { + "epoch": 0.6458170445660673, + "grad_norm": 0.2542969584465027, + "learning_rate": 8.561971698775496e-05, + "loss": 0.0923, + "step": 826 + }, + { + "epoch": 0.6465989053948398, + "grad_norm": 0.3500221073627472, + "learning_rate": 8.55717971042665e-05, + "loss": 0.123, + "step": 827 + }, + { + "epoch": 0.6473807662236122, + "grad_norm": 0.3936273753643036, + "learning_rate": 8.552381096941074e-05, + "loss": 0.1216, + "step": 828 + }, + { + "epoch": 0.6481626270523847, + "grad_norm": 0.37407663464546204, + "learning_rate": 8.54757586725603e-05, + "loss": 0.1401, + "step": 829 + }, + { + "epoch": 0.6489444878811571, + "grad_norm": 0.3651149868965149, + "learning_rate": 8.542764030321115e-05, + "loss": 0.1006, + "step": 830 + }, + { + "epoch": 0.6497263487099296, + "grad_norm": 0.3681272268295288, + "learning_rate": 8.537945595098222e-05, + "loss": 0.1314, + "step": 831 + }, + { + "epoch": 0.6505082095387021, + "grad_norm": 0.37228938937187195, + "learning_rate": 8.53312057056154e-05, + "loss": 0.1523, + "step": 832 + }, + { + "epoch": 0.6512900703674745, + "grad_norm": 0.2094591110944748, + "learning_rate": 8.528288965697523e-05, + "loss": 0.0931, + "step": 833 + }, + { + "epoch": 0.6520719311962471, + "grad_norm": 0.39157184958457947, + "learning_rate": 8.52345078950489e-05, + "loss": 
0.1123, + "step": 834 + }, + { + "epoch": 0.6528537920250196, + "grad_norm": 0.34393438696861267, + "learning_rate": 8.518606050994591e-05, + "loss": 0.1333, + "step": 835 + }, + { + "epoch": 0.653635652853792, + "grad_norm": 0.2535848021507263, + "learning_rate": 8.513754759189801e-05, + "loss": 0.0982, + "step": 836 + }, + { + "epoch": 0.6544175136825645, + "grad_norm": 0.3018800914287567, + "learning_rate": 8.508896923125901e-05, + "loss": 0.1341, + "step": 837 + }, + { + "epoch": 0.655199374511337, + "grad_norm": 0.3368026316165924, + "learning_rate": 8.50403255185046e-05, + "loss": 0.1357, + "step": 838 + }, + { + "epoch": 0.6559812353401094, + "grad_norm": 0.5133249163627625, + "learning_rate": 8.499161654423219e-05, + "loss": 0.139, + "step": 839 + }, + { + "epoch": 0.656763096168882, + "grad_norm": 0.36635470390319824, + "learning_rate": 8.494284239916071e-05, + "loss": 0.1016, + "step": 840 + }, + { + "epoch": 0.6575449569976545, + "grad_norm": 0.2705834209918976, + "learning_rate": 8.48940031741305e-05, + "loss": 0.0975, + "step": 841 + }, + { + "epoch": 0.6583268178264269, + "grad_norm": 0.4135679602622986, + "learning_rate": 8.484509896010311e-05, + "loss": 0.1148, + "step": 842 + }, + { + "epoch": 0.6591086786551994, + "grad_norm": 0.34528687596321106, + "learning_rate": 8.479612984816112e-05, + "loss": 0.1309, + "step": 843 + }, + { + "epoch": 0.6598905394839718, + "grad_norm": 0.33708104491233826, + "learning_rate": 8.474709592950798e-05, + "loss": 0.1598, + "step": 844 + }, + { + "epoch": 0.6606724003127443, + "grad_norm": 0.2598781883716583, + "learning_rate": 8.469799729546781e-05, + "loss": 0.0945, + "step": 845 + }, + { + "epoch": 0.6614542611415168, + "grad_norm": 0.32785603404045105, + "learning_rate": 8.464883403748534e-05, + "loss": 0.2023, + "step": 846 + }, + { + "epoch": 0.6622361219702892, + "grad_norm": 0.3188624978065491, + "learning_rate": 8.459960624712556e-05, + "loss": 0.1078, + "step": 847 + }, + { + "epoch": 0.6630179827990618, + "grad_norm": 0.36854130029678345, + "learning_rate": 8.455031401607375e-05, + "loss": 0.1239, + "step": 848 + }, + { + "epoch": 0.6637998436278343, + "grad_norm": 0.32640159130096436, + "learning_rate": 8.450095743613512e-05, + "loss": 0.1128, + "step": 849 + }, + { + "epoch": 0.6645817044566067, + "grad_norm": 0.4553893208503723, + "learning_rate": 8.44515365992348e-05, + "loss": 0.1313, + "step": 850 + }, + { + "epoch": 0.6653635652853792, + "grad_norm": 0.3296887278556824, + "learning_rate": 8.440205159741752e-05, + "loss": 0.1455, + "step": 851 + }, + { + "epoch": 0.6661454261141517, + "grad_norm": 0.30615031719207764, + "learning_rate": 8.435250252284762e-05, + "loss": 0.1123, + "step": 852 + }, + { + "epoch": 0.6669272869429241, + "grad_norm": 0.4171629548072815, + "learning_rate": 8.430288946780865e-05, + "loss": 0.1156, + "step": 853 + }, + { + "epoch": 0.6677091477716967, + "grad_norm": 0.42535823583602905, + "learning_rate": 8.425321252470342e-05, + "loss": 0.1247, + "step": 854 + }, + { + "epoch": 0.6684910086004691, + "grad_norm": 0.412070095539093, + "learning_rate": 8.420347178605367e-05, + "loss": 0.1028, + "step": 855 + }, + { + "epoch": 0.6692728694292416, + "grad_norm": 0.3225451707839966, + "learning_rate": 8.415366734450003e-05, + "loss": 0.1206, + "step": 856 + }, + { + "epoch": 0.6700547302580141, + "grad_norm": 0.4608080983161926, + "learning_rate": 8.410379929280168e-05, + "loss": 0.1485, + "step": 857 + }, + { + "epoch": 0.6708365910867865, + "grad_norm": 0.3869389295578003, + "learning_rate": 
8.405386772383634e-05, + "loss": 0.1591, + "step": 858 + }, + { + "epoch": 0.671618451915559, + "grad_norm": 0.35010966658592224, + "learning_rate": 8.400387273059998e-05, + "loss": 0.1151, + "step": 859 + }, + { + "epoch": 0.6724003127443315, + "grad_norm": 0.4299210011959076, + "learning_rate": 8.395381440620674e-05, + "loss": 0.1223, + "step": 860 + }, + { + "epoch": 0.673182173573104, + "grad_norm": 0.39979061484336853, + "learning_rate": 8.39036928438887e-05, + "loss": 0.1436, + "step": 861 + }, + { + "epoch": 0.6739640344018765, + "grad_norm": 0.3123779892921448, + "learning_rate": 8.385350813699571e-05, + "loss": 0.1301, + "step": 862 + }, + { + "epoch": 0.674745895230649, + "grad_norm": 0.4949374496936798, + "learning_rate": 8.380326037899522e-05, + "loss": 0.172, + "step": 863 + }, + { + "epoch": 0.6755277560594214, + "grad_norm": 0.3271320164203644, + "learning_rate": 8.375294966347212e-05, + "loss": 0.1269, + "step": 864 + }, + { + "epoch": 0.6763096168881939, + "grad_norm": 0.33784911036491394, + "learning_rate": 8.370257608412857e-05, + "loss": 0.1374, + "step": 865 + }, + { + "epoch": 0.6770914777169664, + "grad_norm": 0.4051656424999237, + "learning_rate": 8.365213973478378e-05, + "loss": 0.1078, + "step": 866 + }, + { + "epoch": 0.6778733385457388, + "grad_norm": 0.40610331296920776, + "learning_rate": 8.360164070937389e-05, + "loss": 0.116, + "step": 867 + }, + { + "epoch": 0.6786551993745114, + "grad_norm": 0.35644418001174927, + "learning_rate": 8.355107910195175e-05, + "loss": 0.0986, + "step": 868 + }, + { + "epoch": 0.6794370602032838, + "grad_norm": 0.4006430208683014, + "learning_rate": 8.350045500668681e-05, + "loss": 0.1336, + "step": 869 + }, + { + "epoch": 0.6802189210320563, + "grad_norm": 0.36977195739746094, + "learning_rate": 8.344976851786487e-05, + "loss": 0.1294, + "step": 870 + }, + { + "epoch": 0.6810007818608288, + "grad_norm": 0.39970162510871887, + "learning_rate": 8.339901972988795e-05, + "loss": 0.1347, + "step": 871 + }, + { + "epoch": 0.6817826426896012, + "grad_norm": 0.36577853560447693, + "learning_rate": 8.334820873727406e-05, + "loss": 0.1194, + "step": 872 + }, + { + "epoch": 0.6825645035183737, + "grad_norm": 0.40536704659461975, + "learning_rate": 8.329733563465711e-05, + "loss": 0.1342, + "step": 873 + }, + { + "epoch": 0.6833463643471462, + "grad_norm": 0.37863144278526306, + "learning_rate": 8.324640051678668e-05, + "loss": 0.1283, + "step": 874 + }, + { + "epoch": 0.6841282251759186, + "grad_norm": 0.36349162459373474, + "learning_rate": 8.319540347852786e-05, + "loss": 0.138, + "step": 875 + }, + { + "epoch": 0.6849100860046912, + "grad_norm": 0.34184351563453674, + "learning_rate": 8.314434461486101e-05, + "loss": 0.0996, + "step": 876 + }, + { + "epoch": 0.6856919468334637, + "grad_norm": 0.4352666735649109, + "learning_rate": 8.30932240208817e-05, + "loss": 0.1489, + "step": 877 + }, + { + "epoch": 0.6864738076622361, + "grad_norm": 0.3389497399330139, + "learning_rate": 8.304204179180046e-05, + "loss": 0.1134, + "step": 878 + }, + { + "epoch": 0.6872556684910086, + "grad_norm": 0.48941102623939514, + "learning_rate": 8.299079802294258e-05, + "loss": 0.1533, + "step": 879 + }, + { + "epoch": 0.6880375293197811, + "grad_norm": 0.3835209608078003, + "learning_rate": 8.2939492809748e-05, + "loss": 0.1189, + "step": 880 + }, + { + "epoch": 0.6888193901485535, + "grad_norm": 0.3911098837852478, + "learning_rate": 8.288812624777109e-05, + "loss": 0.1267, + "step": 881 + }, + { + "epoch": 0.689601250977326, + "grad_norm": 
0.3850928843021393, + "learning_rate": 8.283669843268047e-05, + "loss": 0.1221, + "step": 882 + }, + { + "epoch": 0.6903831118060985, + "grad_norm": 0.3491794764995575, + "learning_rate": 8.278520946025884e-05, + "loss": 0.1099, + "step": 883 + }, + { + "epoch": 0.691164972634871, + "grad_norm": 0.26835763454437256, + "learning_rate": 8.273365942640283e-05, + "loss": 0.1204, + "step": 884 + }, + { + "epoch": 0.6919468334636435, + "grad_norm": 0.40667450428009033, + "learning_rate": 8.268204842712278e-05, + "loss": 0.1487, + "step": 885 + }, + { + "epoch": 0.6927286942924159, + "grad_norm": 0.342351496219635, + "learning_rate": 8.263037655854254e-05, + "loss": 0.1268, + "step": 886 + }, + { + "epoch": 0.6935105551211884, + "grad_norm": 0.37458154559135437, + "learning_rate": 8.25786439168994e-05, + "loss": 0.108, + "step": 887 + }, + { + "epoch": 0.6942924159499609, + "grad_norm": 0.4606289863586426, + "learning_rate": 8.252685059854376e-05, + "loss": 0.1494, + "step": 888 + }, + { + "epoch": 0.6950742767787333, + "grad_norm": 0.35914990305900574, + "learning_rate": 8.24749966999391e-05, + "loss": 0.1142, + "step": 889 + }, + { + "epoch": 0.6958561376075059, + "grad_norm": 0.3088033199310303, + "learning_rate": 8.242308231766169e-05, + "loss": 0.107, + "step": 890 + }, + { + "epoch": 0.6966379984362784, + "grad_norm": 0.3469317853450775, + "learning_rate": 8.237110754840043e-05, + "loss": 0.105, + "step": 891 + }, + { + "epoch": 0.6974198592650508, + "grad_norm": 0.38797950744628906, + "learning_rate": 8.231907248895674e-05, + "loss": 0.1486, + "step": 892 + }, + { + "epoch": 0.6982017200938233, + "grad_norm": 0.23797141015529633, + "learning_rate": 8.22669772362443e-05, + "loss": 0.0972, + "step": 893 + }, + { + "epoch": 0.6989835809225958, + "grad_norm": 0.3245048522949219, + "learning_rate": 8.221482188728888e-05, + "loss": 0.1178, + "step": 894 + }, + { + "epoch": 0.6997654417513682, + "grad_norm": 0.30810755491256714, + "learning_rate": 8.216260653922823e-05, + "loss": 0.139, + "step": 895 + }, + { + "epoch": 0.7005473025801408, + "grad_norm": 0.3993600606918335, + "learning_rate": 8.211033128931176e-05, + "loss": 0.125, + "step": 896 + }, + { + "epoch": 0.7013291634089132, + "grad_norm": 0.33253660798072815, + "learning_rate": 8.205799623490055e-05, + "loss": 0.1001, + "step": 897 + }, + { + "epoch": 0.7021110242376857, + "grad_norm": 0.4122001528739929, + "learning_rate": 8.200560147346703e-05, + "loss": 0.1307, + "step": 898 + }, + { + "epoch": 0.7028928850664582, + "grad_norm": 0.3810223340988159, + "learning_rate": 8.195314710259475e-05, + "loss": 0.1634, + "step": 899 + }, + { + "epoch": 0.7036747458952306, + "grad_norm": 0.3160824775695801, + "learning_rate": 8.190063321997839e-05, + "loss": 0.1637, + "step": 900 + }, + { + "epoch": 0.7044566067240031, + "grad_norm": 0.3434925079345703, + "learning_rate": 8.184805992342342e-05, + "loss": 0.1414, + "step": 901 + }, + { + "epoch": 0.7052384675527756, + "grad_norm": 0.3496757447719574, + "learning_rate": 8.179542731084595e-05, + "loss": 0.1076, + "step": 902 + }, + { + "epoch": 0.706020328381548, + "grad_norm": 0.3597007989883423, + "learning_rate": 8.174273548027262e-05, + "loss": 0.1115, + "step": 903 + }, + { + "epoch": 0.7068021892103206, + "grad_norm": 0.3989991843700409, + "learning_rate": 8.168998452984031e-05, + "loss": 0.1019, + "step": 904 + }, + { + "epoch": 0.7075840500390931, + "grad_norm": 0.35645201802253723, + "learning_rate": 8.163717455779602e-05, + "loss": 0.1351, + "step": 905 + }, + { + "epoch": 
0.7083659108678655, + "grad_norm": 0.31483447551727295, + "learning_rate": 8.158430566249671e-05, + "loss": 0.1029, + "step": 906 + }, + { + "epoch": 0.709147771696638, + "grad_norm": 0.3804417848587036, + "learning_rate": 8.153137794240903e-05, + "loss": 0.115, + "step": 907 + }, + { + "epoch": 0.7099296325254105, + "grad_norm": 0.33531495928764343, + "learning_rate": 8.147839149610922e-05, + "loss": 0.1044, + "step": 908 + }, + { + "epoch": 0.7107114933541829, + "grad_norm": 0.4194093942642212, + "learning_rate": 8.142534642228288e-05, + "loss": 0.1219, + "step": 909 + }, + { + "epoch": 0.7114933541829555, + "grad_norm": 0.33107230067253113, + "learning_rate": 8.137224281972485e-05, + "loss": 0.0973, + "step": 910 + }, + { + "epoch": 0.7122752150117279, + "grad_norm": 0.34085744619369507, + "learning_rate": 8.13190807873389e-05, + "loss": 0.1085, + "step": 911 + }, + { + "epoch": 0.7130570758405004, + "grad_norm": 0.43536314368247986, + "learning_rate": 8.126586042413769e-05, + "loss": 0.1466, + "step": 912 + }, + { + "epoch": 0.7138389366692729, + "grad_norm": 0.32383957505226135, + "learning_rate": 8.121258182924247e-05, + "loss": 0.1311, + "step": 913 + }, + { + "epoch": 0.7146207974980453, + "grad_norm": 0.3228246569633484, + "learning_rate": 8.1159245101883e-05, + "loss": 0.1244, + "step": 914 + }, + { + "epoch": 0.7154026583268178, + "grad_norm": 0.35498085618019104, + "learning_rate": 8.110585034139723e-05, + "loss": 0.1174, + "step": 915 + }, + { + "epoch": 0.7161845191555903, + "grad_norm": 0.3768846392631531, + "learning_rate": 8.105239764723131e-05, + "loss": 0.0963, + "step": 916 + }, + { + "epoch": 0.7169663799843627, + "grad_norm": 0.26356974244117737, + "learning_rate": 8.099888711893917e-05, + "loss": 0.1152, + "step": 917 + }, + { + "epoch": 0.7177482408131353, + "grad_norm": 0.3726540803909302, + "learning_rate": 8.094531885618252e-05, + "loss": 0.1068, + "step": 918 + }, + { + "epoch": 0.7185301016419078, + "grad_norm": 0.4119989573955536, + "learning_rate": 8.089169295873058e-05, + "loss": 0.1207, + "step": 919 + }, + { + "epoch": 0.7193119624706802, + "grad_norm": 0.3351033329963684, + "learning_rate": 8.083800952645994e-05, + "loss": 0.0899, + "step": 920 + }, + { + "epoch": 0.7200938232994527, + "grad_norm": 0.3526962399482727, + "learning_rate": 8.078426865935432e-05, + "loss": 0.1478, + "step": 921 + }, + { + "epoch": 0.7208756841282252, + "grad_norm": 0.3071853518486023, + "learning_rate": 8.07304704575044e-05, + "loss": 0.1107, + "step": 922 + }, + { + "epoch": 0.7216575449569976, + "grad_norm": 0.3210884630680084, + "learning_rate": 8.067661502110768e-05, + "loss": 0.0975, + "step": 923 + }, + { + "epoch": 0.7224394057857702, + "grad_norm": 0.34034693241119385, + "learning_rate": 8.06227024504682e-05, + "loss": 0.1235, + "step": 924 + }, + { + "epoch": 0.7232212666145426, + "grad_norm": 0.24603362381458282, + "learning_rate": 8.056873284599648e-05, + "loss": 0.0893, + "step": 925 + }, + { + "epoch": 0.7240031274433151, + "grad_norm": 0.5210705399513245, + "learning_rate": 8.051470630820925e-05, + "loss": 0.1745, + "step": 926 + }, + { + "epoch": 0.7247849882720876, + "grad_norm": 0.36895841360092163, + "learning_rate": 8.046062293772922e-05, + "loss": 0.1238, + "step": 927 + }, + { + "epoch": 0.72556684910086, + "grad_norm": 0.3690948784351349, + "learning_rate": 8.040648283528501e-05, + "loss": 0.1082, + "step": 928 + }, + { + "epoch": 0.7263487099296325, + "grad_norm": 0.3088683784008026, + "learning_rate": 8.035228610171085e-05, + "loss": 0.1122, + "step": 
929 + }, + { + "epoch": 0.727130570758405, + "grad_norm": 0.316658616065979, + "learning_rate": 8.029803283794652e-05, + "loss": 0.1129, + "step": 930 + }, + { + "epoch": 0.7279124315871774, + "grad_norm": 0.40790560841560364, + "learning_rate": 8.024372314503701e-05, + "loss": 0.1349, + "step": 931 + }, + { + "epoch": 0.72869429241595, + "grad_norm": 0.46086615324020386, + "learning_rate": 8.018935712413242e-05, + "loss": 0.1845, + "step": 932 + }, + { + "epoch": 0.7294761532447225, + "grad_norm": 0.30622750520706177, + "learning_rate": 8.013493487648782e-05, + "loss": 0.1264, + "step": 933 + }, + { + "epoch": 0.7302580140734949, + "grad_norm": 0.42094358801841736, + "learning_rate": 8.008045650346294e-05, + "loss": 0.1089, + "step": 934 + }, + { + "epoch": 0.7310398749022674, + "grad_norm": 0.33936363458633423, + "learning_rate": 8.002592210652202e-05, + "loss": 0.1073, + "step": 935 + }, + { + "epoch": 0.7318217357310399, + "grad_norm": 0.3029439449310303, + "learning_rate": 7.997133178723374e-05, + "loss": 0.1162, + "step": 936 + }, + { + "epoch": 0.7326035965598123, + "grad_norm": 0.2870144844055176, + "learning_rate": 7.991668564727082e-05, + "loss": 0.1141, + "step": 937 + }, + { + "epoch": 0.7333854573885848, + "grad_norm": 0.3112180233001709, + "learning_rate": 7.986198378841004e-05, + "loss": 0.1262, + "step": 938 + }, + { + "epoch": 0.7341673182173573, + "grad_norm": 0.3226885497570038, + "learning_rate": 7.98072263125319e-05, + "loss": 0.1395, + "step": 939 + }, + { + "epoch": 0.7349491790461298, + "grad_norm": 0.47658324241638184, + "learning_rate": 7.975241332162048e-05, + "loss": 0.1653, + "step": 940 + }, + { + "epoch": 0.7357310398749023, + "grad_norm": 0.37937480211257935, + "learning_rate": 7.969754491776329e-05, + "loss": 0.1274, + "step": 941 + }, + { + "epoch": 0.7365129007036747, + "grad_norm": 0.35611534118652344, + "learning_rate": 7.964262120315103e-05, + "loss": 0.1426, + "step": 942 + }, + { + "epoch": 0.7372947615324472, + "grad_norm": 0.38592514395713806, + "learning_rate": 7.958764228007741e-05, + "loss": 0.1292, + "step": 943 + }, + { + "epoch": 0.7380766223612197, + "grad_norm": 0.4212297797203064, + "learning_rate": 7.953260825093898e-05, + "loss": 0.1283, + "step": 944 + }, + { + "epoch": 0.7388584831899921, + "grad_norm": 0.4761105477809906, + "learning_rate": 7.947751921823488e-05, + "loss": 0.1197, + "step": 945 + }, + { + "epoch": 0.7396403440187647, + "grad_norm": 0.351823627948761, + "learning_rate": 7.942237528456673e-05, + "loss": 0.1195, + "step": 946 + }, + { + "epoch": 0.7404222048475372, + "grad_norm": 0.4467895030975342, + "learning_rate": 7.936717655263841e-05, + "loss": 0.1341, + "step": 947 + }, + { + "epoch": 0.7412040656763096, + "grad_norm": 0.35307154059410095, + "learning_rate": 7.931192312525584e-05, + "loss": 0.1101, + "step": 948 + }, + { + "epoch": 0.7419859265050821, + "grad_norm": 0.32904255390167236, + "learning_rate": 7.925661510532681e-05, + "loss": 0.1324, + "step": 949 + }, + { + "epoch": 0.7427677873338546, + "grad_norm": 0.38342636823654175, + "learning_rate": 7.920125259586078e-05, + "loss": 0.1138, + "step": 950 + }, + { + "epoch": 0.743549648162627, + "grad_norm": 0.46515193581581116, + "learning_rate": 7.91458356999687e-05, + "loss": 0.1151, + "step": 951 + }, + { + "epoch": 0.7443315089913995, + "grad_norm": 0.3487924337387085, + "learning_rate": 7.909036452086285e-05, + "loss": 0.1415, + "step": 952 + }, + { + "epoch": 0.745113369820172, + "grad_norm": 0.3488902449607849, + "learning_rate": 7.903483916185654e-05, + 
"loss": 0.1466, + "step": 953 + }, + { + "epoch": 0.7458952306489445, + "grad_norm": 0.4646369516849518, + "learning_rate": 7.897925972636406e-05, + "loss": 0.1191, + "step": 954 + }, + { + "epoch": 0.746677091477717, + "grad_norm": 0.33017170429229736, + "learning_rate": 7.892362631790035e-05, + "loss": 0.1032, + "step": 955 + }, + { + "epoch": 0.7474589523064894, + "grad_norm": 0.39223426580429077, + "learning_rate": 7.886793904008094e-05, + "loss": 0.1137, + "step": 956 + }, + { + "epoch": 0.7482408131352619, + "grad_norm": 0.30325499176979065, + "learning_rate": 7.881219799662164e-05, + "loss": 0.1137, + "step": 957 + }, + { + "epoch": 0.7490226739640344, + "grad_norm": 0.4288765788078308, + "learning_rate": 7.87564032913384e-05, + "loss": 0.1458, + "step": 958 + }, + { + "epoch": 0.7498045347928068, + "grad_norm": 0.2882708013057709, + "learning_rate": 7.870055502814714e-05, + "loss": 0.1178, + "step": 959 + }, + { + "epoch": 0.7505863956215794, + "grad_norm": 0.36212223768234253, + "learning_rate": 7.864465331106349e-05, + "loss": 0.1219, + "step": 960 + }, + { + "epoch": 0.7513682564503519, + "grad_norm": 0.41355687379837036, + "learning_rate": 7.858869824420272e-05, + "loss": 0.1486, + "step": 961 + }, + { + "epoch": 0.7521501172791243, + "grad_norm": 0.36152246594429016, + "learning_rate": 7.853268993177936e-05, + "loss": 0.1297, + "step": 962 + }, + { + "epoch": 0.7529319781078968, + "grad_norm": 0.4029439389705658, + "learning_rate": 7.847662847810713e-05, + "loss": 0.1045, + "step": 963 + }, + { + "epoch": 0.7537138389366693, + "grad_norm": 0.29506799578666687, + "learning_rate": 7.84205139875988e-05, + "loss": 0.1051, + "step": 964 + }, + { + "epoch": 0.7544956997654417, + "grad_norm": 0.3722326457500458, + "learning_rate": 7.836434656476583e-05, + "loss": 0.1122, + "step": 965 + }, + { + "epoch": 0.7552775605942142, + "grad_norm": 0.326231449842453, + "learning_rate": 7.830812631421833e-05, + "loss": 0.1399, + "step": 966 + }, + { + "epoch": 0.7560594214229867, + "grad_norm": 0.3228454291820526, + "learning_rate": 7.825185334066475e-05, + "loss": 0.0995, + "step": 967 + }, + { + "epoch": 0.7568412822517592, + "grad_norm": 0.4981108605861664, + "learning_rate": 7.819552774891174e-05, + "loss": 0.1363, + "step": 968 + }, + { + "epoch": 0.7576231430805317, + "grad_norm": 0.27120697498321533, + "learning_rate": 7.813914964386401e-05, + "loss": 0.0893, + "step": 969 + }, + { + "epoch": 0.7584050039093041, + "grad_norm": 0.31169626116752625, + "learning_rate": 7.808271913052398e-05, + "loss": 0.1364, + "step": 970 + }, + { + "epoch": 0.7591868647380766, + "grad_norm": 0.3545890152454376, + "learning_rate": 7.802623631399176e-05, + "loss": 0.1163, + "step": 971 + }, + { + "epoch": 0.7599687255668491, + "grad_norm": 0.3600348234176636, + "learning_rate": 7.796970129946484e-05, + "loss": 0.1292, + "step": 972 + }, + { + "epoch": 0.7607505863956215, + "grad_norm": 0.3808040916919708, + "learning_rate": 7.791311419223791e-05, + "loss": 0.1168, + "step": 973 + }, + { + "epoch": 0.7615324472243941, + "grad_norm": 0.44746145606040955, + "learning_rate": 7.785647509770272e-05, + "loss": 0.1267, + "step": 974 + }, + { + "epoch": 0.7623143080531666, + "grad_norm": 0.35534608364105225, + "learning_rate": 7.779978412134783e-05, + "loss": 0.1183, + "step": 975 + }, + { + "epoch": 0.763096168881939, + "grad_norm": 0.34301894903182983, + "learning_rate": 7.77430413687584e-05, + "loss": 0.1046, + "step": 976 + }, + { + "epoch": 0.7638780297107115, + "grad_norm": 0.4878581166267395, + 
"learning_rate": 7.768624694561604e-05, + "loss": 0.1592, + "step": 977 + }, + { + "epoch": 0.764659890539484, + "grad_norm": 0.43463602662086487, + "learning_rate": 7.762940095769861e-05, + "loss": 0.1213, + "step": 978 + }, + { + "epoch": 0.7654417513682564, + "grad_norm": 0.36322781443595886, + "learning_rate": 7.757250351088004e-05, + "loss": 0.1183, + "step": 979 + }, + { + "epoch": 0.766223612197029, + "grad_norm": 0.28254207968711853, + "learning_rate": 7.751555471113e-05, + "loss": 0.1063, + "step": 980 + }, + { + "epoch": 0.7670054730258014, + "grad_norm": 0.3453584313392639, + "learning_rate": 7.745855466451385e-05, + "loss": 0.1257, + "step": 981 + }, + { + "epoch": 0.7677873338545739, + "grad_norm": 0.35417473316192627, + "learning_rate": 7.740150347719246e-05, + "loss": 0.1084, + "step": 982 + }, + { + "epoch": 0.7685691946833464, + "grad_norm": 0.2651992440223694, + "learning_rate": 7.734440125542186e-05, + "loss": 0.0937, + "step": 983 + }, + { + "epoch": 0.7693510555121188, + "grad_norm": 0.41208094358444214, + "learning_rate": 7.728724810555317e-05, + "loss": 0.1358, + "step": 984 + }, + { + "epoch": 0.7701329163408913, + "grad_norm": 0.3765069246292114, + "learning_rate": 7.723004413403238e-05, + "loss": 0.1067, + "step": 985 + }, + { + "epoch": 0.7709147771696638, + "grad_norm": 0.3152915835380554, + "learning_rate": 7.717278944740007e-05, + "loss": 0.1083, + "step": 986 + }, + { + "epoch": 0.7716966379984362, + "grad_norm": 0.38685888051986694, + "learning_rate": 7.711548415229135e-05, + "loss": 0.1395, + "step": 987 + }, + { + "epoch": 0.7724784988272088, + "grad_norm": 0.3208830654621124, + "learning_rate": 7.705812835543553e-05, + "loss": 0.1318, + "step": 988 + }, + { + "epoch": 0.7732603596559813, + "grad_norm": 0.4066202938556671, + "learning_rate": 7.700072216365602e-05, + "loss": 0.1192, + "step": 989 + }, + { + "epoch": 0.7740422204847537, + "grad_norm": 0.3345940113067627, + "learning_rate": 7.694326568387007e-05, + "loss": 0.1169, + "step": 990 + }, + { + "epoch": 0.7748240813135262, + "grad_norm": 0.3916303813457489, + "learning_rate": 7.688575902308854e-05, + "loss": 0.1185, + "step": 991 + }, + { + "epoch": 0.7756059421422987, + "grad_norm": 0.39679235219955444, + "learning_rate": 7.682820228841588e-05, + "loss": 0.1277, + "step": 992 + }, + { + "epoch": 0.7763878029710711, + "grad_norm": 0.3425503075122833, + "learning_rate": 7.677059558704965e-05, + "loss": 0.1219, + "step": 993 + }, + { + "epoch": 0.7771696637998436, + "grad_norm": 0.2682630121707916, + "learning_rate": 7.671293902628058e-05, + "loss": 0.12, + "step": 994 + }, + { + "epoch": 0.777951524628616, + "grad_norm": 0.4910065233707428, + "learning_rate": 7.665523271349221e-05, + "loss": 0.1214, + "step": 995 + }, + { + "epoch": 0.7787333854573886, + "grad_norm": 0.3416655361652374, + "learning_rate": 7.659747675616075e-05, + "loss": 0.1107, + "step": 996 + }, + { + "epoch": 0.7795152462861611, + "grad_norm": 0.47088661789894104, + "learning_rate": 7.65396712618549e-05, + "loss": 0.1483, + "step": 997 + }, + { + "epoch": 0.7802971071149335, + "grad_norm": 0.36356088519096375, + "learning_rate": 7.648181633823559e-05, + "loss": 0.1473, + "step": 998 + }, + { + "epoch": 0.781078967943706, + "grad_norm": 0.4040223956108093, + "learning_rate": 7.642391209305581e-05, + "loss": 0.1234, + "step": 999 + }, + { + "epoch": 0.7818608287724785, + "grad_norm": 0.27312490344047546, + "learning_rate": 7.636595863416041e-05, + "loss": 0.0953, + "step": 1000 + }, + { + "epoch": 0.7818608287724785, + "eval_loss": 
0.13331574201583862, + "eval_runtime": 13.3827, + "eval_samples_per_second": 3.886, + "eval_steps_per_second": 0.971, + "step": 1000 + }, + { + "epoch": 0.7826426896012509, + "grad_norm": 0.4011226296424866, + "learning_rate": 7.630795606948592e-05, + "loss": 0.1279, + "step": 1001 + }, + { + "epoch": 0.7834245504300235, + "grad_norm": 0.2801114320755005, + "learning_rate": 7.624990450706034e-05, + "loss": 0.0881, + "step": 1002 + }, + { + "epoch": 0.784206411258796, + "grad_norm": 0.3331213891506195, + "learning_rate": 7.619180405500284e-05, + "loss": 0.1109, + "step": 1003 + }, + { + "epoch": 0.7849882720875684, + "grad_norm": 0.27233004570007324, + "learning_rate": 7.613365482152374e-05, + "loss": 0.1254, + "step": 1004 + }, + { + "epoch": 0.7857701329163409, + "grad_norm": 0.3889213502407074, + "learning_rate": 7.607545691492421e-05, + "loss": 0.1247, + "step": 1005 + }, + { + "epoch": 0.7865519937451134, + "grad_norm": 0.31572502851486206, + "learning_rate": 7.601721044359601e-05, + "loss": 0.0833, + "step": 1006 + }, + { + "epoch": 0.7873338545738858, + "grad_norm": 0.3308155834674835, + "learning_rate": 7.595891551602139e-05, + "loss": 0.0946, + "step": 1007 + }, + { + "epoch": 0.7881157154026583, + "grad_norm": 0.3063320219516754, + "learning_rate": 7.590057224077285e-05, + "loss": 0.1078, + "step": 1008 + }, + { + "epoch": 0.7888975762314308, + "grad_norm": 0.38767197728157043, + "learning_rate": 7.584218072651291e-05, + "loss": 0.1363, + "step": 1009 + }, + { + "epoch": 0.7896794370602033, + "grad_norm": 0.40377962589263916, + "learning_rate": 7.578374108199396e-05, + "loss": 0.1376, + "step": 1010 + }, + { + "epoch": 0.7904612978889758, + "grad_norm": 0.3951176106929779, + "learning_rate": 7.572525341605805e-05, + "loss": 0.1108, + "step": 1011 + }, + { + "epoch": 0.7912431587177482, + "grad_norm": 0.28525015711784363, + "learning_rate": 7.56667178376366e-05, + "loss": 0.1247, + "step": 1012 + }, + { + "epoch": 0.7920250195465207, + "grad_norm": 0.36067599058151245, + "learning_rate": 7.560813445575032e-05, + "loss": 0.1015, + "step": 1013 + }, + { + "epoch": 0.7928068803752932, + "grad_norm": 0.31828776001930237, + "learning_rate": 7.554950337950895e-05, + "loss": 0.1138, + "step": 1014 + }, + { + "epoch": 0.7935887412040656, + "grad_norm": 0.36967208981513977, + "learning_rate": 7.549082471811105e-05, + "loss": 0.1488, + "step": 1015 + }, + { + "epoch": 0.7943706020328382, + "grad_norm": 0.3618142604827881, + "learning_rate": 7.54320985808438e-05, + "loss": 0.1043, + "step": 1016 + }, + { + "epoch": 0.7951524628616107, + "grad_norm": 0.3296690583229065, + "learning_rate": 7.53733250770828e-05, + "loss": 0.1256, + "step": 1017 + }, + { + "epoch": 0.7959343236903831, + "grad_norm": 0.4782399833202362, + "learning_rate": 7.531450431629188e-05, + "loss": 0.1331, + "step": 1018 + }, + { + "epoch": 0.7967161845191556, + "grad_norm": 0.3915999233722687, + "learning_rate": 7.525563640802286e-05, + "loss": 0.1544, + "step": 1019 + }, + { + "epoch": 0.7974980453479281, + "grad_norm": 0.4172796905040741, + "learning_rate": 7.519672146191543e-05, + "loss": 0.1196, + "step": 1020 + }, + { + "epoch": 0.7982799061767005, + "grad_norm": 0.4924179017543793, + "learning_rate": 7.513775958769683e-05, + "loss": 0.1326, + "step": 1021 + }, + { + "epoch": 0.799061767005473, + "grad_norm": 0.32305261492729187, + "learning_rate": 7.50787508951817e-05, + "loss": 0.1138, + "step": 1022 + }, + { + "epoch": 0.7998436278342455, + "grad_norm": 0.2907930314540863, + "learning_rate": 7.501969549427195e-05, + 
"loss": 0.1189, + "step": 1023 + }, + { + "epoch": 0.800625488663018, + "grad_norm": 0.4041495621204376, + "learning_rate": 7.496059349495636e-05, + "loss": 0.1088, + "step": 1024 + }, + { + "epoch": 0.8014073494917905, + "grad_norm": 0.342549592256546, + "learning_rate": 7.49014450073106e-05, + "loss": 0.1229, + "step": 1025 + }, + { + "epoch": 0.8021892103205629, + "grad_norm": 0.3912782073020935, + "learning_rate": 7.484225014149691e-05, + "loss": 0.122, + "step": 1026 + }, + { + "epoch": 0.8029710711493354, + "grad_norm": 0.38920527696609497, + "learning_rate": 7.478300900776387e-05, + "loss": 0.1185, + "step": 1027 + }, + { + "epoch": 0.8037529319781079, + "grad_norm": 0.38123801350593567, + "learning_rate": 7.472372171644626e-05, + "loss": 0.151, + "step": 1028 + }, + { + "epoch": 0.8045347928068803, + "grad_norm": 0.2902073860168457, + "learning_rate": 7.46643883779648e-05, + "loss": 0.1127, + "step": 1029 + }, + { + "epoch": 0.8053166536356529, + "grad_norm": 0.3496885299682617, + "learning_rate": 7.460500910282602e-05, + "loss": 0.1319, + "step": 1030 + }, + { + "epoch": 0.8060985144644254, + "grad_norm": 0.32722875475883484, + "learning_rate": 7.454558400162195e-05, + "loss": 0.0927, + "step": 1031 + }, + { + "epoch": 0.8068803752931978, + "grad_norm": 0.3707050383090973, + "learning_rate": 7.448611318503001e-05, + "loss": 0.1008, + "step": 1032 + }, + { + "epoch": 0.8076622361219703, + "grad_norm": 0.3069707453250885, + "learning_rate": 7.442659676381275e-05, + "loss": 0.1218, + "step": 1033 + }, + { + "epoch": 0.8084440969507428, + "grad_norm": 0.4122169613838196, + "learning_rate": 7.436703484881761e-05, + "loss": 0.13, + "step": 1034 + }, + { + "epoch": 0.8092259577795152, + "grad_norm": 0.39322778582572937, + "learning_rate": 7.430742755097689e-05, + "loss": 0.1239, + "step": 1035 + }, + { + "epoch": 0.8100078186082877, + "grad_norm": 0.35709503293037415, + "learning_rate": 7.424777498130727e-05, + "loss": 0.116, + "step": 1036 + }, + { + "epoch": 0.8107896794370602, + "grad_norm": 0.3031849265098572, + "learning_rate": 7.418807725090983e-05, + "loss": 0.098, + "step": 1037 + }, + { + "epoch": 0.8115715402658327, + "grad_norm": 0.30318596959114075, + "learning_rate": 7.412833447096973e-05, + "loss": 0.1031, + "step": 1038 + }, + { + "epoch": 0.8123534010946052, + "grad_norm": 0.24984782934188843, + "learning_rate": 7.406854675275605e-05, + "loss": 0.1119, + "step": 1039 + }, + { + "epoch": 0.8131352619233776, + "grad_norm": 0.3230637013912201, + "learning_rate": 7.400871420762154e-05, + "loss": 0.1415, + "step": 1040 + }, + { + "epoch": 0.8139171227521501, + "grad_norm": 0.40312686562538147, + "learning_rate": 7.39488369470025e-05, + "loss": 0.1162, + "step": 1041 + }, + { + "epoch": 0.8146989835809226, + "grad_norm": 0.3000973165035248, + "learning_rate": 7.388891508241842e-05, + "loss": 0.1341, + "step": 1042 + }, + { + "epoch": 0.815480844409695, + "grad_norm": 0.3631311357021332, + "learning_rate": 7.382894872547195e-05, + "loss": 0.1275, + "step": 1043 + }, + { + "epoch": 0.8162627052384676, + "grad_norm": 0.2743859887123108, + "learning_rate": 7.376893798784853e-05, + "loss": 0.0804, + "step": 1044 + }, + { + "epoch": 0.8170445660672401, + "grad_norm": 0.3590327203273773, + "learning_rate": 7.370888298131633e-05, + "loss": 0.1, + "step": 1045 + }, + { + "epoch": 0.8178264268960125, + "grad_norm": 0.42224350571632385, + "learning_rate": 7.36487838177259e-05, + "loss": 0.134, + "step": 1046 + }, + { + "epoch": 0.818608287724785, + "grad_norm": 0.3597356975078583, + 
"learning_rate": 7.35886406090101e-05, + "loss": 0.117, + "step": 1047 + }, + { + "epoch": 0.8193901485535575, + "grad_norm": 0.30071374773979187, + "learning_rate": 7.352845346718378e-05, + "loss": 0.103, + "step": 1048 + }, + { + "epoch": 0.8201720093823299, + "grad_norm": 0.33568722009658813, + "learning_rate": 7.34682225043436e-05, + "loss": 0.1102, + "step": 1049 + }, + { + "epoch": 0.8209538702111024, + "grad_norm": 0.3519073724746704, + "learning_rate": 7.340794783266787e-05, + "loss": 0.1545, + "step": 1050 + }, + { + "epoch": 0.8217357310398749, + "grad_norm": 0.3975318968296051, + "learning_rate": 7.334762956441632e-05, + "loss": 0.1136, + "step": 1051 + }, + { + "epoch": 0.8225175918686474, + "grad_norm": 0.3154171109199524, + "learning_rate": 7.328726781192981e-05, + "loss": 0.1, + "step": 1052 + }, + { + "epoch": 0.8232994526974199, + "grad_norm": 0.3593648076057434, + "learning_rate": 7.322686268763026e-05, + "loss": 0.1115, + "step": 1053 + }, + { + "epoch": 0.8240813135261923, + "grad_norm": 0.40494292974472046, + "learning_rate": 7.31664143040203e-05, + "loss": 0.1296, + "step": 1054 + }, + { + "epoch": 0.8248631743549648, + "grad_norm": 0.3605891764163971, + "learning_rate": 7.310592277368322e-05, + "loss": 0.1316, + "step": 1055 + }, + { + "epoch": 0.8256450351837373, + "grad_norm": 0.389142245054245, + "learning_rate": 7.304538820928257e-05, + "loss": 0.0992, + "step": 1056 + }, + { + "epoch": 0.8264268960125097, + "grad_norm": 0.3127676844596863, + "learning_rate": 7.298481072356214e-05, + "loss": 0.0949, + "step": 1057 + }, + { + "epoch": 0.8272087568412823, + "grad_norm": 0.2602942883968353, + "learning_rate": 7.292419042934556e-05, + "loss": 0.1101, + "step": 1058 + }, + { + "epoch": 0.8279906176700548, + "grad_norm": 0.40910831093788147, + "learning_rate": 7.28635274395363e-05, + "loss": 0.1544, + "step": 1059 + }, + { + "epoch": 0.8287724784988272, + "grad_norm": 0.38308101892471313, + "learning_rate": 7.280282186711726e-05, + "loss": 0.1241, + "step": 1060 + }, + { + "epoch": 0.8295543393275997, + "grad_norm": 0.2836664021015167, + "learning_rate": 7.274207382515071e-05, + "loss": 0.1309, + "step": 1061 + }, + { + "epoch": 0.8303362001563722, + "grad_norm": 0.3716040551662445, + "learning_rate": 7.268128342677796e-05, + "loss": 0.1232, + "step": 1062 + }, + { + "epoch": 0.8311180609851446, + "grad_norm": 0.4505487382411957, + "learning_rate": 7.262045078521924e-05, + "loss": 0.1465, + "step": 1063 + }, + { + "epoch": 0.8318999218139171, + "grad_norm": 0.45818236470222473, + "learning_rate": 7.25595760137735e-05, + "loss": 0.1318, + "step": 1064 + }, + { + "epoch": 0.8326817826426895, + "grad_norm": 0.33937686681747437, + "learning_rate": 7.249865922581807e-05, + "loss": 0.1104, + "step": 1065 + }, + { + "epoch": 0.8334636434714621, + "grad_norm": 0.5324069857597351, + "learning_rate": 7.243770053480859e-05, + "loss": 0.1084, + "step": 1066 + }, + { + "epoch": 0.8342455043002346, + "grad_norm": 0.3447381854057312, + "learning_rate": 7.237670005427872e-05, + "loss": 0.0932, + "step": 1067 + }, + { + "epoch": 0.835027365129007, + "grad_norm": 0.445389062166214, + "learning_rate": 7.231565789783997e-05, + "loss": 0.1026, + "step": 1068 + }, + { + "epoch": 0.8358092259577795, + "grad_norm": 0.33196380734443665, + "learning_rate": 7.225457417918144e-05, + "loss": 0.1102, + "step": 1069 + }, + { + "epoch": 0.836591086786552, + "grad_norm": 0.37734511494636536, + "learning_rate": 7.21934490120697e-05, + "loss": 0.0999, + "step": 1070 + }, + { + "epoch": 
0.8373729476153244, + "grad_norm": 0.4131399095058441, + "learning_rate": 7.213228251034844e-05, + "loss": 0.1125, + "step": 1071 + }, + { + "epoch": 0.838154808444097, + "grad_norm": 0.4269322454929352, + "learning_rate": 7.207107478793838e-05, + "loss": 0.1709, + "step": 1072 + }, + { + "epoch": 0.8389366692728695, + "grad_norm": 0.4380016624927521, + "learning_rate": 7.2009825958837e-05, + "loss": 0.1252, + "step": 1073 + }, + { + "epoch": 0.8397185301016419, + "grad_norm": 0.3470591902732849, + "learning_rate": 7.194853613711835e-05, + "loss": 0.1178, + "step": 1074 + }, + { + "epoch": 0.8405003909304144, + "grad_norm": 0.4182756841182709, + "learning_rate": 7.188720543693283e-05, + "loss": 0.1041, + "step": 1075 + }, + { + "epoch": 0.8412822517591869, + "grad_norm": 0.41240689158439636, + "learning_rate": 7.182583397250692e-05, + "loss": 0.1316, + "step": 1076 + }, + { + "epoch": 0.8420641125879593, + "grad_norm": 0.42284348607063293, + "learning_rate": 7.176442185814312e-05, + "loss": 0.114, + "step": 1077 + }, + { + "epoch": 0.8428459734167318, + "grad_norm": 0.2716412842273712, + "learning_rate": 7.170296920821953e-05, + "loss": 0.1044, + "step": 1078 + }, + { + "epoch": 0.8436278342455042, + "grad_norm": 0.42033228278160095, + "learning_rate": 7.164147613718986e-05, + "loss": 0.1388, + "step": 1079 + }, + { + "epoch": 0.8444096950742768, + "grad_norm": 0.4796225130558014, + "learning_rate": 7.157994275958302e-05, + "loss": 0.1491, + "step": 1080 + }, + { + "epoch": 0.8451915559030493, + "grad_norm": 0.26454976201057434, + "learning_rate": 7.151836919000299e-05, + "loss": 0.0708, + "step": 1081 + }, + { + "epoch": 0.8459734167318217, + "grad_norm": 0.3553334176540375, + "learning_rate": 7.145675554312866e-05, + "loss": 0.1236, + "step": 1082 + }, + { + "epoch": 0.8467552775605942, + "grad_norm": 0.41896483302116394, + "learning_rate": 7.139510193371352e-05, + "loss": 0.1523, + "step": 1083 + }, + { + "epoch": 0.8475371383893667, + "grad_norm": 0.3911309838294983, + "learning_rate": 7.133340847658553e-05, + "loss": 0.1123, + "step": 1084 + }, + { + "epoch": 0.8483189992181391, + "grad_norm": 0.3871746361255646, + "learning_rate": 7.127167528664682e-05, + "loss": 0.1448, + "step": 1085 + }, + { + "epoch": 0.8491008600469117, + "grad_norm": 0.38101163506507874, + "learning_rate": 7.120990247887351e-05, + "loss": 0.1435, + "step": 1086 + }, + { + "epoch": 0.8498827208756842, + "grad_norm": 0.3440212309360504, + "learning_rate": 7.114809016831558e-05, + "loss": 0.111, + "step": 1087 + }, + { + "epoch": 0.8506645817044566, + "grad_norm": 0.34465304017066956, + "learning_rate": 7.108623847009651e-05, + "loss": 0.1375, + "step": 1088 + }, + { + "epoch": 0.8514464425332291, + "grad_norm": 0.3100002706050873, + "learning_rate": 7.10243474994132e-05, + "loss": 0.0996, + "step": 1089 + }, + { + "epoch": 0.8522283033620016, + "grad_norm": 0.2955577075481415, + "learning_rate": 7.096241737153562e-05, + "loss": 0.0878, + "step": 1090 + }, + { + "epoch": 0.853010164190774, + "grad_norm": 0.42580050230026245, + "learning_rate": 7.090044820180673e-05, + "loss": 0.0961, + "step": 1091 + }, + { + "epoch": 0.8537920250195465, + "grad_norm": 0.34347400069236755, + "learning_rate": 7.083844010564219e-05, + "loss": 0.1231, + "step": 1092 + }, + { + "epoch": 0.854573885848319, + "grad_norm": 0.3110819160938263, + "learning_rate": 7.077639319853013e-05, + "loss": 0.0847, + "step": 1093 + }, + { + "epoch": 0.8553557466770915, + "grad_norm": 0.4496898353099823, + "learning_rate": 7.071430759603103e-05, + 
"loss": 0.1247, + "step": 1094 + }, + { + "epoch": 0.856137607505864, + "grad_norm": 0.3124215602874756, + "learning_rate": 7.065218341377734e-05, + "loss": 0.0917, + "step": 1095 + }, + { + "epoch": 0.8569194683346364, + "grad_norm": 0.3862299621105194, + "learning_rate": 7.059002076747348e-05, + "loss": 0.103, + "step": 1096 + }, + { + "epoch": 0.8577013291634089, + "grad_norm": 0.36825454235076904, + "learning_rate": 7.05278197728954e-05, + "loss": 0.0993, + "step": 1097 + }, + { + "epoch": 0.8584831899921814, + "grad_norm": 0.4220404028892517, + "learning_rate": 7.046558054589055e-05, + "loss": 0.1358, + "step": 1098 + }, + { + "epoch": 0.8592650508209538, + "grad_norm": 0.39067772030830383, + "learning_rate": 7.040330320237752e-05, + "loss": 0.1061, + "step": 1099 + }, + { + "epoch": 0.8600469116497264, + "grad_norm": 0.27759233117103577, + "learning_rate": 7.034098785834598e-05, + "loss": 0.1062, + "step": 1100 + }, + { + "epoch": 0.8608287724784989, + "grad_norm": 0.3399001657962799, + "learning_rate": 7.027863462985628e-05, + "loss": 0.1167, + "step": 1101 + }, + { + "epoch": 0.8616106333072713, + "grad_norm": 0.43628373742103577, + "learning_rate": 7.02162436330394e-05, + "loss": 0.1563, + "step": 1102 + }, + { + "epoch": 0.8623924941360438, + "grad_norm": 0.5306029319763184, + "learning_rate": 7.015381498409661e-05, + "loss": 0.1653, + "step": 1103 + }, + { + "epoch": 0.8631743549648163, + "grad_norm": 0.3642573356628418, + "learning_rate": 7.009134879929936e-05, + "loss": 0.1072, + "step": 1104 + }, + { + "epoch": 0.8639562157935887, + "grad_norm": 0.3719395399093628, + "learning_rate": 7.002884519498895e-05, + "loss": 0.1017, + "step": 1105 + }, + { + "epoch": 0.8647380766223612, + "grad_norm": 0.4951043426990509, + "learning_rate": 6.99663042875764e-05, + "loss": 0.1516, + "step": 1106 + }, + { + "epoch": 0.8655199374511336, + "grad_norm": 0.4145095646381378, + "learning_rate": 6.990372619354224e-05, + "loss": 0.1188, + "step": 1107 + }, + { + "epoch": 0.8663017982799062, + "grad_norm": 0.31760677695274353, + "learning_rate": 6.98411110294362e-05, + "loss": 0.0957, + "step": 1108 + }, + { + "epoch": 0.8670836591086787, + "grad_norm": 0.48377615213394165, + "learning_rate": 6.977845891187708e-05, + "loss": 0.142, + "step": 1109 + }, + { + "epoch": 0.8678655199374511, + "grad_norm": 0.3468191921710968, + "learning_rate": 6.971576995755253e-05, + "loss": 0.0911, + "step": 1110 + }, + { + "epoch": 0.8686473807662236, + "grad_norm": 0.38117656111717224, + "learning_rate": 6.965304428321874e-05, + "loss": 0.1185, + "step": 1111 + }, + { + "epoch": 0.8694292415949961, + "grad_norm": 0.4549887478351593, + "learning_rate": 6.959028200570034e-05, + "loss": 0.1416, + "step": 1112 + }, + { + "epoch": 0.8702111024237685, + "grad_norm": 0.3793928027153015, + "learning_rate": 6.952748324189016e-05, + "loss": 0.1584, + "step": 1113 + }, + { + "epoch": 0.870992963252541, + "grad_norm": 0.430199533700943, + "learning_rate": 6.94646481087489e-05, + "loss": 0.1863, + "step": 1114 + }, + { + "epoch": 0.8717748240813136, + "grad_norm": 0.37007904052734375, + "learning_rate": 6.940177672330508e-05, + "loss": 0.1413, + "step": 1115 + }, + { + "epoch": 0.872556684910086, + "grad_norm": 0.29991209506988525, + "learning_rate": 6.933886920265468e-05, + "loss": 0.1143, + "step": 1116 + }, + { + "epoch": 0.8733385457388585, + "grad_norm": 0.36572265625, + "learning_rate": 6.9275925663961e-05, + "loss": 0.1286, + "step": 1117 + }, + { + "epoch": 0.874120406567631, + "grad_norm": 0.43926042318344116, + 
"learning_rate": 6.921294622445444e-05, + "loss": 0.1329, + "step": 1118 + }, + { + "epoch": 0.8749022673964034, + "grad_norm": 0.35618895292282104, + "learning_rate": 6.914993100143224e-05, + "loss": 0.1193, + "step": 1119 + }, + { + "epoch": 0.8756841282251759, + "grad_norm": 0.3571595847606659, + "learning_rate": 6.908688011225831e-05, + "loss": 0.105, + "step": 1120 + }, + { + "epoch": 0.8764659890539483, + "grad_norm": 0.40551918745040894, + "learning_rate": 6.902379367436296e-05, + "loss": 0.1064, + "step": 1121 + }, + { + "epoch": 0.8772478498827209, + "grad_norm": 0.33147695660591125, + "learning_rate": 6.896067180524273e-05, + "loss": 0.091, + "step": 1122 + }, + { + "epoch": 0.8780297107114934, + "grad_norm": 0.3313888609409332, + "learning_rate": 6.889751462246013e-05, + "loss": 0.1345, + "step": 1123 + }, + { + "epoch": 0.8788115715402658, + "grad_norm": 0.4676271378993988, + "learning_rate": 6.883432224364346e-05, + "loss": 0.1433, + "step": 1124 + }, + { + "epoch": 0.8795934323690383, + "grad_norm": 0.3496025502681732, + "learning_rate": 6.877109478648656e-05, + "loss": 0.1565, + "step": 1125 + }, + { + "epoch": 0.8803752931978108, + "grad_norm": 0.3045591115951538, + "learning_rate": 6.870783236874861e-05, + "loss": 0.1122, + "step": 1126 + }, + { + "epoch": 0.8811571540265832, + "grad_norm": 0.39115655422210693, + "learning_rate": 6.864453510825388e-05, + "loss": 0.1323, + "step": 1127 + }, + { + "epoch": 0.8819390148553558, + "grad_norm": 0.3827655613422394, + "learning_rate": 6.85812031228916e-05, + "loss": 0.1326, + "step": 1128 + }, + { + "epoch": 0.8827208756841283, + "grad_norm": 0.4689268469810486, + "learning_rate": 6.851783653061555e-05, + "loss": 0.1199, + "step": 1129 + }, + { + "epoch": 0.8835027365129007, + "grad_norm": 0.36318734288215637, + "learning_rate": 6.845443544944411e-05, + "loss": 0.0897, + "step": 1130 + }, + { + "epoch": 0.8842845973416732, + "grad_norm": 0.3593188524246216, + "learning_rate": 6.83909999974598e-05, + "loss": 0.118, + "step": 1131 + }, + { + "epoch": 0.8850664581704457, + "grad_norm": 0.2524106502532959, + "learning_rate": 6.832753029280913e-05, + "loss": 0.0675, + "step": 1132 + }, + { + "epoch": 0.8858483189992181, + "grad_norm": 0.3099701404571533, + "learning_rate": 6.826402645370256e-05, + "loss": 0.1163, + "step": 1133 + }, + { + "epoch": 0.8866301798279906, + "grad_norm": 0.4418666958808899, + "learning_rate": 6.820048859841393e-05, + "loss": 0.1369, + "step": 1134 + }, + { + "epoch": 0.887412040656763, + "grad_norm": 0.35631266236305237, + "learning_rate": 6.813691684528054e-05, + "loss": 0.0867, + "step": 1135 + }, + { + "epoch": 0.8881939014855356, + "grad_norm": 0.40591201186180115, + "learning_rate": 6.807331131270283e-05, + "loss": 0.124, + "step": 1136 + }, + { + "epoch": 0.8889757623143081, + "grad_norm": 0.4229952394962311, + "learning_rate": 6.800967211914409e-05, + "loss": 0.1235, + "step": 1137 + }, + { + "epoch": 0.8897576231430805, + "grad_norm": 0.4261278510093689, + "learning_rate": 6.794599938313041e-05, + "loss": 0.0969, + "step": 1138 + }, + { + "epoch": 0.890539483971853, + "grad_norm": 0.35794153809547424, + "learning_rate": 6.788229322325022e-05, + "loss": 0.1119, + "step": 1139 + }, + { + "epoch": 0.8913213448006255, + "grad_norm": 0.32628294825553894, + "learning_rate": 6.781855375815427e-05, + "loss": 0.094, + "step": 1140 + }, + { + "epoch": 0.8921032056293979, + "grad_norm": 0.4180498719215393, + "learning_rate": 6.775478110655535e-05, + "loss": 0.1502, + "step": 1141 + }, + { + "epoch": 
0.8928850664581705, + "grad_norm": 0.28836068511009216, + "learning_rate": 6.769097538722806e-05, + "loss": 0.1117, + "step": 1142 + }, + { + "epoch": 0.893666927286943, + "grad_norm": 0.3085721731185913, + "learning_rate": 6.762713671900853e-05, + "loss": 0.0979, + "step": 1143 + }, + { + "epoch": 0.8944487881157154, + "grad_norm": 0.3328137993812561, + "learning_rate": 6.756326522079433e-05, + "loss": 0.1306, + "step": 1144 + }, + { + "epoch": 0.8952306489444879, + "grad_norm": 0.4145255386829376, + "learning_rate": 6.74993610115441e-05, + "loss": 0.1007, + "step": 1145 + }, + { + "epoch": 0.8960125097732604, + "grad_norm": 0.31838345527648926, + "learning_rate": 6.743542421027751e-05, + "loss": 0.1043, + "step": 1146 + }, + { + "epoch": 0.8967943706020328, + "grad_norm": 0.3162778317928314, + "learning_rate": 6.737145493607482e-05, + "loss": 0.0856, + "step": 1147 + }, + { + "epoch": 0.8975762314308053, + "grad_norm": 0.44549447298049927, + "learning_rate": 6.730745330807684e-05, + "loss": 0.1045, + "step": 1148 + }, + { + "epoch": 0.8983580922595777, + "grad_norm": 0.3551749885082245, + "learning_rate": 6.724341944548459e-05, + "loss": 0.1258, + "step": 1149 + }, + { + "epoch": 0.8991399530883503, + "grad_norm": 0.3578115999698639, + "learning_rate": 6.717935346755915e-05, + "loss": 0.0974, + "step": 1150 + }, + { + "epoch": 0.8999218139171228, + "grad_norm": 0.3265388607978821, + "learning_rate": 6.711525549362144e-05, + "loss": 0.0956, + "step": 1151 + }, + { + "epoch": 0.9007036747458952, + "grad_norm": 0.30331680178642273, + "learning_rate": 6.705112564305194e-05, + "loss": 0.1004, + "step": 1152 + }, + { + "epoch": 0.9014855355746677, + "grad_norm": 0.3490830063819885, + "learning_rate": 6.698696403529049e-05, + "loss": 0.1187, + "step": 1153 + }, + { + "epoch": 0.9022673964034402, + "grad_norm": 0.2992035448551178, + "learning_rate": 6.69227707898361e-05, + "loss": 0.0808, + "step": 1154 + }, + { + "epoch": 0.9030492572322126, + "grad_norm": 0.4635206460952759, + "learning_rate": 6.685854602624668e-05, + "loss": 0.1041, + "step": 1155 + }, + { + "epoch": 0.9038311180609852, + "grad_norm": 0.3826647102832794, + "learning_rate": 6.679428986413889e-05, + "loss": 0.0935, + "step": 1156 + }, + { + "epoch": 0.9046129788897577, + "grad_norm": 0.4464735984802246, + "learning_rate": 6.673000242318782e-05, + "loss": 0.1552, + "step": 1157 + }, + { + "epoch": 0.9053948397185301, + "grad_norm": 0.4731329083442688, + "learning_rate": 6.666568382312684e-05, + "loss": 0.119, + "step": 1158 + }, + { + "epoch": 0.9061767005473026, + "grad_norm": 0.4805479645729065, + "learning_rate": 6.660133418374732e-05, + "loss": 0.146, + "step": 1159 + }, + { + "epoch": 0.9069585613760751, + "grad_norm": 0.34081944823265076, + "learning_rate": 6.653695362489846e-05, + "loss": 0.1157, + "step": 1160 + }, + { + "epoch": 0.9077404222048475, + "grad_norm": 0.38758766651153564, + "learning_rate": 6.647254226648711e-05, + "loss": 0.1181, + "step": 1161 + }, + { + "epoch": 0.90852228303362, + "grad_norm": 0.29969069361686707, + "learning_rate": 6.640810022847738e-05, + "loss": 0.1082, + "step": 1162 + }, + { + "epoch": 0.9093041438623924, + "grad_norm": 0.3649740219116211, + "learning_rate": 6.634362763089056e-05, + "loss": 0.0969, + "step": 1163 + }, + { + "epoch": 0.910086004691165, + "grad_norm": 0.24078141152858734, + "learning_rate": 6.627912459380487e-05, + "loss": 0.0791, + "step": 1164 + }, + { + "epoch": 0.9108678655199375, + "grad_norm": 0.41361549496650696, + "learning_rate": 6.621459123735522e-05, + 
"loss": 0.1202, + "step": 1165 + }, + { + "epoch": 0.9116497263487099, + "grad_norm": 0.31104376912117004, + "learning_rate": 6.615002768173299e-05, + "loss": 0.0979, + "step": 1166 + }, + { + "epoch": 0.9124315871774824, + "grad_norm": 0.2964945435523987, + "learning_rate": 6.608543404718578e-05, + "loss": 0.1185, + "step": 1167 + }, + { + "epoch": 0.9132134480062549, + "grad_norm": 0.37027430534362793, + "learning_rate": 6.60208104540172e-05, + "loss": 0.1269, + "step": 1168 + }, + { + "epoch": 0.9139953088350273, + "grad_norm": 0.40137842297554016, + "learning_rate": 6.595615702258676e-05, + "loss": 0.127, + "step": 1169 + }, + { + "epoch": 0.9147771696637998, + "grad_norm": 0.41014501452445984, + "learning_rate": 6.589147387330939e-05, + "loss": 0.0935, + "step": 1170 + }, + { + "epoch": 0.9155590304925724, + "grad_norm": 0.3405829071998596, + "learning_rate": 6.58267611266555e-05, + "loss": 0.1261, + "step": 1171 + }, + { + "epoch": 0.9163408913213448, + "grad_norm": 0.32811757922172546, + "learning_rate": 6.576201890315058e-05, + "loss": 0.1491, + "step": 1172 + }, + { + "epoch": 0.9171227521501173, + "grad_norm": 0.3566417098045349, + "learning_rate": 6.569724732337495e-05, + "loss": 0.1167, + "step": 1173 + }, + { + "epoch": 0.9179046129788898, + "grad_norm": 0.31817594170570374, + "learning_rate": 6.563244650796371e-05, + "loss": 0.1016, + "step": 1174 + }, + { + "epoch": 0.9186864738076622, + "grad_norm": 0.291289359331131, + "learning_rate": 6.556761657760635e-05, + "loss": 0.0951, + "step": 1175 + }, + { + "epoch": 0.9194683346364347, + "grad_norm": 0.2821560204029083, + "learning_rate": 6.550275765304661e-05, + "loss": 0.0743, + "step": 1176 + }, + { + "epoch": 0.9202501954652071, + "grad_norm": 0.39526405930519104, + "learning_rate": 6.543786985508223e-05, + "loss": 0.1109, + "step": 1177 + }, + { + "epoch": 0.9210320562939797, + "grad_norm": 0.44412076473236084, + "learning_rate": 6.537295330456469e-05, + "loss": 0.1296, + "step": 1178 + }, + { + "epoch": 0.9218139171227522, + "grad_norm": 0.3970738649368286, + "learning_rate": 6.53080081223991e-05, + "loss": 0.1113, + "step": 1179 + }, + { + "epoch": 0.9225957779515246, + "grad_norm": 0.4147748351097107, + "learning_rate": 6.524303442954378e-05, + "loss": 0.1096, + "step": 1180 + }, + { + "epoch": 0.9233776387802971, + "grad_norm": 0.37349894642829895, + "learning_rate": 6.517803234701025e-05, + "loss": 0.1036, + "step": 1181 + }, + { + "epoch": 0.9241594996090696, + "grad_norm": 0.35803303122520447, + "learning_rate": 6.511300199586289e-05, + "loss": 0.1096, + "step": 1182 + }, + { + "epoch": 0.924941360437842, + "grad_norm": 0.5214382410049438, + "learning_rate": 6.504794349721866e-05, + "loss": 0.1325, + "step": 1183 + }, + { + "epoch": 0.9257232212666145, + "grad_norm": 0.40093794465065, + "learning_rate": 6.498285697224703e-05, + "loss": 0.1131, + "step": 1184 + }, + { + "epoch": 0.9265050820953871, + "grad_norm": 0.39705073833465576, + "learning_rate": 6.491774254216963e-05, + "loss": 0.109, + "step": 1185 + }, + { + "epoch": 0.9272869429241595, + "grad_norm": 0.3166132867336273, + "learning_rate": 6.485260032826005e-05, + "loss": 0.1104, + "step": 1186 + }, + { + "epoch": 0.928068803752932, + "grad_norm": 0.278113454580307, + "learning_rate": 6.478743045184365e-05, + "loss": 0.093, + "step": 1187 + }, + { + "epoch": 0.9288506645817045, + "grad_norm": 0.3798607587814331, + "learning_rate": 6.472223303429727e-05, + "loss": 0.1601, + "step": 1188 + }, + { + "epoch": 0.9296325254104769, + "grad_norm": 
0.43413954973220825, + "learning_rate": 6.465700819704913e-05, + "loss": 0.1416, + "step": 1189 + }, + { + "epoch": 0.9304143862392494, + "grad_norm": 0.3416893780231476, + "learning_rate": 6.459175606157842e-05, + "loss": 0.1145, + "step": 1190 + }, + { + "epoch": 0.9311962470680218, + "grad_norm": 0.33215364813804626, + "learning_rate": 6.452647674941524e-05, + "loss": 0.1113, + "step": 1191 + }, + { + "epoch": 0.9319781078967944, + "grad_norm": 0.34538665413856506, + "learning_rate": 6.446117038214027e-05, + "loss": 0.09, + "step": 1192 + }, + { + "epoch": 0.9327599687255669, + "grad_norm": 0.4208803176879883, + "learning_rate": 6.439583708138459e-05, + "loss": 0.1198, + "step": 1193 + }, + { + "epoch": 0.9335418295543393, + "grad_norm": 0.35723745822906494, + "learning_rate": 6.433047696882945e-05, + "loss": 0.1337, + "step": 1194 + }, + { + "epoch": 0.9343236903831118, + "grad_norm": 0.1974477916955948, + "learning_rate": 6.426509016620603e-05, + "loss": 0.069, + "step": 1195 + }, + { + "epoch": 0.9351055512118843, + "grad_norm": 0.3032665252685547, + "learning_rate": 6.419967679529522e-05, + "loss": 0.1063, + "step": 1196 + }, + { + "epoch": 0.9358874120406567, + "grad_norm": 0.4309205114841461, + "learning_rate": 6.413423697792737e-05, + "loss": 0.1138, + "step": 1197 + }, + { + "epoch": 0.9366692728694292, + "grad_norm": 0.31886643171310425, + "learning_rate": 6.406877083598213e-05, + "loss": 0.1061, + "step": 1198 + }, + { + "epoch": 0.9374511336982018, + "grad_norm": 0.45376572012901306, + "learning_rate": 6.400327849138814e-05, + "loss": 0.1211, + "step": 1199 + }, + { + "epoch": 0.9382329945269742, + "grad_norm": 0.37083959579467773, + "learning_rate": 6.393776006612288e-05, + "loss": 0.1201, + "step": 1200 + }, + { + "epoch": 0.9382329945269742, + "eval_loss": 0.12676353752613068, + "eval_runtime": 13.3499, + "eval_samples_per_second": 3.895, + "eval_steps_per_second": 0.974, + "step": 1200 + }, + { + "epoch": 0.9390148553557467, + "grad_norm": 0.47506844997406006, + "learning_rate": 6.387221568221238e-05, + "loss": 0.1607, + "step": 1201 + }, + { + "epoch": 0.9397967161845192, + "grad_norm": 0.4289664030075073, + "learning_rate": 6.380664546173101e-05, + "loss": 0.1491, + "step": 1202 + }, + { + "epoch": 0.9405785770132916, + "grad_norm": 0.3842359185218811, + "learning_rate": 6.374104952680125e-05, + "loss": 0.1403, + "step": 1203 + }, + { + "epoch": 0.9413604378420641, + "grad_norm": 0.36879363656044006, + "learning_rate": 6.367542799959352e-05, + "loss": 0.1256, + "step": 1204 + }, + { + "epoch": 0.9421422986708365, + "grad_norm": 0.5137174725532532, + "learning_rate": 6.360978100232587e-05, + "loss": 0.1216, + "step": 1205 + }, + { + "epoch": 0.9429241594996091, + "grad_norm": 0.41332346200942993, + "learning_rate": 6.35441086572638e-05, + "loss": 0.1551, + "step": 1206 + }, + { + "epoch": 0.9437060203283816, + "grad_norm": 0.3252861201763153, + "learning_rate": 6.347841108672e-05, + "loss": 0.1116, + "step": 1207 + }, + { + "epoch": 0.944487881157154, + "grad_norm": 0.30747750401496887, + "learning_rate": 6.341268841305415e-05, + "loss": 0.1039, + "step": 1208 + }, + { + "epoch": 0.9452697419859265, + "grad_norm": 0.3144964575767517, + "learning_rate": 6.334694075867269e-05, + "loss": 0.1119, + "step": 1209 + }, + { + "epoch": 0.946051602814699, + "grad_norm": 0.31715381145477295, + "learning_rate": 6.328116824602859e-05, + "loss": 0.105, + "step": 1210 + }, + { + "epoch": 0.9468334636434714, + "grad_norm": 0.3612911105155945, + "learning_rate": 6.321537099762114e-05, + 
"loss": 0.1276, + "step": 1211 + }, + { + "epoch": 0.947615324472244, + "grad_norm": 0.40377581119537354, + "learning_rate": 6.314954913599563e-05, + "loss": 0.1227, + "step": 1212 + }, + { + "epoch": 0.9483971853010165, + "grad_norm": 0.35488733649253845, + "learning_rate": 6.308370278374325e-05, + "loss": 0.1183, + "step": 1213 + }, + { + "epoch": 0.9491790461297889, + "grad_norm": 0.4154433012008667, + "learning_rate": 6.301783206350076e-05, + "loss": 0.137, + "step": 1214 + }, + { + "epoch": 0.9499609069585614, + "grad_norm": 0.3620409667491913, + "learning_rate": 6.295193709795037e-05, + "loss": 0.1155, + "step": 1215 + }, + { + "epoch": 0.9507427677873339, + "grad_norm": 0.4153326451778412, + "learning_rate": 6.288601800981942e-05, + "loss": 0.1002, + "step": 1216 + }, + { + "epoch": 0.9515246286161063, + "grad_norm": 0.38747620582580566, + "learning_rate": 6.282007492188011e-05, + "loss": 0.1343, + "step": 1217 + }, + { + "epoch": 0.9523064894448788, + "grad_norm": 0.3720448911190033, + "learning_rate": 6.275410795694943e-05, + "loss": 0.0982, + "step": 1218 + }, + { + "epoch": 0.9530883502736512, + "grad_norm": 0.27825435996055603, + "learning_rate": 6.268811723788877e-05, + "loss": 0.0688, + "step": 1219 + }, + { + "epoch": 0.9538702111024238, + "grad_norm": 0.4155515730381012, + "learning_rate": 6.262210288760383e-05, + "loss": 0.1256, + "step": 1220 + }, + { + "epoch": 0.9546520719311963, + "grad_norm": 0.26842448115348816, + "learning_rate": 6.255606502904429e-05, + "loss": 0.1326, + "step": 1221 + }, + { + "epoch": 0.9554339327599687, + "grad_norm": 0.377493679523468, + "learning_rate": 6.249000378520358e-05, + "loss": 0.1258, + "step": 1222 + }, + { + "epoch": 0.9562157935887412, + "grad_norm": 0.3844210207462311, + "learning_rate": 6.242391927911872e-05, + "loss": 0.1402, + "step": 1223 + }, + { + "epoch": 0.9569976544175137, + "grad_norm": 0.4269979000091553, + "learning_rate": 6.235781163387007e-05, + "loss": 0.1253, + "step": 1224 + }, + { + "epoch": 0.9577795152462861, + "grad_norm": 0.4109889566898346, + "learning_rate": 6.229168097258106e-05, + "loss": 0.1343, + "step": 1225 + }, + { + "epoch": 0.9585613760750586, + "grad_norm": 0.3345010578632355, + "learning_rate": 6.2225527418418e-05, + "loss": 0.0976, + "step": 1226 + }, + { + "epoch": 0.9593432369038312, + "grad_norm": 0.45198965072631836, + "learning_rate": 6.21593510945898e-05, + "loss": 0.1088, + "step": 1227 + }, + { + "epoch": 0.9601250977326036, + "grad_norm": 0.3664204180240631, + "learning_rate": 6.209315212434783e-05, + "loss": 0.0907, + "step": 1228 + }, + { + "epoch": 0.9609069585613761, + "grad_norm": 0.3774789571762085, + "learning_rate": 6.202693063098561e-05, + "loss": 0.1208, + "step": 1229 + }, + { + "epoch": 0.9616888193901486, + "grad_norm": 0.3721179664134979, + "learning_rate": 6.196068673783863e-05, + "loss": 0.1295, + "step": 1230 + }, + { + "epoch": 0.962470680218921, + "grad_norm": 0.3367028534412384, + "learning_rate": 6.189442056828407e-05, + "loss": 0.1699, + "step": 1231 + }, + { + "epoch": 0.9632525410476935, + "grad_norm": 0.3180219531059265, + "learning_rate": 6.182813224574061e-05, + "loss": 0.1318, + "step": 1232 + }, + { + "epoch": 0.9640344018764659, + "grad_norm": 0.357023686170578, + "learning_rate": 6.176182189366819e-05, + "loss": 0.1029, + "step": 1233 + }, + { + "epoch": 0.9648162627052385, + "grad_norm": 0.34663912653923035, + "learning_rate": 6.169548963556779e-05, + "loss": 0.1013, + "step": 1234 + }, + { + "epoch": 0.965598123534011, + "grad_norm": 0.3682876527309418, 
+ "learning_rate": 6.16291355949812e-05, + "loss": 0.0902, + "step": 1235 + }, + { + "epoch": 0.9663799843627834, + "grad_norm": 0.3583768904209137, + "learning_rate": 6.156275989549072e-05, + "loss": 0.092, + "step": 1236 + }, + { + "epoch": 0.9671618451915559, + "grad_norm": 0.3423864543437958, + "learning_rate": 6.149636266071904e-05, + "loss": 0.1074, + "step": 1237 + }, + { + "epoch": 0.9679437060203284, + "grad_norm": 0.42394763231277466, + "learning_rate": 6.142994401432896e-05, + "loss": 0.1143, + "step": 1238 + }, + { + "epoch": 0.9687255668491008, + "grad_norm": 0.3888026177883148, + "learning_rate": 6.136350408002314e-05, + "loss": 0.1147, + "step": 1239 + }, + { + "epoch": 0.9695074276778733, + "grad_norm": 0.3200819492340088, + "learning_rate": 6.12970429815439e-05, + "loss": 0.1197, + "step": 1240 + }, + { + "epoch": 0.9702892885066459, + "grad_norm": 0.37223726511001587, + "learning_rate": 6.123056084267296e-05, + "loss": 0.0876, + "step": 1241 + }, + { + "epoch": 0.9710711493354183, + "grad_norm": 0.39913463592529297, + "learning_rate": 6.116405778723123e-05, + "loss": 0.1182, + "step": 1242 + }, + { + "epoch": 0.9718530101641908, + "grad_norm": 0.47016170620918274, + "learning_rate": 6.109753393907862e-05, + "loss": 0.1375, + "step": 1243 + }, + { + "epoch": 0.9726348709929633, + "grad_norm": 0.38198941946029663, + "learning_rate": 6.1030989422113715e-05, + "loss": 0.1543, + "step": 1244 + }, + { + "epoch": 0.9734167318217357, + "grad_norm": 0.3025572597980499, + "learning_rate": 6.09644243602736e-05, + "loss": 0.0988, + "step": 1245 + }, + { + "epoch": 0.9741985926505082, + "grad_norm": 0.4157261848449707, + "learning_rate": 6.0897838877533644e-05, + "loss": 0.1167, + "step": 1246 + }, + { + "epoch": 0.9749804534792806, + "grad_norm": 0.5874128341674805, + "learning_rate": 6.0831233097907236e-05, + "loss": 0.1105, + "step": 1247 + }, + { + "epoch": 0.9757623143080532, + "grad_norm": 0.4182235300540924, + "learning_rate": 6.0764607145445576e-05, + "loss": 0.1205, + "step": 1248 + }, + { + "epoch": 0.9765441751368257, + "grad_norm": 0.41899439692497253, + "learning_rate": 6.069796114423743e-05, + "loss": 0.1544, + "step": 1249 + }, + { + "epoch": 0.9773260359655981, + "grad_norm": 0.3662065863609314, + "learning_rate": 6.0631295218408926e-05, + "loss": 0.1081, + "step": 1250 + }, + { + "epoch": 0.9781078967943706, + "grad_norm": 0.3846765458583832, + "learning_rate": 6.056460949212324e-05, + "loss": 0.1128, + "step": 1251 + }, + { + "epoch": 0.9788897576231431, + "grad_norm": 0.31117817759513855, + "learning_rate": 6.049790408958048e-05, + "loss": 0.1114, + "step": 1252 + }, + { + "epoch": 0.9796716184519155, + "grad_norm": 0.359997421503067, + "learning_rate": 6.043117913501741e-05, + "loss": 0.1078, + "step": 1253 + }, + { + "epoch": 0.980453479280688, + "grad_norm": 0.30968189239501953, + "learning_rate": 6.036443475270717e-05, + "loss": 0.1071, + "step": 1254 + }, + { + "epoch": 0.9812353401094606, + "grad_norm": 0.378490686416626, + "learning_rate": 6.029767106695909e-05, + "loss": 0.1085, + "step": 1255 + }, + { + "epoch": 0.982017200938233, + "grad_norm": 0.32061898708343506, + "learning_rate": 6.0230888202118473e-05, + "loss": 0.108, + "step": 1256 + }, + { + "epoch": 0.9827990617670055, + "grad_norm": 0.42287373542785645, + "learning_rate": 6.0164086282566326e-05, + "loss": 0.129, + "step": 1257 + }, + { + "epoch": 0.983580922595778, + "grad_norm": 0.3686187267303467, + "learning_rate": 6.009726543271914e-05, + "loss": 0.0982, + "step": 1258 + }, + { + "epoch": 
0.9843627834245504, + "grad_norm": 0.41652533411979675, + "learning_rate": 6.0030425777028685e-05, + "loss": 0.1222, + "step": 1259 + }, + { + "epoch": 0.9851446442533229, + "grad_norm": 0.33978602290153503, + "learning_rate": 5.996356743998175e-05, + "loss": 0.1171, + "step": 1260 + }, + { + "epoch": 0.9859265050820953, + "grad_norm": 0.2727195918560028, + "learning_rate": 5.9896690546099906e-05, + "loss": 0.0973, + "step": 1261 + }, + { + "epoch": 0.9867083659108679, + "grad_norm": 0.3532811105251312, + "learning_rate": 5.982979521993929e-05, + "loss": 0.1165, + "step": 1262 + }, + { + "epoch": 0.9874902267396404, + "grad_norm": 0.43228256702423096, + "learning_rate": 5.9762881586090344e-05, + "loss": 0.1413, + "step": 1263 + }, + { + "epoch": 0.9882720875684128, + "grad_norm": 0.34505805373191833, + "learning_rate": 5.969594976917768e-05, + "loss": 0.1569, + "step": 1264 + }, + { + "epoch": 0.9890539483971853, + "grad_norm": 0.40569958090782166, + "learning_rate": 5.962899989385969e-05, + "loss": 0.1049, + "step": 1265 + }, + { + "epoch": 0.9898358092259578, + "grad_norm": 0.39335259795188904, + "learning_rate": 5.9562032084828457e-05, + "loss": 0.1012, + "step": 1266 + }, + { + "epoch": 0.9906176700547302, + "grad_norm": 0.39652782678604126, + "learning_rate": 5.9495046466809444e-05, + "loss": 0.1355, + "step": 1267 + }, + { + "epoch": 0.9913995308835027, + "grad_norm": 0.32174959778785706, + "learning_rate": 5.942804316456126e-05, + "loss": 0.1018, + "step": 1268 + }, + { + "epoch": 0.9921813917122753, + "grad_norm": 0.34195002913475037, + "learning_rate": 5.936102230287553e-05, + "loss": 0.1363, + "step": 1269 + }, + { + "epoch": 0.9929632525410477, + "grad_norm": 0.306333065032959, + "learning_rate": 5.929398400657647e-05, + "loss": 0.0909, + "step": 1270 + }, + { + "epoch": 0.9937451133698202, + "grad_norm": 0.4631345272064209, + "learning_rate": 5.9226928400520854e-05, + "loss": 0.1437, + "step": 1271 + }, + { + "epoch": 0.9945269741985927, + "grad_norm": 0.42312151193618774, + "learning_rate": 5.9159855609597645e-05, + "loss": 0.1745, + "step": 1272 + }, + { + "epoch": 0.9953088350273651, + "grad_norm": 0.34158191084861755, + "learning_rate": 5.9092765758727854e-05, + "loss": 0.1221, + "step": 1273 + }, + { + "epoch": 0.9960906958561376, + "grad_norm": 0.38159799575805664, + "learning_rate": 5.902565897286425e-05, + "loss": 0.103, + "step": 1274 + }, + { + "epoch": 0.99687255668491, + "grad_norm": 0.3136163651943207, + "learning_rate": 5.8958535376991106e-05, + "loss": 0.1218, + "step": 1275 + }, + { + "epoch": 0.9976544175136826, + "grad_norm": 0.3725820481777191, + "learning_rate": 5.8891395096124067e-05, + "loss": 0.1232, + "step": 1276 + }, + { + "epoch": 0.9984362783424551, + "grad_norm": 0.3924759030342102, + "learning_rate": 5.882423825530981e-05, + "loss": 0.1176, + "step": 1277 + }, + { + "epoch": 0.9992181391712275, + "grad_norm": 0.35742858052253723, + "learning_rate": 5.875706497962584e-05, + "loss": 0.1124, + "step": 1278 + }, + { + "epoch": 1.0, + "grad_norm": 0.42548516392707825, + "learning_rate": 5.8689875394180335e-05, + "loss": 0.14, + "step": 1279 + }, + { + "epoch": 1.0007818608287724, + "grad_norm": 0.3003665506839752, + "learning_rate": 5.8622669624111804e-05, + "loss": 0.0545, + "step": 1280 + }, + { + "epoch": 1.001563721657545, + "grad_norm": 0.24529042840003967, + "learning_rate": 5.855544779458887e-05, + "loss": 0.0673, + "step": 1281 + }, + { + "epoch": 1.0023455824863174, + "grad_norm": 0.3029482066631317, + "learning_rate": 5.8488210030810153e-05, + 
"loss": 0.0856, + "step": 1282 + }, + { + "epoch": 1.0031274433150899, + "grad_norm": 0.3220517933368683, + "learning_rate": 5.8420956458003876e-05, + "loss": 0.0782, + "step": 1283 + }, + { + "epoch": 1.0039093041438625, + "grad_norm": 0.37451228499412537, + "learning_rate": 5.835368720142773e-05, + "loss": 0.1183, + "step": 1284 + }, + { + "epoch": 1.0046911649726349, + "grad_norm": 0.33108869194984436, + "learning_rate": 5.828640238636861e-05, + "loss": 0.1057, + "step": 1285 + }, + { + "epoch": 1.0054730258014073, + "grad_norm": 0.2879885137081146, + "learning_rate": 5.82191021381424e-05, + "loss": 0.0831, + "step": 1286 + }, + { + "epoch": 1.00625488663018, + "grad_norm": 0.40735185146331787, + "learning_rate": 5.815178658209372e-05, + "loss": 0.1013, + "step": 1287 + }, + { + "epoch": 1.0070367474589523, + "grad_norm": 0.31678634881973267, + "learning_rate": 5.808445584359572e-05, + "loss": 0.0781, + "step": 1288 + }, + { + "epoch": 1.0078186082877247, + "grad_norm": 0.3188799023628235, + "learning_rate": 5.801711004804979e-05, + "loss": 0.0491, + "step": 1289 + }, + { + "epoch": 1.0086004691164974, + "grad_norm": 0.3288949728012085, + "learning_rate": 5.7949749320885394e-05, + "loss": 0.0748, + "step": 1290 + }, + { + "epoch": 1.0093823299452698, + "grad_norm": 0.3360097110271454, + "learning_rate": 5.7882373787559775e-05, + "loss": 0.0603, + "step": 1291 + }, + { + "epoch": 1.0101641907740422, + "grad_norm": 0.34202682971954346, + "learning_rate": 5.781498357355779e-05, + "loss": 0.0639, + "step": 1292 + }, + { + "epoch": 1.0109460516028146, + "grad_norm": 0.3513847589492798, + "learning_rate": 5.7747578804391624e-05, + "loss": 0.0636, + "step": 1293 + }, + { + "epoch": 1.0117279124315872, + "grad_norm": 0.3393630087375641, + "learning_rate": 5.768015960560055e-05, + "loss": 0.0515, + "step": 1294 + }, + { + "epoch": 1.0125097732603596, + "grad_norm": 0.4196714162826538, + "learning_rate": 5.761272610275074e-05, + "loss": 0.0524, + "step": 1295 + }, + { + "epoch": 1.013291634089132, + "grad_norm": 0.33013924956321716, + "learning_rate": 5.754527842143498e-05, + "loss": 0.0503, + "step": 1296 + }, + { + "epoch": 1.0140734949179047, + "grad_norm": 0.3442160189151764, + "learning_rate": 5.747781668727251e-05, + "loss": 0.0461, + "step": 1297 + }, + { + "epoch": 1.014855355746677, + "grad_norm": 0.3586715757846832, + "learning_rate": 5.741034102590871e-05, + "loss": 0.0656, + "step": 1298 + }, + { + "epoch": 1.0156372165754495, + "grad_norm": 0.3679417073726654, + "learning_rate": 5.73428515630149e-05, + "loss": 0.0644, + "step": 1299 + }, + { + "epoch": 1.016419077404222, + "grad_norm": 0.3188945949077606, + "learning_rate": 5.72753484242881e-05, + "loss": 0.0712, + "step": 1300 + }, + { + "epoch": 1.0172009382329945, + "grad_norm": 0.35757797956466675, + "learning_rate": 5.72078317354508e-05, + "loss": 0.0607, + "step": 1301 + }, + { + "epoch": 1.017982799061767, + "grad_norm": 0.4698977768421173, + "learning_rate": 5.714030162225077e-05, + "loss": 0.0533, + "step": 1302 + }, + { + "epoch": 1.0187646598905395, + "grad_norm": 0.3897135853767395, + "learning_rate": 5.7072758210460716e-05, + "loss": 0.0516, + "step": 1303 + }, + { + "epoch": 1.019546520719312, + "grad_norm": 0.6064049005508423, + "learning_rate": 5.700520162587817e-05, + "loss": 0.1096, + "step": 1304 + }, + { + "epoch": 1.0203283815480844, + "grad_norm": 0.3872586488723755, + "learning_rate": 5.693763199432516e-05, + "loss": 0.0424, + "step": 1305 + }, + { + "epoch": 1.021110242376857, + "grad_norm": 
0.3485041558742523, + "learning_rate": 5.687004944164801e-05, + "loss": 0.0542, + "step": 1306 + }, + { + "epoch": 1.0218921032056294, + "grad_norm": 0.38680729269981384, + "learning_rate": 5.680245409371716e-05, + "loss": 0.0648, + "step": 1307 + }, + { + "epoch": 1.0226739640344018, + "grad_norm": 0.3012762665748596, + "learning_rate": 5.673484607642684e-05, + "loss": 0.0547, + "step": 1308 + }, + { + "epoch": 1.0234558248631744, + "grad_norm": 0.41317009925842285, + "learning_rate": 5.666722551569484e-05, + "loss": 0.0519, + "step": 1309 + }, + { + "epoch": 1.0242376856919468, + "grad_norm": 0.3628830313682556, + "learning_rate": 5.6599592537462406e-05, + "loss": 0.0963, + "step": 1310 + }, + { + "epoch": 1.0250195465207192, + "grad_norm": 0.39497920870780945, + "learning_rate": 5.653194726769382e-05, + "loss": 0.072, + "step": 1311 + }, + { + "epoch": 1.0258014073494919, + "grad_norm": 0.4044725298881531, + "learning_rate": 5.646428983237633e-05, + "loss": 0.1094, + "step": 1312 + }, + { + "epoch": 1.0265832681782643, + "grad_norm": 0.3730235695838928, + "learning_rate": 5.63966203575198e-05, + "loss": 0.0848, + "step": 1313 + }, + { + "epoch": 1.0273651290070367, + "grad_norm": 0.2993086874485016, + "learning_rate": 5.63289389691565e-05, + "loss": 0.0731, + "step": 1314 + }, + { + "epoch": 1.0281469898358093, + "grad_norm": 0.35841402411460876, + "learning_rate": 5.6261245793340944e-05, + "loss": 0.0795, + "step": 1315 + }, + { + "epoch": 1.0289288506645817, + "grad_norm": 0.39184918999671936, + "learning_rate": 5.619354095614955e-05, + "loss": 0.0848, + "step": 1316 + }, + { + "epoch": 1.0297107114933541, + "grad_norm": 0.30408361554145813, + "learning_rate": 5.612582458368047e-05, + "loss": 0.0419, + "step": 1317 + }, + { + "epoch": 1.0304925723221268, + "grad_norm": 0.33947086334228516, + "learning_rate": 5.60580968020534e-05, + "loss": 0.0504, + "step": 1318 + }, + { + "epoch": 1.0312744331508992, + "grad_norm": 0.31831854581832886, + "learning_rate": 5.599035773740915e-05, + "loss": 0.0588, + "step": 1319 + }, + { + "epoch": 1.0320562939796716, + "grad_norm": 0.38083136081695557, + "learning_rate": 5.592260751590967e-05, + "loss": 0.0768, + "step": 1320 + }, + { + "epoch": 1.0328381548084442, + "grad_norm": 0.31160804629325867, + "learning_rate": 5.5854846263737625e-05, + "loss": 0.1107, + "step": 1321 + }, + { + "epoch": 1.0336200156372166, + "grad_norm": 0.3518250584602356, + "learning_rate": 5.578707410709626e-05, + "loss": 0.0498, + "step": 1322 + }, + { + "epoch": 1.034401876465989, + "grad_norm": 0.43158453702926636, + "learning_rate": 5.571929117220911e-05, + "loss": 0.0467, + "step": 1323 + }, + { + "epoch": 1.0351837372947614, + "grad_norm": 0.47497236728668213, + "learning_rate": 5.5651497585319756e-05, + "loss": 0.0749, + "step": 1324 + }, + { + "epoch": 1.035965598123534, + "grad_norm": 0.32614821195602417, + "learning_rate": 5.558369347269169e-05, + "loss": 0.0593, + "step": 1325 + }, + { + "epoch": 1.0367474589523065, + "grad_norm": 0.30701157450675964, + "learning_rate": 5.551587896060795e-05, + "loss": 0.0771, + "step": 1326 + }, + { + "epoch": 1.0375293197810789, + "grad_norm": 0.394361674785614, + "learning_rate": 5.544805417537096e-05, + "loss": 0.0752, + "step": 1327 + }, + { + "epoch": 1.0383111806098515, + "grad_norm": 0.32267239689826965, + "learning_rate": 5.538021924330225e-05, + "loss": 0.0449, + "step": 1328 + }, + { + "epoch": 1.039093041438624, + "grad_norm": 0.3949149250984192, + "learning_rate": 5.531237429074231e-05, + "loss": 0.0756, + "step": 
1329 + }, + { + "epoch": 1.0398749022673963, + "grad_norm": 0.3293861448764801, + "learning_rate": 5.5244519444050234e-05, + "loss": 0.1048, + "step": 1330 + }, + { + "epoch": 1.040656763096169, + "grad_norm": 0.3384082019329071, + "learning_rate": 5.517665482960359e-05, + "loss": 0.0579, + "step": 1331 + }, + { + "epoch": 1.0414386239249414, + "grad_norm": 0.29830190539360046, + "learning_rate": 5.510878057379809e-05, + "loss": 0.0593, + "step": 1332 + }, + { + "epoch": 1.0422204847537138, + "grad_norm": 0.5683103203773499, + "learning_rate": 5.504089680304745e-05, + "loss": 0.0614, + "step": 1333 + }, + { + "epoch": 1.0430023455824864, + "grad_norm": 0.3690869212150574, + "learning_rate": 5.497300364378306e-05, + "loss": 0.0643, + "step": 1334 + }, + { + "epoch": 1.0437842064112588, + "grad_norm": 0.32530248165130615, + "learning_rate": 5.490510122245384e-05, + "loss": 0.0569, + "step": 1335 + }, + { + "epoch": 1.0445660672400312, + "grad_norm": 0.4188139736652374, + "learning_rate": 5.483718966552594e-05, + "loss": 0.1026, + "step": 1336 + }, + { + "epoch": 1.0453479280688038, + "grad_norm": 0.4417498707771301, + "learning_rate": 5.47692690994825e-05, + "loss": 0.0764, + "step": 1337 + }, + { + "epoch": 1.0461297888975762, + "grad_norm": 0.312726229429245, + "learning_rate": 5.47013396508235e-05, + "loss": 0.0339, + "step": 1338 + }, + { + "epoch": 1.0469116497263486, + "grad_norm": 0.46260857582092285, + "learning_rate": 5.463340144606541e-05, + "loss": 0.103, + "step": 1339 + }, + { + "epoch": 1.0476935105551213, + "grad_norm": 0.4564020335674286, + "learning_rate": 5.456545461174102e-05, + "loss": 0.0427, + "step": 1340 + }, + { + "epoch": 1.0484753713838937, + "grad_norm": 0.2873257100582123, + "learning_rate": 5.449749927439922e-05, + "loss": 0.0531, + "step": 1341 + }, + { + "epoch": 1.049257232212666, + "grad_norm": 0.4216899871826172, + "learning_rate": 5.44295355606047e-05, + "loss": 0.0673, + "step": 1342 + }, + { + "epoch": 1.0500390930414387, + "grad_norm": 0.4410907030105591, + "learning_rate": 5.436156359693777e-05, + "loss": 0.0768, + "step": 1343 + }, + { + "epoch": 1.0508209538702111, + "grad_norm": 0.4740668833255768, + "learning_rate": 5.42935835099941e-05, + "loss": 0.0923, + "step": 1344 + }, + { + "epoch": 1.0516028146989835, + "grad_norm": 0.34221503138542175, + "learning_rate": 5.422559542638448e-05, + "loss": 0.0407, + "step": 1345 + }, + { + "epoch": 1.0523846755277562, + "grad_norm": 0.35219067335128784, + "learning_rate": 5.415759947273462e-05, + "loss": 0.0716, + "step": 1346 + }, + { + "epoch": 1.0531665363565286, + "grad_norm": 0.3162952959537506, + "learning_rate": 5.4089595775684886e-05, + "loss": 0.0487, + "step": 1347 + }, + { + "epoch": 1.053948397185301, + "grad_norm": 0.3372567594051361, + "learning_rate": 5.4021584461890015e-05, + "loss": 0.0387, + "step": 1348 + }, + { + "epoch": 1.0547302580140734, + "grad_norm": 0.40237340331077576, + "learning_rate": 5.395356565801899e-05, + "loss": 0.0505, + "step": 1349 + }, + { + "epoch": 1.055512118842846, + "grad_norm": 0.3668085038661957, + "learning_rate": 5.388553949075471e-05, + "loss": 0.0367, + "step": 1350 + }, + { + "epoch": 1.0562939796716184, + "grad_norm": 0.4131850302219391, + "learning_rate": 5.3817506086793813e-05, + "loss": 0.058, + "step": 1351 + }, + { + "epoch": 1.0570758405003908, + "grad_norm": 0.43202531337738037, + "learning_rate": 5.374946557284638e-05, + "loss": 0.066, + "step": 1352 + }, + { + "epoch": 1.0578577013291635, + "grad_norm": 0.41850730776786804, + "learning_rate": 
5.368141807563578e-05, + "loss": 0.0603, + "step": 1353 + }, + { + "epoch": 1.0586395621579359, + "grad_norm": 0.5610138773918152, + "learning_rate": 5.361336372189835e-05, + "loss": 0.0686, + "step": 1354 + }, + { + "epoch": 1.0594214229867083, + "grad_norm": 0.4340571165084839, + "learning_rate": 5.35453026383832e-05, + "loss": 0.0597, + "step": 1355 + }, + { + "epoch": 1.060203283815481, + "grad_norm": 0.526792049407959, + "learning_rate": 5.3477234951852e-05, + "loss": 0.0695, + "step": 1356 + }, + { + "epoch": 1.0609851446442533, + "grad_norm": 0.3665534555912018, + "learning_rate": 5.34091607890787e-05, + "loss": 0.0816, + "step": 1357 + }, + { + "epoch": 1.0617670054730257, + "grad_norm": 0.29408949613571167, + "learning_rate": 5.334108027684931e-05, + "loss": 0.0321, + "step": 1358 + }, + { + "epoch": 1.0625488663017983, + "grad_norm": 0.5186415910720825, + "learning_rate": 5.327299354196167e-05, + "loss": 0.1035, + "step": 1359 + }, + { + "epoch": 1.0633307271305708, + "grad_norm": 0.3105923533439636, + "learning_rate": 5.320490071122519e-05, + "loss": 0.0377, + "step": 1360 + }, + { + "epoch": 1.0641125879593432, + "grad_norm": 0.3671916723251343, + "learning_rate": 5.313680191146071e-05, + "loss": 0.0622, + "step": 1361 + }, + { + "epoch": 1.0648944487881158, + "grad_norm": 0.38357749581336975, + "learning_rate": 5.306869726950007e-05, + "loss": 0.0636, + "step": 1362 + }, + { + "epoch": 1.0656763096168882, + "grad_norm": 0.41577282547950745, + "learning_rate": 5.300058691218607e-05, + "loss": 0.0387, + "step": 1363 + }, + { + "epoch": 1.0664581704456606, + "grad_norm": 0.3620113134384155, + "learning_rate": 5.293247096637214e-05, + "loss": 0.0534, + "step": 1364 + }, + { + "epoch": 1.0672400312744332, + "grad_norm": 0.3379097878932953, + "learning_rate": 5.28643495589221e-05, + "loss": 0.0686, + "step": 1365 + }, + { + "epoch": 1.0680218921032056, + "grad_norm": 0.26274728775024414, + "learning_rate": 5.279622281670998e-05, + "loss": 0.0368, + "step": 1366 + }, + { + "epoch": 1.068803752931978, + "grad_norm": 0.35920795798301697, + "learning_rate": 5.272809086661972e-05, + "loss": 0.0625, + "step": 1367 + }, + { + "epoch": 1.0695856137607507, + "grad_norm": 0.32537350058555603, + "learning_rate": 5.2659953835544916e-05, + "loss": 0.0647, + "step": 1368 + }, + { + "epoch": 1.070367474589523, + "grad_norm": 0.38874363899230957, + "learning_rate": 5.2591811850388726e-05, + "loss": 0.0664, + "step": 1369 + }, + { + "epoch": 1.0711493354182955, + "grad_norm": 0.3997640013694763, + "learning_rate": 5.252366503806345e-05, + "loss": 0.0453, + "step": 1370 + }, + { + "epoch": 1.0719311962470681, + "grad_norm": 0.4044627845287323, + "learning_rate": 5.245551352549044e-05, + "loss": 0.0725, + "step": 1371 + }, + { + "epoch": 1.0727130570758405, + "grad_norm": 0.40725669264793396, + "learning_rate": 5.238735743959974e-05, + "loss": 0.0528, + "step": 1372 + }, + { + "epoch": 1.073494917904613, + "grad_norm": 0.3452327847480774, + "learning_rate": 5.231919690732995e-05, + "loss": 0.0454, + "step": 1373 + }, + { + "epoch": 1.0742767787333856, + "grad_norm": 0.33204716444015503, + "learning_rate": 5.2251032055627946e-05, + "loss": 0.0335, + "step": 1374 + }, + { + "epoch": 1.075058639562158, + "grad_norm": 0.39199399948120117, + "learning_rate": 5.218286301144867e-05, + "loss": 0.038, + "step": 1375 + }, + { + "epoch": 1.0758405003909304, + "grad_norm": 0.44110965728759766, + "learning_rate": 5.211468990175481e-05, + "loss": 0.0997, + "step": 1376 + }, + { + "epoch": 1.076622361219703, + 
"grad_norm": 0.5448676347732544, + "learning_rate": 5.2046512853516696e-05, + "loss": 0.0656, + "step": 1377 + }, + { + "epoch": 1.0774042220484754, + "grad_norm": 0.450676828622818, + "learning_rate": 5.197833199371194e-05, + "loss": 0.0515, + "step": 1378 + }, + { + "epoch": 1.0781860828772478, + "grad_norm": 0.373246967792511, + "learning_rate": 5.1910147449325295e-05, + "loss": 0.0403, + "step": 1379 + }, + { + "epoch": 1.0789679437060202, + "grad_norm": 0.4239073693752289, + "learning_rate": 5.184195934734835e-05, + "loss": 0.0699, + "step": 1380 + }, + { + "epoch": 1.0797498045347929, + "grad_norm": 0.48097920417785645, + "learning_rate": 5.177376781477933e-05, + "loss": 0.0779, + "step": 1381 + }, + { + "epoch": 1.0805316653635653, + "grad_norm": 0.3312666118144989, + "learning_rate": 5.170557297862284e-05, + "loss": 0.0444, + "step": 1382 + }, + { + "epoch": 1.0813135261923377, + "grad_norm": 0.328531414270401, + "learning_rate": 5.163737496588964e-05, + "loss": 0.0436, + "step": 1383 + }, + { + "epoch": 1.0820953870211103, + "grad_norm": 0.303060382604599, + "learning_rate": 5.156917390359643e-05, + "loss": 0.0375, + "step": 1384 + }, + { + "epoch": 1.0828772478498827, + "grad_norm": 0.3380538821220398, + "learning_rate": 5.150096991876556e-05, + "loss": 0.0416, + "step": 1385 + }, + { + "epoch": 1.0836591086786551, + "grad_norm": 0.3253069519996643, + "learning_rate": 5.143276313842485e-05, + "loss": 0.058, + "step": 1386 + }, + { + "epoch": 1.0844409695074277, + "grad_norm": 0.38924887776374817, + "learning_rate": 5.1364553689607264e-05, + "loss": 0.074, + "step": 1387 + }, + { + "epoch": 1.0852228303362002, + "grad_norm": 0.39946839213371277, + "learning_rate": 5.129634169935081e-05, + "loss": 0.0764, + "step": 1388 + }, + { + "epoch": 1.0860046911649726, + "grad_norm": 0.3759361803531647, + "learning_rate": 5.1228127294698225e-05, + "loss": 0.0538, + "step": 1389 + }, + { + "epoch": 1.0867865519937452, + "grad_norm": 0.3306618928909302, + "learning_rate": 5.1159910602696683e-05, + "loss": 0.0857, + "step": 1390 + }, + { + "epoch": 1.0875684128225176, + "grad_norm": 0.5133655071258545, + "learning_rate": 5.1091691750397675e-05, + "loss": 0.0727, + "step": 1391 + }, + { + "epoch": 1.08835027365129, + "grad_norm": 0.43473249673843384, + "learning_rate": 5.1023470864856684e-05, + "loss": 0.0567, + "step": 1392 + }, + { + "epoch": 1.0891321344800626, + "grad_norm": 0.4632827043533325, + "learning_rate": 5.0955248073132975e-05, + "loss": 0.0531, + "step": 1393 + }, + { + "epoch": 1.089913995308835, + "grad_norm": 0.4766741991043091, + "learning_rate": 5.088702350228942e-05, + "loss": 0.0995, + "step": 1394 + }, + { + "epoch": 1.0906958561376074, + "grad_norm": 0.37831687927246094, + "learning_rate": 5.081879727939214e-05, + "loss": 0.0374, + "step": 1395 + }, + { + "epoch": 1.09147771696638, + "grad_norm": 0.4387483298778534, + "learning_rate": 5.075056953151035e-05, + "loss": 0.0639, + "step": 1396 + }, + { + "epoch": 1.0922595777951525, + "grad_norm": 0.3122713565826416, + "learning_rate": 5.068234038571612e-05, + "loss": 0.0737, + "step": 1397 + }, + { + "epoch": 1.0930414386239249, + "grad_norm": 0.39683401584625244, + "learning_rate": 5.0614109969084125e-05, + "loss": 0.1093, + "step": 1398 + }, + { + "epoch": 1.0938232994526975, + "grad_norm": 0.34212830662727356, + "learning_rate": 5.054587840869136e-05, + "loss": 0.047, + "step": 1399 + }, + { + "epoch": 1.09460516028147, + "grad_norm": 0.4159735143184662, + "learning_rate": 5.047764583161704e-05, + "loss": 0.0899, + "step": 
1400 + }, + { + "epoch": 1.09460516028147, + "eval_loss": 0.12889541685581207, + "eval_runtime": 13.3979, + "eval_samples_per_second": 3.881, + "eval_steps_per_second": 0.97, + "step": 1400 + }, + { + "epoch": 1.0953870211102423, + "grad_norm": 0.3841370940208435, + "learning_rate": 5.0409412364942165e-05, + "loss": 0.0606, + "step": 1401 + }, + { + "epoch": 1.0961688819390147, + "grad_norm": 0.41264137625694275, + "learning_rate": 5.03411781357495e-05, + "loss": 0.0637, + "step": 1402 + }, + { + "epoch": 1.0969507427677874, + "grad_norm": 0.3105151057243347, + "learning_rate": 5.027294327112314e-05, + "loss": 0.0486, + "step": 1403 + }, + { + "epoch": 1.0977326035965598, + "grad_norm": 0.31089651584625244, + "learning_rate": 5.020470789814842e-05, + "loss": 0.055, + "step": 1404 + }, + { + "epoch": 1.0985144644253322, + "grad_norm": 0.3997081518173218, + "learning_rate": 5.01364721439116e-05, + "loss": 0.0696, + "step": 1405 + }, + { + "epoch": 1.0992963252541048, + "grad_norm": 0.4661003649234772, + "learning_rate": 5.006823613549964e-05, + "loss": 0.0712, + "step": 1406 + }, + { + "epoch": 1.1000781860828772, + "grad_norm": 0.4002636969089508, + "learning_rate": 5e-05, + "loss": 0.0556, + "step": 1407 + }, + { + "epoch": 1.1008600469116496, + "grad_norm": 0.402055025100708, + "learning_rate": 4.993176386450036e-05, + "loss": 0.0556, + "step": 1408 + }, + { + "epoch": 1.1016419077404223, + "grad_norm": 0.4962114095687866, + "learning_rate": 4.986352785608842e-05, + "loss": 0.0714, + "step": 1409 + }, + { + "epoch": 1.1024237685691947, + "grad_norm": 0.40611883997917175, + "learning_rate": 4.97952921018516e-05, + "loss": 0.0681, + "step": 1410 + }, + { + "epoch": 1.103205629397967, + "grad_norm": 0.5475478172302246, + "learning_rate": 4.9727056728876865e-05, + "loss": 0.1258, + "step": 1411 + }, + { + "epoch": 1.1039874902267397, + "grad_norm": 0.33019861578941345, + "learning_rate": 4.965882186425053e-05, + "loss": 0.0628, + "step": 1412 + }, + { + "epoch": 1.104769351055512, + "grad_norm": 0.2784503698348999, + "learning_rate": 4.959058763505784e-05, + "loss": 0.0396, + "step": 1413 + }, + { + "epoch": 1.1055512118842845, + "grad_norm": 0.4385380446910858, + "learning_rate": 4.9522354168382973e-05, + "loss": 0.0941, + "step": 1414 + }, + { + "epoch": 1.1063330727130571, + "grad_norm": 0.33498090505599976, + "learning_rate": 4.945412159130864e-05, + "loss": 0.0423, + "step": 1415 + }, + { + "epoch": 1.1071149335418295, + "grad_norm": 0.3903992474079132, + "learning_rate": 4.9385890030915907e-05, + "loss": 0.0581, + "step": 1416 + }, + { + "epoch": 1.107896794370602, + "grad_norm": 0.31511014699935913, + "learning_rate": 4.931765961428389e-05, + "loss": 0.063, + "step": 1417 + }, + { + "epoch": 1.1086786551993746, + "grad_norm": 0.4896702170372009, + "learning_rate": 4.9249430468489646e-05, + "loss": 0.0814, + "step": 1418 + }, + { + "epoch": 1.109460516028147, + "grad_norm": 0.3954057991504669, + "learning_rate": 4.9181202720607874e-05, + "loss": 0.0563, + "step": 1419 + }, + { + "epoch": 1.1102423768569194, + "grad_norm": 0.36815062165260315, + "learning_rate": 4.9112976497710586e-05, + "loss": 0.0694, + "step": 1420 + }, + { + "epoch": 1.111024237685692, + "grad_norm": 0.4068279564380646, + "learning_rate": 4.904475192686702e-05, + "loss": 0.0636, + "step": 1421 + }, + { + "epoch": 1.1118060985144644, + "grad_norm": 0.42344993352890015, + "learning_rate": 4.897652913514334e-05, + "loss": 0.0479, + "step": 1422 + }, + { + "epoch": 1.1125879593432368, + "grad_norm": 0.43622395396232605, 
+ "learning_rate": 4.890830824960234e-05, + "loss": 0.0585, + "step": 1423 + }, + { + "epoch": 1.1133698201720095, + "grad_norm": 0.2758655250072479, + "learning_rate": 4.884008939730333e-05, + "loss": 0.047, + "step": 1424 + }, + { + "epoch": 1.1141516810007819, + "grad_norm": 0.39345523715019226, + "learning_rate": 4.877187270530178e-05, + "loss": 0.0497, + "step": 1425 + }, + { + "epoch": 1.1149335418295543, + "grad_norm": 0.48883023858070374, + "learning_rate": 4.87036583006492e-05, + "loss": 0.0757, + "step": 1426 + }, + { + "epoch": 1.115715402658327, + "grad_norm": 0.34832096099853516, + "learning_rate": 4.863544631039275e-05, + "loss": 0.0448, + "step": 1427 + }, + { + "epoch": 1.1164972634870993, + "grad_norm": 0.4675982594490051, + "learning_rate": 4.856723686157516e-05, + "loss": 0.0671, + "step": 1428 + }, + { + "epoch": 1.1172791243158717, + "grad_norm": 0.37189602851867676, + "learning_rate": 4.8499030081234444e-05, + "loss": 0.0909, + "step": 1429 + }, + { + "epoch": 1.1180609851446444, + "grad_norm": 0.3896830976009369, + "learning_rate": 4.843082609640357e-05, + "loss": 0.0654, + "step": 1430 + }, + { + "epoch": 1.1188428459734168, + "grad_norm": 0.31397873163223267, + "learning_rate": 4.8362625034110354e-05, + "loss": 0.0473, + "step": 1431 + }, + { + "epoch": 1.1196247068021892, + "grad_norm": 0.45029687881469727, + "learning_rate": 4.829442702137718e-05, + "loss": 0.0544, + "step": 1432 + }, + { + "epoch": 1.1204065676309618, + "grad_norm": 0.3392208516597748, + "learning_rate": 4.8226232185220684e-05, + "loss": 0.043, + "step": 1433 + }, + { + "epoch": 1.1211884284597342, + "grad_norm": 0.4472123384475708, + "learning_rate": 4.815804065265166e-05, + "loss": 0.0761, + "step": 1434 + }, + { + "epoch": 1.1219702892885066, + "grad_norm": 0.35860681533813477, + "learning_rate": 4.808985255067473e-05, + "loss": 0.0746, + "step": 1435 + }, + { + "epoch": 1.122752150117279, + "grad_norm": 0.4250498414039612, + "learning_rate": 4.8021668006288075e-05, + "loss": 0.0512, + "step": 1436 + }, + { + "epoch": 1.1235340109460517, + "grad_norm": 0.3914888799190521, + "learning_rate": 4.7953487146483315e-05, + "loss": 0.0869, + "step": 1437 + }, + { + "epoch": 1.124315871774824, + "grad_norm": 0.544806957244873, + "learning_rate": 4.78853100982452e-05, + "loss": 0.0859, + "step": 1438 + }, + { + "epoch": 1.1250977326035965, + "grad_norm": 0.4872533679008484, + "learning_rate": 4.781713698855135e-05, + "loss": 0.069, + "step": 1439 + }, + { + "epoch": 1.125879593432369, + "grad_norm": 0.41077733039855957, + "learning_rate": 4.774896794437206e-05, + "loss": 0.0658, + "step": 1440 + }, + { + "epoch": 1.1266614542611415, + "grad_norm": 0.5026431083679199, + "learning_rate": 4.7680803092670054e-05, + "loss": 0.0768, + "step": 1441 + }, + { + "epoch": 1.127443315089914, + "grad_norm": 0.45740193128585815, + "learning_rate": 4.761264256040028e-05, + "loss": 0.0676, + "step": 1442 + }, + { + "epoch": 1.1282251759186865, + "grad_norm": 0.3656143844127655, + "learning_rate": 4.754448647450957e-05, + "loss": 0.0688, + "step": 1443 + }, + { + "epoch": 1.129007036747459, + "grad_norm": 0.4475756883621216, + "learning_rate": 4.747633496193655e-05, + "loss": 0.0507, + "step": 1444 + }, + { + "epoch": 1.1297888975762314, + "grad_norm": 0.33670148253440857, + "learning_rate": 4.740818814961129e-05, + "loss": 0.032, + "step": 1445 + }, + { + "epoch": 1.130570758405004, + "grad_norm": 0.5176746249198914, + "learning_rate": 4.7340046164455096e-05, + "loss": 0.0778, + "step": 1446 + }, + { + "epoch": 
1.1313526192337764, + "grad_norm": 0.2944004237651825, + "learning_rate": 4.72719091333803e-05, + "loss": 0.081, + "step": 1447 + }, + { + "epoch": 1.1321344800625488, + "grad_norm": 0.36607813835144043, + "learning_rate": 4.720377718329003e-05, + "loss": 0.062, + "step": 1448 + }, + { + "epoch": 1.1329163408913214, + "grad_norm": 0.38473501801490784, + "learning_rate": 4.713565044107792e-05, + "loss": 0.0809, + "step": 1449 + }, + { + "epoch": 1.1336982017200938, + "grad_norm": 0.37212830781936646, + "learning_rate": 4.7067529033627874e-05, + "loss": 0.0573, + "step": 1450 + }, + { + "epoch": 1.1344800625488662, + "grad_norm": 0.35405033826828003, + "learning_rate": 4.699941308781394e-05, + "loss": 0.053, + "step": 1451 + }, + { + "epoch": 1.1352619233776389, + "grad_norm": 0.469467431306839, + "learning_rate": 4.693130273049995e-05, + "loss": 0.065, + "step": 1452 + }, + { + "epoch": 1.1360437842064113, + "grad_norm": 0.37710899114608765, + "learning_rate": 4.686319808853931e-05, + "loss": 0.0766, + "step": 1453 + }, + { + "epoch": 1.1368256450351837, + "grad_norm": 0.49938127398490906, + "learning_rate": 4.679509928877481e-05, + "loss": 0.0463, + "step": 1454 + }, + { + "epoch": 1.137607505863956, + "grad_norm": 0.3544851839542389, + "learning_rate": 4.6727006458038357e-05, + "loss": 0.0523, + "step": 1455 + }, + { + "epoch": 1.1383893666927287, + "grad_norm": 0.26120486855506897, + "learning_rate": 4.6658919723150706e-05, + "loss": 0.0476, + "step": 1456 + }, + { + "epoch": 1.1391712275215011, + "grad_norm": 0.4471639096736908, + "learning_rate": 4.659083921092131e-05, + "loss": 0.076, + "step": 1457 + }, + { + "epoch": 1.1399530883502735, + "grad_norm": 0.3284335434436798, + "learning_rate": 4.6522765048148015e-05, + "loss": 0.0456, + "step": 1458 + }, + { + "epoch": 1.1407349491790462, + "grad_norm": 0.35543978214263916, + "learning_rate": 4.6454697361616816e-05, + "loss": 0.0578, + "step": 1459 + }, + { + "epoch": 1.1415168100078186, + "grad_norm": 0.3903660476207733, + "learning_rate": 4.6386636278101665e-05, + "loss": 0.0421, + "step": 1460 + }, + { + "epoch": 1.142298670836591, + "grad_norm": 0.39665699005126953, + "learning_rate": 4.631858192436422e-05, + "loss": 0.0684, + "step": 1461 + }, + { + "epoch": 1.1430805316653636, + "grad_norm": 0.44944366812705994, + "learning_rate": 4.6250534427153626e-05, + "loss": 0.0639, + "step": 1462 + }, + { + "epoch": 1.143862392494136, + "grad_norm": 0.3654981553554535, + "learning_rate": 4.6182493913206205e-05, + "loss": 0.0533, + "step": 1463 + }, + { + "epoch": 1.1446442533229084, + "grad_norm": 0.38796553015708923, + "learning_rate": 4.611446050924529e-05, + "loss": 0.0969, + "step": 1464 + }, + { + "epoch": 1.145426114151681, + "grad_norm": 0.31265556812286377, + "learning_rate": 4.6046434341981034e-05, + "loss": 0.0347, + "step": 1465 + }, + { + "epoch": 1.1462079749804535, + "grad_norm": 0.4331945776939392, + "learning_rate": 4.597841553810999e-05, + "loss": 0.0629, + "step": 1466 + }, + { + "epoch": 1.1469898358092259, + "grad_norm": 0.2969951927661896, + "learning_rate": 4.591040422431512e-05, + "loss": 0.0649, + "step": 1467 + }, + { + "epoch": 1.1477716966379985, + "grad_norm": 0.3629123270511627, + "learning_rate": 4.5842400527265386e-05, + "loss": 0.0794, + "step": 1468 + }, + { + "epoch": 1.148553557466771, + "grad_norm": 0.33106693625450134, + "learning_rate": 4.5774404573615534e-05, + "loss": 0.0347, + "step": 1469 + }, + { + "epoch": 1.1493354182955433, + "grad_norm": 0.3500162363052368, + "learning_rate": 
4.570641649000591e-05, + "loss": 0.031, + "step": 1470 + }, + { + "epoch": 1.150117279124316, + "grad_norm": 0.39276114106178284, + "learning_rate": 4.563843640306225e-05, + "loss": 0.0432, + "step": 1471 + }, + { + "epoch": 1.1508991399530883, + "grad_norm": 0.5838002562522888, + "learning_rate": 4.557046443939531e-05, + "loss": 0.1249, + "step": 1472 + }, + { + "epoch": 1.1516810007818608, + "grad_norm": 0.5215751528739929, + "learning_rate": 4.550250072560079e-05, + "loss": 0.0673, + "step": 1473 + }, + { + "epoch": 1.1524628616106334, + "grad_norm": 0.30646979808807373, + "learning_rate": 4.543454538825898e-05, + "loss": 0.0486, + "step": 1474 + }, + { + "epoch": 1.1532447224394058, + "grad_norm": 0.4018358588218689, + "learning_rate": 4.5366598553934606e-05, + "loss": 0.0595, + "step": 1475 + }, + { + "epoch": 1.1540265832681782, + "grad_norm": 0.34716930985450745, + "learning_rate": 4.529866034917651e-05, + "loss": 0.0528, + "step": 1476 + }, + { + "epoch": 1.1548084440969508, + "grad_norm": 0.46272578835487366, + "learning_rate": 4.523073090051751e-05, + "loss": 0.0574, + "step": 1477 + }, + { + "epoch": 1.1555903049257232, + "grad_norm": 0.4649121165275574, + "learning_rate": 4.516281033447408e-05, + "loss": 0.0682, + "step": 1478 + }, + { + "epoch": 1.1563721657544956, + "grad_norm": 0.43052923679351807, + "learning_rate": 4.5094898777546175e-05, + "loss": 0.0839, + "step": 1479 + }, + { + "epoch": 1.1571540265832683, + "grad_norm": 0.3807370960712433, + "learning_rate": 4.5026996356216945e-05, + "loss": 0.0934, + "step": 1480 + }, + { + "epoch": 1.1579358874120407, + "grad_norm": 0.3976602554321289, + "learning_rate": 4.495910319695257e-05, + "loss": 0.0474, + "step": 1481 + }, + { + "epoch": 1.158717748240813, + "grad_norm": 0.3352206349372864, + "learning_rate": 4.4891219426201914e-05, + "loss": 0.0811, + "step": 1482 + }, + { + "epoch": 1.1594996090695857, + "grad_norm": 0.2787337005138397, + "learning_rate": 4.4823345170396417e-05, + "loss": 0.0863, + "step": 1483 + }, + { + "epoch": 1.1602814698983581, + "grad_norm": 0.4110407531261444, + "learning_rate": 4.475548055594976e-05, + "loss": 0.061, + "step": 1484 + }, + { + "epoch": 1.1610633307271305, + "grad_norm": 0.3591212034225464, + "learning_rate": 4.468762570925771e-05, + "loss": 0.0481, + "step": 1485 + }, + { + "epoch": 1.1618451915559032, + "grad_norm": 0.48095250129699707, + "learning_rate": 4.461978075669775e-05, + "loss": 0.0568, + "step": 1486 + }, + { + "epoch": 1.1626270523846756, + "grad_norm": 0.4325644075870514, + "learning_rate": 4.455194582462906e-05, + "loss": 0.0506, + "step": 1487 + }, + { + "epoch": 1.163408913213448, + "grad_norm": 0.3393900692462921, + "learning_rate": 4.4484121039392074e-05, + "loss": 0.0424, + "step": 1488 + }, + { + "epoch": 1.1641907740422206, + "grad_norm": 0.38791218400001526, + "learning_rate": 4.441630652730831e-05, + "loss": 0.0393, + "step": 1489 + }, + { + "epoch": 1.164972634870993, + "grad_norm": 0.4067123532295227, + "learning_rate": 4.4348502414680236e-05, + "loss": 0.0613, + "step": 1490 + }, + { + "epoch": 1.1657544956997654, + "grad_norm": 0.3250060975551605, + "learning_rate": 4.428070882779091e-05, + "loss": 0.0518, + "step": 1491 + }, + { + "epoch": 1.166536356528538, + "grad_norm": 0.3601875901222229, + "learning_rate": 4.4212925892903745e-05, + "loss": 0.0491, + "step": 1492 + }, + { + "epoch": 1.1673182173573105, + "grad_norm": 0.42251959443092346, + "learning_rate": 4.4145153736262387e-05, + "loss": 0.0437, + "step": 1493 + }, + { + "epoch": 
1.1681000781860829, + "grad_norm": 0.3853507936000824, + "learning_rate": 4.407739248409036e-05, + "loss": 0.0344, + "step": 1494 + }, + { + "epoch": 1.1688819390148553, + "grad_norm": 0.40151551365852356, + "learning_rate": 4.400964226259087e-05, + "loss": 0.0763, + "step": 1495 + }, + { + "epoch": 1.169663799843628, + "grad_norm": 0.2914046049118042, + "learning_rate": 4.3941903197946614e-05, + "loss": 0.0283, + "step": 1496 + }, + { + "epoch": 1.1704456606724003, + "grad_norm": 0.3761674463748932, + "learning_rate": 4.387417541631952e-05, + "loss": 0.0524, + "step": 1497 + }, + { + "epoch": 1.1712275215011727, + "grad_norm": 0.5237846374511719, + "learning_rate": 4.3806459043850465e-05, + "loss": 0.0456, + "step": 1498 + }, + { + "epoch": 1.1720093823299453, + "grad_norm": 0.40119150280952454, + "learning_rate": 4.373875420665907e-05, + "loss": 0.0599, + "step": 1499 + }, + { + "epoch": 1.1727912431587177, + "grad_norm": 0.4992891252040863, + "learning_rate": 4.3671061030843495e-05, + "loss": 0.0484, + "step": 1500 + }, + { + "epoch": 1.1735731039874902, + "grad_norm": 0.45423582196235657, + "learning_rate": 4.3603379642480216e-05, + "loss": 0.0905, + "step": 1501 + }, + { + "epoch": 1.1743549648162628, + "grad_norm": 0.47261035442352295, + "learning_rate": 4.353571016762368e-05, + "loss": 0.0506, + "step": 1502 + }, + { + "epoch": 1.1751368256450352, + "grad_norm": 0.40774956345558167, + "learning_rate": 4.3468052732306184e-05, + "loss": 0.0532, + "step": 1503 + }, + { + "epoch": 1.1759186864738076, + "grad_norm": 0.4656347632408142, + "learning_rate": 4.340040746253762e-05, + "loss": 0.0451, + "step": 1504 + }, + { + "epoch": 1.1767005473025802, + "grad_norm": 0.2806099057197571, + "learning_rate": 4.333277448430517e-05, + "loss": 0.0388, + "step": 1505 + }, + { + "epoch": 1.1774824081313526, + "grad_norm": 0.37031981348991394, + "learning_rate": 4.3265153923573174e-05, + "loss": 0.0373, + "step": 1506 + }, + { + "epoch": 1.178264268960125, + "grad_norm": 0.4706210792064667, + "learning_rate": 4.3197545906282845e-05, + "loss": 0.0478, + "step": 1507 + }, + { + "epoch": 1.1790461297888977, + "grad_norm": 0.4227662980556488, + "learning_rate": 4.3129950558352e-05, + "loss": 0.0451, + "step": 1508 + }, + { + "epoch": 1.17982799061767, + "grad_norm": 0.5277842283248901, + "learning_rate": 4.306236800567485e-05, + "loss": 0.0586, + "step": 1509 + }, + { + "epoch": 1.1806098514464425, + "grad_norm": 0.5246910452842712, + "learning_rate": 4.2994798374121834e-05, + "loss": 0.053, + "step": 1510 + }, + { + "epoch": 1.181391712275215, + "grad_norm": 0.4553016126155853, + "learning_rate": 4.292724178953929e-05, + "loss": 0.083, + "step": 1511 + }, + { + "epoch": 1.1821735731039875, + "grad_norm": 0.36565476655960083, + "learning_rate": 4.2859698377749244e-05, + "loss": 0.0493, + "step": 1512 + }, + { + "epoch": 1.18295543393276, + "grad_norm": 0.38505685329437256, + "learning_rate": 4.27921682645492e-05, + "loss": 0.085, + "step": 1513 + }, + { + "epoch": 1.1837372947615323, + "grad_norm": 0.46367672085762024, + "learning_rate": 4.272465157571193e-05, + "loss": 0.0626, + "step": 1514 + }, + { + "epoch": 1.184519155590305, + "grad_norm": 0.34231388568878174, + "learning_rate": 4.265714843698511e-05, + "loss": 0.0646, + "step": 1515 + }, + { + "epoch": 1.1853010164190774, + "grad_norm": 0.3470911979675293, + "learning_rate": 4.258965897409129e-05, + "loss": 0.0529, + "step": 1516 + }, + { + "epoch": 1.1860828772478498, + "grad_norm": 0.5022766590118408, + "learning_rate": 4.2522183312727496e-05, + 
"loss": 0.0774, + "step": 1517 + }, + { + "epoch": 1.1868647380766224, + "grad_norm": 0.40239912271499634, + "learning_rate": 4.2454721578565024e-05, + "loss": 0.0573, + "step": 1518 + }, + { + "epoch": 1.1876465989053948, + "grad_norm": 0.4047490358352661, + "learning_rate": 4.238727389724927e-05, + "loss": 0.069, + "step": 1519 + }, + { + "epoch": 1.1884284597341672, + "grad_norm": 0.3190065622329712, + "learning_rate": 4.231984039439945e-05, + "loss": 0.0597, + "step": 1520 + }, + { + "epoch": 1.1892103205629398, + "grad_norm": 0.38884586095809937, + "learning_rate": 4.225242119560839e-05, + "loss": 0.0433, + "step": 1521 + }, + { + "epoch": 1.1899921813917123, + "grad_norm": 0.4333357512950897, + "learning_rate": 4.2185016426442214e-05, + "loss": 0.0714, + "step": 1522 + }, + { + "epoch": 1.1907740422204847, + "grad_norm": 0.4471852779388428, + "learning_rate": 4.211762621244022e-05, + "loss": 0.0771, + "step": 1523 + }, + { + "epoch": 1.1915559030492573, + "grad_norm": 0.3137734532356262, + "learning_rate": 4.2050250679114625e-05, + "loss": 0.0404, + "step": 1524 + }, + { + "epoch": 1.1923377638780297, + "grad_norm": 0.3927496671676636, + "learning_rate": 4.198288995195021e-05, + "loss": 0.0459, + "step": 1525 + }, + { + "epoch": 1.193119624706802, + "grad_norm": 0.3484167754650116, + "learning_rate": 4.1915544156404284e-05, + "loss": 0.0555, + "step": 1526 + }, + { + "epoch": 1.1939014855355747, + "grad_norm": 0.3492136299610138, + "learning_rate": 4.184821341790629e-05, + "loss": 0.0363, + "step": 1527 + }, + { + "epoch": 1.1946833463643471, + "grad_norm": 0.34315451979637146, + "learning_rate": 4.178089786185761e-05, + "loss": 0.0568, + "step": 1528 + }, + { + "epoch": 1.1954652071931196, + "grad_norm": 0.36132681369781494, + "learning_rate": 4.17135976136314e-05, + "loss": 0.0434, + "step": 1529 + }, + { + "epoch": 1.1962470680218922, + "grad_norm": 0.6220799088478088, + "learning_rate": 4.1646312798572296e-05, + "loss": 0.0789, + "step": 1530 + }, + { + "epoch": 1.1970289288506646, + "grad_norm": 0.3326771855354309, + "learning_rate": 4.1579043541996136e-05, + "loss": 0.0583, + "step": 1531 + }, + { + "epoch": 1.197810789679437, + "grad_norm": 0.5193848609924316, + "learning_rate": 4.151178996918986e-05, + "loss": 0.0977, + "step": 1532 + }, + { + "epoch": 1.1985926505082096, + "grad_norm": 0.3856359124183655, + "learning_rate": 4.144455220541112e-05, + "loss": 0.0419, + "step": 1533 + }, + { + "epoch": 1.199374511336982, + "grad_norm": 0.3599039614200592, + "learning_rate": 4.137733037588822e-05, + "loss": 0.0917, + "step": 1534 + }, + { + "epoch": 1.2001563721657544, + "grad_norm": 0.3522965908050537, + "learning_rate": 4.131012460581967e-05, + "loss": 0.0446, + "step": 1535 + }, + { + "epoch": 1.200938232994527, + "grad_norm": 0.38580793142318726, + "learning_rate": 4.1242935020374165e-05, + "loss": 0.062, + "step": 1536 + }, + { + "epoch": 1.2017200938232995, + "grad_norm": 0.35813602805137634, + "learning_rate": 4.1175761744690225e-05, + "loss": 0.0825, + "step": 1537 + }, + { + "epoch": 1.2025019546520719, + "grad_norm": 0.38828739523887634, + "learning_rate": 4.1108604903875945e-05, + "loss": 0.0868, + "step": 1538 + }, + { + "epoch": 1.2032838154808445, + "grad_norm": 0.40260881185531616, + "learning_rate": 4.10414646230089e-05, + "loss": 0.0488, + "step": 1539 + }, + { + "epoch": 1.204065676309617, + "grad_norm": 0.33392858505249023, + "learning_rate": 4.097434102713577e-05, + "loss": 0.0438, + "step": 1540 + }, + { + "epoch": 1.2048475371383893, + "grad_norm": 
0.4407254159450531, + "learning_rate": 4.090723424127216e-05, + "loss": 0.0632, + "step": 1541 + }, + { + "epoch": 1.205629397967162, + "grad_norm": 0.5629727244377136, + "learning_rate": 4.084014439040236e-05, + "loss": 0.0962, + "step": 1542 + }, + { + "epoch": 1.2064112587959344, + "grad_norm": 0.3747621476650238, + "learning_rate": 4.077307159947915e-05, + "loss": 0.0482, + "step": 1543 + }, + { + "epoch": 1.2071931196247068, + "grad_norm": 0.424283891916275, + "learning_rate": 4.070601599342354e-05, + "loss": 0.054, + "step": 1544 + }, + { + "epoch": 1.2079749804534794, + "grad_norm": 0.39465880393981934, + "learning_rate": 4.063897769712448e-05, + "loss": 0.0419, + "step": 1545 + }, + { + "epoch": 1.2087568412822518, + "grad_norm": 0.30999326705932617, + "learning_rate": 4.057195683543873e-05, + "loss": 0.0436, + "step": 1546 + }, + { + "epoch": 1.2095387021110242, + "grad_norm": 0.4226630628108978, + "learning_rate": 4.0504953533190575e-05, + "loss": 0.0725, + "step": 1547 + }, + { + "epoch": 1.2103205629397968, + "grad_norm": 0.31118202209472656, + "learning_rate": 4.043796791517155e-05, + "loss": 0.0439, + "step": 1548 + }, + { + "epoch": 1.2111024237685692, + "grad_norm": 0.36616459488868713, + "learning_rate": 4.037100010614031e-05, + "loss": 0.0534, + "step": 1549 + }, + { + "epoch": 1.2118842845973417, + "grad_norm": 0.47533512115478516, + "learning_rate": 4.0304050230822335e-05, + "loss": 0.0636, + "step": 1550 + }, + { + "epoch": 1.212666145426114, + "grad_norm": 0.396884560585022, + "learning_rate": 4.023711841390966e-05, + "loss": 0.0996, + "step": 1551 + }, + { + "epoch": 1.2134480062548867, + "grad_norm": 0.3450510501861572, + "learning_rate": 4.017020478006073e-05, + "loss": 0.035, + "step": 1552 + }, + { + "epoch": 1.214229867083659, + "grad_norm": 0.48043736815452576, + "learning_rate": 4.010330945390012e-05, + "loss": 0.0527, + "step": 1553 + }, + { + "epoch": 1.2150117279124315, + "grad_norm": 0.4231407940387726, + "learning_rate": 4.0036432560018255e-05, + "loss": 0.0549, + "step": 1554 + }, + { + "epoch": 1.2157935887412041, + "grad_norm": 0.3957667648792267, + "learning_rate": 3.996957422297132e-05, + "loss": 0.0545, + "step": 1555 + }, + { + "epoch": 1.2165754495699765, + "grad_norm": 0.4996613562107086, + "learning_rate": 3.990273456728086e-05, + "loss": 0.0701, + "step": 1556 + }, + { + "epoch": 1.217357310398749, + "grad_norm": 0.42404982447624207, + "learning_rate": 3.98359137174337e-05, + "loss": 0.0573, + "step": 1557 + }, + { + "epoch": 1.2181391712275216, + "grad_norm": 0.30868658423423767, + "learning_rate": 3.9769111797881545e-05, + "loss": 0.0423, + "step": 1558 + }, + { + "epoch": 1.218921032056294, + "grad_norm": 0.40342530608177185, + "learning_rate": 3.9702328933040914e-05, + "loss": 0.0433, + "step": 1559 + }, + { + "epoch": 1.2197028928850664, + "grad_norm": 0.3019714653491974, + "learning_rate": 3.963556524729284e-05, + "loss": 0.0413, + "step": 1560 + }, + { + "epoch": 1.220484753713839, + "grad_norm": 0.4296371042728424, + "learning_rate": 3.9568820864982606e-05, + "loss": 0.0688, + "step": 1561 + }, + { + "epoch": 1.2212666145426114, + "grad_norm": 0.2896515130996704, + "learning_rate": 3.9502095910419515e-05, + "loss": 0.0657, + "step": 1562 + }, + { + "epoch": 1.2220484753713838, + "grad_norm": 0.38489270210266113, + "learning_rate": 3.943539050787678e-05, + "loss": 0.0783, + "step": 1563 + }, + { + "epoch": 1.2228303362001565, + "grad_norm": 0.3944641351699829, + "learning_rate": 3.936870478159109e-05, + "loss": 0.0469, + "step": 1564 + 
}, + { + "epoch": 1.2236121970289289, + "grad_norm": 0.3929797410964966, + "learning_rate": 3.930203885576257e-05, + "loss": 0.0563, + "step": 1565 + }, + { + "epoch": 1.2243940578577013, + "grad_norm": 0.6391537189483643, + "learning_rate": 3.923539285455442e-05, + "loss": 0.0864, + "step": 1566 + }, + { + "epoch": 1.2251759186864737, + "grad_norm": 0.43830859661102295, + "learning_rate": 3.9168766902092776e-05, + "loss": 0.07, + "step": 1567 + }, + { + "epoch": 1.2259577795152463, + "grad_norm": 0.35931962728500366, + "learning_rate": 3.910216112246636e-05, + "loss": 0.0723, + "step": 1568 + }, + { + "epoch": 1.2267396403440187, + "grad_norm": 0.3321268856525421, + "learning_rate": 3.903557563972641e-05, + "loss": 0.0599, + "step": 1569 + }, + { + "epoch": 1.2275215011727911, + "grad_norm": 0.33983114361763, + "learning_rate": 3.89690105778863e-05, + "loss": 0.0489, + "step": 1570 + }, + { + "epoch": 1.2283033620015638, + "grad_norm": 0.36031773686408997, + "learning_rate": 3.890246606092139e-05, + "loss": 0.0968, + "step": 1571 + }, + { + "epoch": 1.2290852228303362, + "grad_norm": 0.3760223388671875, + "learning_rate": 3.883594221276877e-05, + "loss": 0.0501, + "step": 1572 + }, + { + "epoch": 1.2298670836591086, + "grad_norm": 0.3305293023586273, + "learning_rate": 3.876943915732706e-05, + "loss": 0.0593, + "step": 1573 + }, + { + "epoch": 1.2306489444878812, + "grad_norm": 0.32184192538261414, + "learning_rate": 3.8702957018456124e-05, + "loss": 0.0507, + "step": 1574 + }, + { + "epoch": 1.2314308053166536, + "grad_norm": 0.6274586915969849, + "learning_rate": 3.863649591997688e-05, + "loss": 0.107, + "step": 1575 + }, + { + "epoch": 1.232212666145426, + "grad_norm": 0.4004760980606079, + "learning_rate": 3.857005598567107e-05, + "loss": 0.064, + "step": 1576 + }, + { + "epoch": 1.2329945269741986, + "grad_norm": 0.47167232632637024, + "learning_rate": 3.850363733928098e-05, + "loss": 0.0611, + "step": 1577 + }, + { + "epoch": 1.233776387802971, + "grad_norm": 0.41693300008773804, + "learning_rate": 3.84372401045093e-05, + "loss": 0.0771, + "step": 1578 + }, + { + "epoch": 1.2345582486317435, + "grad_norm": 0.32655128836631775, + "learning_rate": 3.8370864405018816e-05, + "loss": 0.0345, + "step": 1579 + }, + { + "epoch": 1.235340109460516, + "grad_norm": 0.34897708892822266, + "learning_rate": 3.8304510364432225e-05, + "loss": 0.0551, + "step": 1580 + }, + { + "epoch": 1.2361219702892885, + "grad_norm": 0.3144432306289673, + "learning_rate": 3.823817810633181e-05, + "loss": 0.0554, + "step": 1581 + }, + { + "epoch": 1.236903831118061, + "grad_norm": 0.3374468684196472, + "learning_rate": 3.8171867754259396e-05, + "loss": 0.0503, + "step": 1582 + }, + { + "epoch": 1.2376856919468335, + "grad_norm": 0.6711187958717346, + "learning_rate": 3.810557943171594e-05, + "loss": 0.0912, + "step": 1583 + }, + { + "epoch": 1.238467552775606, + "grad_norm": 0.3551695644855499, + "learning_rate": 3.803931326216137e-05, + "loss": 0.0482, + "step": 1584 + }, + { + "epoch": 1.2392494136043783, + "grad_norm": 0.29928022623062134, + "learning_rate": 3.797306936901439e-05, + "loss": 0.0591, + "step": 1585 + }, + { + "epoch": 1.240031274433151, + "grad_norm": 0.29246705770492554, + "learning_rate": 3.790684787565219e-05, + "loss": 0.0671, + "step": 1586 + }, + { + "epoch": 1.2408131352619234, + "grad_norm": 0.442971795797348, + "learning_rate": 3.784064890541022e-05, + "loss": 0.0714, + "step": 1587 + }, + { + "epoch": 1.2415949960906958, + "grad_norm": 0.4673009216785431, + "learning_rate": 
3.777447258158201e-05, + "loss": 0.0862, + "step": 1588 + }, + { + "epoch": 1.2423768569194684, + "grad_norm": 0.3445381224155426, + "learning_rate": 3.770831902741895e-05, + "loss": 0.0425, + "step": 1589 + }, + { + "epoch": 1.2431587177482408, + "grad_norm": 0.43028736114501953, + "learning_rate": 3.7642188366129935e-05, + "loss": 0.085, + "step": 1590 + }, + { + "epoch": 1.2439405785770132, + "grad_norm": 0.45845282077789307, + "learning_rate": 3.757608072088129e-05, + "loss": 0.0623, + "step": 1591 + }, + { + "epoch": 1.2447224394057859, + "grad_norm": 0.3896902799606323, + "learning_rate": 3.750999621479643e-05, + "loss": 0.0776, + "step": 1592 + }, + { + "epoch": 1.2455043002345583, + "grad_norm": 0.38196709752082825, + "learning_rate": 3.744393497095573e-05, + "loss": 0.0466, + "step": 1593 + }, + { + "epoch": 1.2462861610633307, + "grad_norm": 0.34162676334381104, + "learning_rate": 3.7377897112396175e-05, + "loss": 0.0342, + "step": 1594 + }, + { + "epoch": 1.2470680218921033, + "grad_norm": 0.4113738238811493, + "learning_rate": 3.7311882762111235e-05, + "loss": 0.0761, + "step": 1595 + }, + { + "epoch": 1.2478498827208757, + "grad_norm": 0.5574212670326233, + "learning_rate": 3.72458920430506e-05, + "loss": 0.0513, + "step": 1596 + }, + { + "epoch": 1.2486317435496481, + "grad_norm": 0.3796621561050415, + "learning_rate": 3.7179925078119905e-05, + "loss": 0.0641, + "step": 1597 + }, + { + "epoch": 1.2494136043784208, + "grad_norm": 0.6831467151641846, + "learning_rate": 3.711398199018059e-05, + "loss": 0.0753, + "step": 1598 + }, + { + "epoch": 1.2501954652071932, + "grad_norm": 0.3368784487247467, + "learning_rate": 3.704806290204963e-05, + "loss": 0.0386, + "step": 1599 + }, + { + "epoch": 1.2509773260359656, + "grad_norm": 0.3436744809150696, + "learning_rate": 3.698216793649925e-05, + "loss": 0.0548, + "step": 1600 + }, + { + "epoch": 1.2509773260359656, + "eval_loss": 0.12688221037387848, + "eval_runtime": 13.396, + "eval_samples_per_second": 3.882, + "eval_steps_per_second": 0.97, + "step": 1600 + }, + { + "epoch": 1.2517591868647382, + "grad_norm": 0.3372763693332672, + "learning_rate": 3.691629721625677e-05, + "loss": 0.0867, + "step": 1601 + }, + { + "epoch": 1.2525410476935106, + "grad_norm": 0.39428460597991943, + "learning_rate": 3.6850450864004376e-05, + "loss": 0.0566, + "step": 1602 + }, + { + "epoch": 1.253322908522283, + "grad_norm": 0.39720073342323303, + "learning_rate": 3.678462900237888e-05, + "loss": 0.0585, + "step": 1603 + }, + { + "epoch": 1.2541047693510556, + "grad_norm": 0.5266603827476501, + "learning_rate": 3.671883175397142e-05, + "loss": 0.0686, + "step": 1604 + }, + { + "epoch": 1.254886630179828, + "grad_norm": 0.3272291123867035, + "learning_rate": 3.6653059241327314e-05, + "loss": 0.0601, + "step": 1605 + }, + { + "epoch": 1.2556684910086005, + "grad_norm": 0.5012699961662292, + "learning_rate": 3.6587311586945875e-05, + "loss": 0.0562, + "step": 1606 + }, + { + "epoch": 1.256450351837373, + "grad_norm": 0.3750763237476349, + "learning_rate": 3.6521588913280024e-05, + "loss": 0.0462, + "step": 1607 + }, + { + "epoch": 1.2572322126661455, + "grad_norm": 0.5167805552482605, + "learning_rate": 3.6455891342736215e-05, + "loss": 0.066, + "step": 1608 + }, + { + "epoch": 1.258014073494918, + "grad_norm": 0.31887608766555786, + "learning_rate": 3.6390218997674144e-05, + "loss": 0.0815, + "step": 1609 + }, + { + "epoch": 1.2587959343236903, + "grad_norm": 0.3656921684741974, + "learning_rate": 3.6324572000406486e-05, + "loss": 0.0558, + "step": 1610 + 
}, + { + "epoch": 1.259577795152463, + "grad_norm": 0.4891717731952667, + "learning_rate": 3.625895047319875e-05, + "loss": 0.0633, + "step": 1611 + }, + { + "epoch": 1.2603596559812353, + "grad_norm": 0.3321298658847809, + "learning_rate": 3.619335453826901e-05, + "loss": 0.0392, + "step": 1612 + }, + { + "epoch": 1.2611415168100077, + "grad_norm": 0.3826375901699066, + "learning_rate": 3.6127784317787625e-05, + "loss": 0.0726, + "step": 1613 + }, + { + "epoch": 1.2619233776387804, + "grad_norm": 0.5942705869674683, + "learning_rate": 3.6062239933877116e-05, + "loss": 0.0724, + "step": 1614 + }, + { + "epoch": 1.2627052384675528, + "grad_norm": 0.44333893060684204, + "learning_rate": 3.5996721508611854e-05, + "loss": 0.0612, + "step": 1615 + }, + { + "epoch": 1.2634870992963252, + "grad_norm": 0.4055474102497101, + "learning_rate": 3.5931229164017886e-05, + "loss": 0.0503, + "step": 1616 + }, + { + "epoch": 1.2642689601250976, + "grad_norm": 0.34622856974601746, + "learning_rate": 3.586576302207264e-05, + "loss": 0.0618, + "step": 1617 + }, + { + "epoch": 1.2650508209538702, + "grad_norm": 0.33253592252731323, + "learning_rate": 3.58003232047048e-05, + "loss": 0.032, + "step": 1618 + }, + { + "epoch": 1.2658326817826426, + "grad_norm": 0.30954334139823914, + "learning_rate": 3.573490983379399e-05, + "loss": 0.0387, + "step": 1619 + }, + { + "epoch": 1.266614542611415, + "grad_norm": 0.38260695338249207, + "learning_rate": 3.566952303117056e-05, + "loss": 0.0829, + "step": 1620 + }, + { + "epoch": 1.2673964034401877, + "grad_norm": 0.3483465313911438, + "learning_rate": 3.5604162918615413e-05, + "loss": 0.0441, + "step": 1621 + }, + { + "epoch": 1.26817826426896, + "grad_norm": 0.49197712540626526, + "learning_rate": 3.5538829617859746e-05, + "loss": 0.0676, + "step": 1622 + }, + { + "epoch": 1.2689601250977325, + "grad_norm": 0.29698431491851807, + "learning_rate": 3.5473523250584765e-05, + "loss": 0.0313, + "step": 1623 + }, + { + "epoch": 1.2697419859265051, + "grad_norm": 0.4328691363334656, + "learning_rate": 3.5408243938421584e-05, + "loss": 0.0637, + "step": 1624 + }, + { + "epoch": 1.2705238467552775, + "grad_norm": 0.41575777530670166, + "learning_rate": 3.534299180295087e-05, + "loss": 0.0948, + "step": 1625 + }, + { + "epoch": 1.27130570758405, + "grad_norm": 0.42420047521591187, + "learning_rate": 3.5277766965702734e-05, + "loss": 0.0786, + "step": 1626 + }, + { + "epoch": 1.2720875684128226, + "grad_norm": 0.3792000114917755, + "learning_rate": 3.5212569548156366e-05, + "loss": 0.0797, + "step": 1627 + }, + { + "epoch": 1.272869429241595, + "grad_norm": 0.2699577808380127, + "learning_rate": 3.514739967173996e-05, + "loss": 0.0615, + "step": 1628 + }, + { + "epoch": 1.2736512900703674, + "grad_norm": 0.37993887066841125, + "learning_rate": 3.508225745783039e-05, + "loss": 0.0734, + "step": 1629 + }, + { + "epoch": 1.27443315089914, + "grad_norm": 0.3054560720920563, + "learning_rate": 3.501714302775297e-05, + "loss": 0.0452, + "step": 1630 + }, + { + "epoch": 1.2752150117279124, + "grad_norm": 0.33489546179771423, + "learning_rate": 3.4952056502781336e-05, + "loss": 0.0378, + "step": 1631 + }, + { + "epoch": 1.2759968725566848, + "grad_norm": 0.365928053855896, + "learning_rate": 3.488699800413714e-05, + "loss": 0.0532, + "step": 1632 + }, + { + "epoch": 1.2767787333854574, + "grad_norm": 0.3355133831501007, + "learning_rate": 3.4821967652989764e-05, + "loss": 0.0428, + "step": 1633 + }, + { + "epoch": 1.2775605942142298, + "grad_norm": 0.3714124262332916, + "learning_rate": 
3.4756965570456236e-05, + "loss": 0.0655, + "step": 1634 + }, + { + "epoch": 1.2783424550430023, + "grad_norm": 0.3428504765033722, + "learning_rate": 3.469199187760094e-05, + "loss": 0.0665, + "step": 1635 + }, + { + "epoch": 1.2791243158717749, + "grad_norm": 0.36663559079170227, + "learning_rate": 3.462704669543532e-05, + "loss": 0.0497, + "step": 1636 + }, + { + "epoch": 1.2799061767005473, + "grad_norm": 0.38190585374832153, + "learning_rate": 3.456213014491778e-05, + "loss": 0.0827, + "step": 1637 + }, + { + "epoch": 1.2806880375293197, + "grad_norm": 0.3864405155181885, + "learning_rate": 3.449724234695339e-05, + "loss": 0.0711, + "step": 1638 + }, + { + "epoch": 1.2814698983580923, + "grad_norm": 0.40252625942230225, + "learning_rate": 3.4432383422393666e-05, + "loss": 0.0581, + "step": 1639 + }, + { + "epoch": 1.2822517591868647, + "grad_norm": 0.4013853967189789, + "learning_rate": 3.43675534920363e-05, + "loss": 0.0414, + "step": 1640 + }, + { + "epoch": 1.2830336200156371, + "grad_norm": 0.37079882621765137, + "learning_rate": 3.430275267662505e-05, + "loss": 0.0458, + "step": 1641 + }, + { + "epoch": 1.2838154808444098, + "grad_norm": 0.4159170091152191, + "learning_rate": 3.4237981096849443e-05, + "loss": 0.0513, + "step": 1642 + }, + { + "epoch": 1.2845973416731822, + "grad_norm": 0.34236016869544983, + "learning_rate": 3.41732388733445e-05, + "loss": 0.0569, + "step": 1643 + }, + { + "epoch": 1.2853792025019546, + "grad_norm": 0.4664420485496521, + "learning_rate": 3.41085261266906e-05, + "loss": 0.0694, + "step": 1644 + }, + { + "epoch": 1.2861610633307272, + "grad_norm": 0.4391569495201111, + "learning_rate": 3.404384297741326e-05, + "loss": 0.0553, + "step": 1645 + }, + { + "epoch": 1.2869429241594996, + "grad_norm": 0.4465954899787903, + "learning_rate": 3.39791895459828e-05, + "loss": 0.0657, + "step": 1646 + }, + { + "epoch": 1.287724784988272, + "grad_norm": 0.32883110642433167, + "learning_rate": 3.3914565952814237e-05, + "loss": 0.0649, + "step": 1647 + }, + { + "epoch": 1.2885066458170447, + "grad_norm": 0.4377223551273346, + "learning_rate": 3.384997231826702e-05, + "loss": 0.0604, + "step": 1648 + }, + { + "epoch": 1.289288506645817, + "grad_norm": 0.4015994369983673, + "learning_rate": 3.378540876264479e-05, + "loss": 0.0699, + "step": 1649 + }, + { + "epoch": 1.2900703674745895, + "grad_norm": 0.46162915229797363, + "learning_rate": 3.372087540619513e-05, + "loss": 0.0503, + "step": 1650 + }, + { + "epoch": 1.290852228303362, + "grad_norm": 0.5002314448356628, + "learning_rate": 3.365637236910944e-05, + "loss": 0.07, + "step": 1651 + }, + { + "epoch": 1.2916340891321345, + "grad_norm": 0.4243912994861603, + "learning_rate": 3.359189977152264e-05, + "loss": 0.0558, + "step": 1652 + }, + { + "epoch": 1.292415949960907, + "grad_norm": 0.32526394724845886, + "learning_rate": 3.35274577335129e-05, + "loss": 0.049, + "step": 1653 + }, + { + "epoch": 1.2931978107896795, + "grad_norm": 0.4108484089374542, + "learning_rate": 3.346304637510153e-05, + "loss": 0.0648, + "step": 1654 + }, + { + "epoch": 1.293979671618452, + "grad_norm": 0.4533168077468872, + "learning_rate": 3.3398665816252705e-05, + "loss": 0.1103, + "step": 1655 + }, + { + "epoch": 1.2947615324472244, + "grad_norm": 0.3625294864177704, + "learning_rate": 3.333431617687318e-05, + "loss": 0.0387, + "step": 1656 + }, + { + "epoch": 1.295543393275997, + "grad_norm": 0.32721203565597534, + "learning_rate": 3.326999757681218e-05, + "loss": 0.0432, + "step": 1657 + }, + { + "epoch": 1.2963252541047694, + 
"grad_norm": 0.31126266717910767, + "learning_rate": 3.320571013586111e-05, + "loss": 0.0412, + "step": 1658 + }, + { + "epoch": 1.2971071149335418, + "grad_norm": 0.4804350435733795, + "learning_rate": 3.3141453973753324e-05, + "loss": 0.0841, + "step": 1659 + }, + { + "epoch": 1.2978889757623144, + "grad_norm": 0.38014477491378784, + "learning_rate": 3.3077229210163906e-05, + "loss": 0.0645, + "step": 1660 + }, + { + "epoch": 1.2986708365910868, + "grad_norm": 0.41697919368743896, + "learning_rate": 3.301303596470951e-05, + "loss": 0.0868, + "step": 1661 + }, + { + "epoch": 1.2994526974198592, + "grad_norm": 0.3734145760536194, + "learning_rate": 3.2948874356948074e-05, + "loss": 0.0526, + "step": 1662 + }, + { + "epoch": 1.3002345582486319, + "grad_norm": 0.3617169260978699, + "learning_rate": 3.288474450637857e-05, + "loss": 0.0427, + "step": 1663 + }, + { + "epoch": 1.3010164190774043, + "grad_norm": 0.43751880526542664, + "learning_rate": 3.2820646532440846e-05, + "loss": 0.0608, + "step": 1664 + }, + { + "epoch": 1.3017982799061767, + "grad_norm": 0.3905401825904846, + "learning_rate": 3.2756580554515434e-05, + "loss": 0.1049, + "step": 1665 + }, + { + "epoch": 1.302580140734949, + "grad_norm": 0.5550699234008789, + "learning_rate": 3.2692546691923174e-05, + "loss": 0.0957, + "step": 1666 + }, + { + "epoch": 1.3033620015637217, + "grad_norm": 0.47704434394836426, + "learning_rate": 3.262854506392519e-05, + "loss": 0.0875, + "step": 1667 + }, + { + "epoch": 1.3041438623924941, + "grad_norm": 0.44277340173721313, + "learning_rate": 3.256457578972251e-05, + "loss": 0.0608, + "step": 1668 + }, + { + "epoch": 1.3049257232212665, + "grad_norm": 0.30996251106262207, + "learning_rate": 3.25006389884559e-05, + "loss": 0.029, + "step": 1669 + }, + { + "epoch": 1.3057075840500392, + "grad_norm": 0.4079422652721405, + "learning_rate": 3.243673477920569e-05, + "loss": 0.0435, + "step": 1670 + }, + { + "epoch": 1.3064894448788116, + "grad_norm": 0.34746474027633667, + "learning_rate": 3.237286328099149e-05, + "loss": 0.0406, + "step": 1671 + }, + { + "epoch": 1.307271305707584, + "grad_norm": 0.3626313805580139, + "learning_rate": 3.230902461277196e-05, + "loss": 0.0358, + "step": 1672 + }, + { + "epoch": 1.3080531665363564, + "grad_norm": 0.40878042578697205, + "learning_rate": 3.2245218893444664e-05, + "loss": 0.0539, + "step": 1673 + }, + { + "epoch": 1.308835027365129, + "grad_norm": 0.6351624727249146, + "learning_rate": 3.218144624184574e-05, + "loss": 0.0833, + "step": 1674 + }, + { + "epoch": 1.3096168881939014, + "grad_norm": 0.40278536081314087, + "learning_rate": 3.2117706776749815e-05, + "loss": 0.0458, + "step": 1675 + }, + { + "epoch": 1.3103987490226738, + "grad_norm": 0.3833138942718506, + "learning_rate": 3.205400061686961e-05, + "loss": 0.0441, + "step": 1676 + }, + { + "epoch": 1.3111806098514465, + "grad_norm": 0.3146219551563263, + "learning_rate": 3.19903278808559e-05, + "loss": 0.0815, + "step": 1677 + }, + { + "epoch": 1.3119624706802189, + "grad_norm": 0.38004133105278015, + "learning_rate": 3.192668868729719e-05, + "loss": 0.0356, + "step": 1678 + }, + { + "epoch": 1.3127443315089913, + "grad_norm": 0.41840022802352905, + "learning_rate": 3.186308315471947e-05, + "loss": 0.0433, + "step": 1679 + }, + { + "epoch": 1.313526192337764, + "grad_norm": 0.5004205107688904, + "learning_rate": 3.179951140158608e-05, + "loss": 0.0702, + "step": 1680 + }, + { + "epoch": 1.3143080531665363, + "grad_norm": 0.39277809858322144, + "learning_rate": 3.173597354629746e-05, + "loss": 
0.0623, + "step": 1681 + }, + { + "epoch": 1.3150899139953087, + "grad_norm": 0.41982904076576233, + "learning_rate": 3.1672469707190864e-05, + "loss": 0.0658, + "step": 1682 + }, + { + "epoch": 1.3158717748240814, + "grad_norm": 0.41825437545776367, + "learning_rate": 3.1609000002540226e-05, + "loss": 0.0576, + "step": 1683 + }, + { + "epoch": 1.3166536356528538, + "grad_norm": 0.391253262758255, + "learning_rate": 3.15455645505559e-05, + "loss": 0.0411, + "step": 1684 + }, + { + "epoch": 1.3174354964816262, + "grad_norm": 0.587203323841095, + "learning_rate": 3.148216346938446e-05, + "loss": 0.0591, + "step": 1685 + }, + { + "epoch": 1.3182173573103988, + "grad_norm": 0.39162901043891907, + "learning_rate": 3.141879687710841e-05, + "loss": 0.0608, + "step": 1686 + }, + { + "epoch": 1.3189992181391712, + "grad_norm": 0.4081566035747528, + "learning_rate": 3.135546489174612e-05, + "loss": 0.0481, + "step": 1687 + }, + { + "epoch": 1.3197810789679436, + "grad_norm": 0.35251811146736145, + "learning_rate": 3.1292167631251414e-05, + "loss": 0.0419, + "step": 1688 + }, + { + "epoch": 1.3205629397967162, + "grad_norm": 0.45182615518569946, + "learning_rate": 3.122890521351345e-05, + "loss": 0.0776, + "step": 1689 + }, + { + "epoch": 1.3213448006254886, + "grad_norm": 0.39525046944618225, + "learning_rate": 3.116567775635654e-05, + "loss": 0.0627, + "step": 1690 + }, + { + "epoch": 1.322126661454261, + "grad_norm": 0.5914624929428101, + "learning_rate": 3.1102485377539886e-05, + "loss": 0.0611, + "step": 1691 + }, + { + "epoch": 1.3229085222830337, + "grad_norm": 0.43132856488227844, + "learning_rate": 3.103932819475729e-05, + "loss": 0.0544, + "step": 1692 + }, + { + "epoch": 1.323690383111806, + "grad_norm": 0.4183245301246643, + "learning_rate": 3.0976206325637046e-05, + "loss": 0.0684, + "step": 1693 + }, + { + "epoch": 1.3244722439405785, + "grad_norm": 0.4382965564727783, + "learning_rate": 3.0913119887741716e-05, + "loss": 0.0352, + "step": 1694 + }, + { + "epoch": 1.3252541047693511, + "grad_norm": 0.5051260590553284, + "learning_rate": 3.085006899856777e-05, + "loss": 0.0728, + "step": 1695 + }, + { + "epoch": 1.3260359655981235, + "grad_norm": 0.39223799109458923, + "learning_rate": 3.078705377554558e-05, + "loss": 0.0612, + "step": 1696 + }, + { + "epoch": 1.326817826426896, + "grad_norm": 0.44544515013694763, + "learning_rate": 3.072407433603901e-05, + "loss": 0.0232, + "step": 1697 + }, + { + "epoch": 1.3275996872556686, + "grad_norm": 0.540951669216156, + "learning_rate": 3.066113079734535e-05, + "loss": 0.0704, + "step": 1698 + }, + { + "epoch": 1.328381548084441, + "grad_norm": 0.3875698149204254, + "learning_rate": 3.059822327669494e-05, + "loss": 0.0508, + "step": 1699 + }, + { + "epoch": 1.3291634089132134, + "grad_norm": 0.4373841881752014, + "learning_rate": 3.0535351891251094e-05, + "loss": 0.0275, + "step": 1700 + }, + { + "epoch": 1.329945269741986, + "grad_norm": 0.5520462393760681, + "learning_rate": 3.0472516758109847e-05, + "loss": 0.0665, + "step": 1701 + }, + { + "epoch": 1.3307271305707584, + "grad_norm": 0.5406703352928162, + "learning_rate": 3.040971799429966e-05, + "loss": 0.0559, + "step": 1702 + }, + { + "epoch": 1.3315089913995308, + "grad_norm": 0.4071379005908966, + "learning_rate": 3.034695571678127e-05, + "loss": 0.0572, + "step": 1703 + }, + { + "epoch": 1.3322908522283035, + "grad_norm": 0.37903717160224915, + "learning_rate": 3.0284230042447493e-05, + "loss": 0.046, + "step": 1704 + }, + { + "epoch": 1.3330727130570759, + "grad_norm": 
0.4394493103027344, + "learning_rate": 3.0221541088122917e-05, + "loss": 0.0527, + "step": 1705 + }, + { + "epoch": 1.3338545738858483, + "grad_norm": 0.404144287109375, + "learning_rate": 3.015888897056381e-05, + "loss": 0.0903, + "step": 1706 + }, + { + "epoch": 1.334636434714621, + "grad_norm": 0.453784704208374, + "learning_rate": 3.0096273806457763e-05, + "loss": 0.0466, + "step": 1707 + }, + { + "epoch": 1.3354182955433933, + "grad_norm": 0.4025675654411316, + "learning_rate": 3.0033695712423603e-05, + "loss": 0.119, + "step": 1708 + }, + { + "epoch": 1.3362001563721657, + "grad_norm": 0.4505664110183716, + "learning_rate": 2.997115480501106e-05, + "loss": 0.092, + "step": 1709 + }, + { + "epoch": 1.3369820172009383, + "grad_norm": 0.4255506992340088, + "learning_rate": 2.9908651200700655e-05, + "loss": 0.0506, + "step": 1710 + }, + { + "epoch": 1.3377638780297108, + "grad_norm": 0.30817708373069763, + "learning_rate": 2.9846185015903394e-05, + "loss": 0.0429, + "step": 1711 + }, + { + "epoch": 1.3385457388584832, + "grad_norm": 0.3303023874759674, + "learning_rate": 2.9783756366960613e-05, + "loss": 0.0814, + "step": 1712 + }, + { + "epoch": 1.3393275996872558, + "grad_norm": 0.4083544909954071, + "learning_rate": 2.9721365370143722e-05, + "loss": 0.1116, + "step": 1713 + }, + { + "epoch": 1.3401094605160282, + "grad_norm": 0.321193665266037, + "learning_rate": 2.965901214165404e-05, + "loss": 0.0355, + "step": 1714 + }, + { + "epoch": 1.3408913213448006, + "grad_norm": 0.30505916476249695, + "learning_rate": 2.959669679762248e-05, + "loss": 0.0242, + "step": 1715 + }, + { + "epoch": 1.3416731821735732, + "grad_norm": 0.3857214152812958, + "learning_rate": 2.9534419454109476e-05, + "loss": 0.0725, + "step": 1716 + }, + { + "epoch": 1.3424550430023456, + "grad_norm": 0.3174574673175812, + "learning_rate": 2.9472180227104628e-05, + "loss": 0.0465, + "step": 1717 + }, + { + "epoch": 1.343236903831118, + "grad_norm": 0.4131394028663635, + "learning_rate": 2.940997923252654e-05, + "loss": 0.079, + "step": 1718 + }, + { + "epoch": 1.3440187646598907, + "grad_norm": 0.3730389475822449, + "learning_rate": 2.9347816586222654e-05, + "loss": 0.0444, + "step": 1719 + }, + { + "epoch": 1.344800625488663, + "grad_norm": 0.39177075028419495, + "learning_rate": 2.9285692403968977e-05, + "loss": 0.0683, + "step": 1720 + }, + { + "epoch": 1.3455824863174355, + "grad_norm": 0.31527620553970337, + "learning_rate": 2.9223606801469882e-05, + "loss": 0.0887, + "step": 1721 + }, + { + "epoch": 1.346364347146208, + "grad_norm": 0.3214835524559021, + "learning_rate": 2.9161559894357827e-05, + "loss": 0.0491, + "step": 1722 + }, + { + "epoch": 1.3471462079749805, + "grad_norm": 0.4698956608772278, + "learning_rate": 2.909955179819328e-05, + "loss": 0.0489, + "step": 1723 + }, + { + "epoch": 1.347928068803753, + "grad_norm": 0.42727044224739075, + "learning_rate": 2.9037582628464398e-05, + "loss": 0.0557, + "step": 1724 + }, + { + "epoch": 1.3487099296325253, + "grad_norm": 0.6430052518844604, + "learning_rate": 2.8975652500586826e-05, + "loss": 0.0917, + "step": 1725 + }, + { + "epoch": 1.349491790461298, + "grad_norm": 0.36423027515411377, + "learning_rate": 2.8913761529903484e-05, + "loss": 0.0952, + "step": 1726 + }, + { + "epoch": 1.3502736512900704, + "grad_norm": 0.3790799677371979, + "learning_rate": 2.885190983168444e-05, + "loss": 0.068, + "step": 1727 + }, + { + "epoch": 1.3510555121188428, + "grad_norm": 0.45250916481018066, + "learning_rate": 2.8790097521126507e-05, + "loss": 0.0534, + "step": 
1728 + }, + { + "epoch": 1.3518373729476152, + "grad_norm": 0.3944171965122223, + "learning_rate": 2.8728324713353194e-05, + "loss": 0.0531, + "step": 1729 + }, + { + "epoch": 1.3526192337763878, + "grad_norm": 0.3291238844394684, + "learning_rate": 2.8666591523414475e-05, + "loss": 0.0625, + "step": 1730 + }, + { + "epoch": 1.3534010946051602, + "grad_norm": 0.5208644866943359, + "learning_rate": 2.860489806628648e-05, + "loss": 0.0516, + "step": 1731 + }, + { + "epoch": 1.3541829554339326, + "grad_norm": 0.3720276951789856, + "learning_rate": 2.854324445687135e-05, + "loss": 0.052, + "step": 1732 + }, + { + "epoch": 1.3549648162627053, + "grad_norm": 0.3863518536090851, + "learning_rate": 2.8481630809997022e-05, + "loss": 0.0671, + "step": 1733 + }, + { + "epoch": 1.3557466770914777, + "grad_norm": 0.4168684482574463, + "learning_rate": 2.842005724041701e-05, + "loss": 0.0646, + "step": 1734 + }, + { + "epoch": 1.35652853792025, + "grad_norm": 0.33291158080101013, + "learning_rate": 2.8358523862810155e-05, + "loss": 0.0565, + "step": 1735 + }, + { + "epoch": 1.3573103987490227, + "grad_norm": 0.36476758122444153, + "learning_rate": 2.8297030791780455e-05, + "loss": 0.0538, + "step": 1736 + }, + { + "epoch": 1.3580922595777951, + "grad_norm": 0.3390056788921356, + "learning_rate": 2.823557814185691e-05, + "loss": 0.0636, + "step": 1737 + }, + { + "epoch": 1.3588741204065675, + "grad_norm": 0.5852319598197937, + "learning_rate": 2.81741660274931e-05, + "loss": 0.1102, + "step": 1738 + }, + { + "epoch": 1.3596559812353401, + "grad_norm": 0.34502658247947693, + "learning_rate": 2.811279456306718e-05, + "loss": 0.0382, + "step": 1739 + }, + { + "epoch": 1.3604378420641126, + "grad_norm": 0.3496917188167572, + "learning_rate": 2.8051463862881665e-05, + "loss": 0.0833, + "step": 1740 + }, + { + "epoch": 1.361219702892885, + "grad_norm": 0.5584290027618408, + "learning_rate": 2.7990174041162997e-05, + "loss": 0.0919, + "step": 1741 + }, + { + "epoch": 1.3620015637216576, + "grad_norm": 0.3309808671474457, + "learning_rate": 2.792892521206163e-05, + "loss": 0.0522, + "step": 1742 + }, + { + "epoch": 1.36278342455043, + "grad_norm": 0.4036805331707001, + "learning_rate": 2.7867717489651567e-05, + "loss": 0.0644, + "step": 1743 + }, + { + "epoch": 1.3635652853792024, + "grad_norm": 0.2975353002548218, + "learning_rate": 2.7806550987930314e-05, + "loss": 0.061, + "step": 1744 + }, + { + "epoch": 1.364347146207975, + "grad_norm": 0.4004823863506317, + "learning_rate": 2.7745425820818567e-05, + "loss": 0.0501, + "step": 1745 + }, + { + "epoch": 1.3651290070367474, + "grad_norm": 0.4823578894138336, + "learning_rate": 2.7684342102160034e-05, + "loss": 0.0705, + "step": 1746 + }, + { + "epoch": 1.3659108678655199, + "grad_norm": 0.40492135286331177, + "learning_rate": 2.7623299945721302e-05, + "loss": 0.0546, + "step": 1747 + }, + { + "epoch": 1.3666927286942925, + "grad_norm": 0.3871229887008667, + "learning_rate": 2.7562299465191432e-05, + "loss": 0.0514, + "step": 1748 + }, + { + "epoch": 1.3674745895230649, + "grad_norm": 0.43121662735939026, + "learning_rate": 2.7501340774181934e-05, + "loss": 0.0724, + "step": 1749 + }, + { + "epoch": 1.3682564503518373, + "grad_norm": 0.4135430157184601, + "learning_rate": 2.7440423986226527e-05, + "loss": 0.0584, + "step": 1750 + }, + { + "epoch": 1.36903831118061, + "grad_norm": 0.36338895559310913, + "learning_rate": 2.7379549214780754e-05, + "loss": 0.0534, + "step": 1751 + }, + { + "epoch": 1.3698201720093823, + "grad_norm": 0.46101894974708557, + 
"learning_rate": 2.7318716573222053e-05, + "loss": 0.0596, + "step": 1752 + }, + { + "epoch": 1.3706020328381547, + "grad_norm": 0.3271227180957794, + "learning_rate": 2.7257926174849328e-05, + "loss": 0.0326, + "step": 1753 + }, + { + "epoch": 1.3713838936669274, + "grad_norm": 0.3329024612903595, + "learning_rate": 2.719717813288275e-05, + "loss": 0.0499, + "step": 1754 + }, + { + "epoch": 1.3721657544956998, + "grad_norm": 0.49644121527671814, + "learning_rate": 2.713647256046371e-05, + "loss": 0.069, + "step": 1755 + }, + { + "epoch": 1.3729476153244722, + "grad_norm": 0.32194784283638, + "learning_rate": 2.707580957065445e-05, + "loss": 0.0671, + "step": 1756 + }, + { + "epoch": 1.3737294761532448, + "grad_norm": 0.31142404675483704, + "learning_rate": 2.7015189276437884e-05, + "loss": 0.0714, + "step": 1757 + }, + { + "epoch": 1.3745113369820172, + "grad_norm": 0.4320639967918396, + "learning_rate": 2.6954611790717445e-05, + "loss": 0.0679, + "step": 1758 + }, + { + "epoch": 1.3752931978107896, + "grad_norm": 0.4968913495540619, + "learning_rate": 2.6894077226316783e-05, + "loss": 0.067, + "step": 1759 + }, + { + "epoch": 1.3760750586395623, + "grad_norm": 0.44248491525650024, + "learning_rate": 2.6833585695979714e-05, + "loss": 0.0791, + "step": 1760 + }, + { + "epoch": 1.3768569194683347, + "grad_norm": 0.3351903259754181, + "learning_rate": 2.6773137312369756e-05, + "loss": 0.0408, + "step": 1761 + }, + { + "epoch": 1.377638780297107, + "grad_norm": 0.3830302059650421, + "learning_rate": 2.6712732188070198e-05, + "loss": 0.0586, + "step": 1762 + }, + { + "epoch": 1.3784206411258797, + "grad_norm": 0.30592599511146545, + "learning_rate": 2.6652370435583712e-05, + "loss": 0.0456, + "step": 1763 + }, + { + "epoch": 1.379202501954652, + "grad_norm": 0.33059370517730713, + "learning_rate": 2.6592052167332126e-05, + "loss": 0.0443, + "step": 1764 + }, + { + "epoch": 1.3799843627834245, + "grad_norm": 0.3415541350841522, + "learning_rate": 2.6531777495656406e-05, + "loss": 0.0372, + "step": 1765 + }, + { + "epoch": 1.3807662236121971, + "grad_norm": 0.5239612460136414, + "learning_rate": 2.6471546532816238e-05, + "loss": 0.0576, + "step": 1766 + }, + { + "epoch": 1.3815480844409695, + "grad_norm": 0.4342079162597656, + "learning_rate": 2.641135939098991e-05, + "loss": 0.0621, + "step": 1767 + }, + { + "epoch": 1.382329945269742, + "grad_norm": 0.42560240626335144, + "learning_rate": 2.6351216182274107e-05, + "loss": 0.0423, + "step": 1768 + }, + { + "epoch": 1.3831118060985146, + "grad_norm": 0.36572739481925964, + "learning_rate": 2.6291117018683676e-05, + "loss": 0.0657, + "step": 1769 + }, + { + "epoch": 1.383893666927287, + "grad_norm": 0.46662601828575134, + "learning_rate": 2.6231062012151487e-05, + "loss": 0.0828, + "step": 1770 + }, + { + "epoch": 1.3846755277560594, + "grad_norm": 0.34814974665641785, + "learning_rate": 2.6171051274528057e-05, + "loss": 0.0488, + "step": 1771 + }, + { + "epoch": 1.385457388584832, + "grad_norm": 0.4473120868206024, + "learning_rate": 2.611108491758158e-05, + "loss": 0.0564, + "step": 1772 + }, + { + "epoch": 1.3862392494136044, + "grad_norm": 0.5653979778289795, + "learning_rate": 2.6051163052997528e-05, + "loss": 0.0885, + "step": 1773 + }, + { + "epoch": 1.3870211102423768, + "grad_norm": 0.38065004348754883, + "learning_rate": 2.5991285792378457e-05, + "loss": 0.0526, + "step": 1774 + }, + { + "epoch": 1.3878029710711495, + "grad_norm": 0.4245047867298126, + "learning_rate": 2.5931453247243963e-05, + "loss": 0.0553, + "step": 1775 + }, + { + 
"epoch": 1.3885848318999219, + "grad_norm": 0.4021928310394287, + "learning_rate": 2.5871665529030286e-05, + "loss": 0.05, + "step": 1776 + }, + { + "epoch": 1.3893666927286943, + "grad_norm": 0.4062913656234741, + "learning_rate": 2.5811922749090188e-05, + "loss": 0.0593, + "step": 1777 + }, + { + "epoch": 1.3901485535574667, + "grad_norm": 0.3696041405200958, + "learning_rate": 2.5752225018692745e-05, + "loss": 0.0567, + "step": 1778 + }, + { + "epoch": 1.3909304143862393, + "grad_norm": 0.42013299465179443, + "learning_rate": 2.569257244902311e-05, + "loss": 0.0666, + "step": 1779 + }, + { + "epoch": 1.3917122752150117, + "grad_norm": 0.530455470085144, + "learning_rate": 2.56329651511824e-05, + "loss": 0.0424, + "step": 1780 + }, + { + "epoch": 1.3924941360437841, + "grad_norm": 0.46744510531425476, + "learning_rate": 2.5573403236187287e-05, + "loss": 0.0484, + "step": 1781 + }, + { + "epoch": 1.3932759968725568, + "grad_norm": 0.39260637760162354, + "learning_rate": 2.551388681497e-05, + "loss": 0.0457, + "step": 1782 + }, + { + "epoch": 1.3940578577013292, + "grad_norm": 0.379202276468277, + "learning_rate": 2.5454415998378073e-05, + "loss": 0.057, + "step": 1783 + }, + { + "epoch": 1.3948397185301016, + "grad_norm": 0.4000501036643982, + "learning_rate": 2.5394990897173987e-05, + "loss": 0.0752, + "step": 1784 + }, + { + "epoch": 1.395621579358874, + "grad_norm": 0.3499116897583008, + "learning_rate": 2.5335611622035198e-05, + "loss": 0.0469, + "step": 1785 + }, + { + "epoch": 1.3964034401876466, + "grad_norm": 0.4184105694293976, + "learning_rate": 2.5276278283553746e-05, + "loss": 0.0524, + "step": 1786 + }, + { + "epoch": 1.397185301016419, + "grad_norm": 0.31106022000312805, + "learning_rate": 2.5216990992236135e-05, + "loss": 0.0615, + "step": 1787 + }, + { + "epoch": 1.3979671618451914, + "grad_norm": 0.44269534945487976, + "learning_rate": 2.51577498585031e-05, + "loss": 0.0368, + "step": 1788 + }, + { + "epoch": 1.398749022673964, + "grad_norm": 0.3069053888320923, + "learning_rate": 2.509855499268938e-05, + "loss": 0.026, + "step": 1789 + }, + { + "epoch": 1.3995308835027365, + "grad_norm": 0.38774728775024414, + "learning_rate": 2.5039406505043662e-05, + "loss": 0.0392, + "step": 1790 + }, + { + "epoch": 1.4003127443315089, + "grad_norm": 0.5503056645393372, + "learning_rate": 2.498030450572808e-05, + "loss": 0.0639, + "step": 1791 + }, + { + "epoch": 1.4010946051602815, + "grad_norm": 0.2796127498149872, + "learning_rate": 2.492124910481829e-05, + "loss": 0.0671, + "step": 1792 + }, + { + "epoch": 1.401876465989054, + "grad_norm": 0.811257541179657, + "learning_rate": 2.486224041230319e-05, + "loss": 0.0987, + "step": 1793 + }, + { + "epoch": 1.4026583268178263, + "grad_norm": 0.27601099014282227, + "learning_rate": 2.4803278538084568e-05, + "loss": 0.0403, + "step": 1794 + }, + { + "epoch": 1.403440187646599, + "grad_norm": 0.3818424642086029, + "learning_rate": 2.474436359197714e-05, + "loss": 0.0441, + "step": 1795 + }, + { + "epoch": 1.4042220484753714, + "grad_norm": 0.37289881706237793, + "learning_rate": 2.468549568370813e-05, + "loss": 0.0528, + "step": 1796 + }, + { + "epoch": 1.4050039093041438, + "grad_norm": 0.3854784369468689, + "learning_rate": 2.4626674922917207e-05, + "loss": 0.0645, + "step": 1797 + }, + { + "epoch": 1.4057857701329164, + "grad_norm": 0.38945889472961426, + "learning_rate": 2.4567901419156207e-05, + "loss": 0.072, + "step": 1798 + }, + { + "epoch": 1.4065676309616888, + "grad_norm": 0.35413599014282227, + "learning_rate": 
2.4509175281888957e-05, + "loss": 0.0319, + "step": 1799 + }, + { + "epoch": 1.4073494917904612, + "grad_norm": 0.32834237813949585, + "learning_rate": 2.4450496620491053e-05, + "loss": 0.0323, + "step": 1800 + }, + { + "epoch": 1.4073494917904612, + "eval_loss": 0.12402170151472092, + "eval_runtime": 13.3717, + "eval_samples_per_second": 3.889, + "eval_steps_per_second": 0.972, + "step": 1800 + }, + { + "epoch": 1.4081313526192338, + "grad_norm": 0.4939782917499542, + "learning_rate": 2.4391865544249687e-05, + "loss": 0.0804, + "step": 1801 + }, + { + "epoch": 1.4089132134480062, + "grad_norm": 0.4043434262275696, + "learning_rate": 2.4333282162363402e-05, + "loss": 0.0754, + "step": 1802 + }, + { + "epoch": 1.4096950742767786, + "grad_norm": 0.46937301754951477, + "learning_rate": 2.4274746583941975e-05, + "loss": 0.0391, + "step": 1803 + }, + { + "epoch": 1.4104769351055513, + "grad_norm": 0.3977530002593994, + "learning_rate": 2.4216258918006033e-05, + "loss": 0.0621, + "step": 1804 + }, + { + "epoch": 1.4112587959343237, + "grad_norm": 0.4665568768978119, + "learning_rate": 2.415781927348709e-05, + "loss": 0.06, + "step": 1805 + }, + { + "epoch": 1.412040656763096, + "grad_norm": 0.34306639432907104, + "learning_rate": 2.4099427759227183e-05, + "loss": 0.0427, + "step": 1806 + }, + { + "epoch": 1.4128225175918687, + "grad_norm": 0.3104557394981384, + "learning_rate": 2.4041084483978616e-05, + "loss": 0.038, + "step": 1807 + }, + { + "epoch": 1.4136043784206411, + "grad_norm": 0.45473772287368774, + "learning_rate": 2.3982789556404e-05, + "loss": 0.0842, + "step": 1808 + }, + { + "epoch": 1.4143862392494135, + "grad_norm": 0.43577784299850464, + "learning_rate": 2.39245430850758e-05, + "loss": 0.0577, + "step": 1809 + }, + { + "epoch": 1.4151681000781862, + "grad_norm": 0.33145925402641296, + "learning_rate": 2.386634517847626e-05, + "loss": 0.0459, + "step": 1810 + }, + { + "epoch": 1.4159499609069586, + "grad_norm": 0.4155854284763336, + "learning_rate": 2.380819594499718e-05, + "loss": 0.0449, + "step": 1811 + }, + { + "epoch": 1.416731821735731, + "grad_norm": 0.33247607946395874, + "learning_rate": 2.3750095492939695e-05, + "loss": 0.0786, + "step": 1812 + }, + { + "epoch": 1.4175136825645036, + "grad_norm": 0.398334264755249, + "learning_rate": 2.369204393051409e-05, + "loss": 0.0849, + "step": 1813 + }, + { + "epoch": 1.418295543393276, + "grad_norm": 0.4067325294017792, + "learning_rate": 2.3634041365839588e-05, + "loss": 0.0603, + "step": 1814 + }, + { + "epoch": 1.4190774042220484, + "grad_norm": 0.4016716778278351, + "learning_rate": 2.3576087906944195e-05, + "loss": 0.0745, + "step": 1815 + }, + { + "epoch": 1.419859265050821, + "grad_norm": 0.36571478843688965, + "learning_rate": 2.351818366176443e-05, + "loss": 0.0616, + "step": 1816 + }, + { + "epoch": 1.4206411258795935, + "grad_norm": 0.3647540509700775, + "learning_rate": 2.34603287381451e-05, + "loss": 0.0727, + "step": 1817 + }, + { + "epoch": 1.4214229867083659, + "grad_norm": 0.32372230291366577, + "learning_rate": 2.340252324383925e-05, + "loss": 0.0571, + "step": 1818 + }, + { + "epoch": 1.4222048475371385, + "grad_norm": 0.43298980593681335, + "learning_rate": 2.33447672865078e-05, + "loss": 0.0498, + "step": 1819 + }, + { + "epoch": 1.422986708365911, + "grad_norm": 0.40937963128089905, + "learning_rate": 2.3287060973719433e-05, + "loss": 0.0609, + "step": 1820 + }, + { + "epoch": 1.4237685691946833, + "grad_norm": 0.42888909578323364, + "learning_rate": 2.322940441295036e-05, + "loss": 0.0647, + "step": 1821 
+ }, + { + "epoch": 1.424550430023456, + "grad_norm": 0.5512627363204956, + "learning_rate": 2.317179771158414e-05, + "loss": 0.0887, + "step": 1822 + }, + { + "epoch": 1.4253322908522283, + "grad_norm": 0.4589550793170929, + "learning_rate": 2.3114240976911466e-05, + "loss": 0.0585, + "step": 1823 + }, + { + "epoch": 1.4261141516810008, + "grad_norm": 0.3198803663253784, + "learning_rate": 2.305673431612994e-05, + "loss": 0.0577, + "step": 1824 + }, + { + "epoch": 1.4268960125097734, + "grad_norm": 0.33503422141075134, + "learning_rate": 2.2999277836343973e-05, + "loss": 0.0504, + "step": 1825 + }, + { + "epoch": 1.4276778733385458, + "grad_norm": 0.3085525333881378, + "learning_rate": 2.2941871644564482e-05, + "loss": 0.065, + "step": 1826 + }, + { + "epoch": 1.4284597341673182, + "grad_norm": 0.4282321035861969, + "learning_rate": 2.2884515847708648e-05, + "loss": 0.0543, + "step": 1827 + }, + { + "epoch": 1.4292415949960908, + "grad_norm": 0.3238259255886078, + "learning_rate": 2.2827210552599925e-05, + "loss": 0.0507, + "step": 1828 + }, + { + "epoch": 1.4300234558248632, + "grad_norm": 0.37012743949890137, + "learning_rate": 2.276995586596763e-05, + "loss": 0.0297, + "step": 1829 + }, + { + "epoch": 1.4308053166536356, + "grad_norm": 0.3611026108264923, + "learning_rate": 2.2712751894446833e-05, + "loss": 0.0389, + "step": 1830 + }, + { + "epoch": 1.4315871774824083, + "grad_norm": 0.26761916279792786, + "learning_rate": 2.265559874457815e-05, + "loss": 0.0505, + "step": 1831 + }, + { + "epoch": 1.4323690383111807, + "grad_norm": 0.4100571274757385, + "learning_rate": 2.2598496522807556e-05, + "loss": 0.0591, + "step": 1832 + }, + { + "epoch": 1.433150899139953, + "grad_norm": 0.2951042652130127, + "learning_rate": 2.254144533548616e-05, + "loss": 0.0343, + "step": 1833 + }, + { + "epoch": 1.4339327599687255, + "grad_norm": 0.49010226130485535, + "learning_rate": 2.2484445288870037e-05, + "loss": 0.079, + "step": 1834 + }, + { + "epoch": 1.4347146207974981, + "grad_norm": 0.37976258993148804, + "learning_rate": 2.2427496489119986e-05, + "loss": 0.037, + "step": 1835 + }, + { + "epoch": 1.4354964816262705, + "grad_norm": 0.4197808802127838, + "learning_rate": 2.2370599042301394e-05, + "loss": 0.0572, + "step": 1836 + }, + { + "epoch": 1.436278342455043, + "grad_norm": 0.30215969681739807, + "learning_rate": 2.2313753054383958e-05, + "loss": 0.0342, + "step": 1837 + }, + { + "epoch": 1.4370602032838156, + "grad_norm": 0.3675067126750946, + "learning_rate": 2.225695863124161e-05, + "loss": 0.0676, + "step": 1838 + }, + { + "epoch": 1.437842064112588, + "grad_norm": 0.550208568572998, + "learning_rate": 2.220021587865218e-05, + "loss": 0.0797, + "step": 1839 + }, + { + "epoch": 1.4386239249413604, + "grad_norm": 0.35796505212783813, + "learning_rate": 2.214352490229728e-05, + "loss": 0.0376, + "step": 1840 + }, + { + "epoch": 1.4394057857701328, + "grad_norm": 0.3726497292518616, + "learning_rate": 2.2086885807762093e-05, + "loss": 0.0602, + "step": 1841 + }, + { + "epoch": 1.4401876465989054, + "grad_norm": 0.47628331184387207, + "learning_rate": 2.203029870053517e-05, + "loss": 0.0671, + "step": 1842 + }, + { + "epoch": 1.4409695074276778, + "grad_norm": 0.3403119444847107, + "learning_rate": 2.197376368600825e-05, + "loss": 0.0853, + "step": 1843 + }, + { + "epoch": 1.4417513682564502, + "grad_norm": 0.39009755849838257, + "learning_rate": 2.191728086947603e-05, + "loss": 0.0405, + "step": 1844 + }, + { + "epoch": 1.4425332290852229, + "grad_norm": 0.340263307094574, + 
"learning_rate": 2.1860850356136015e-05, + "loss": 0.0566, + "step": 1845 + }, + { + "epoch": 1.4433150899139953, + "grad_norm": 0.3942875862121582, + "learning_rate": 2.180447225108827e-05, + "loss": 0.0804, + "step": 1846 + }, + { + "epoch": 1.4440969507427677, + "grad_norm": 0.44318339228630066, + "learning_rate": 2.1748146659335256e-05, + "loss": 0.0542, + "step": 1847 + }, + { + "epoch": 1.4448788115715403, + "grad_norm": 0.36085474491119385, + "learning_rate": 2.1691873685781673e-05, + "loss": 0.06, + "step": 1848 + }, + { + "epoch": 1.4456606724003127, + "grad_norm": 0.3341085612773895, + "learning_rate": 2.163565343523416e-05, + "loss": 0.0467, + "step": 1849 + }, + { + "epoch": 1.4464425332290851, + "grad_norm": 0.48646220564842224, + "learning_rate": 2.15794860124012e-05, + "loss": 0.0559, + "step": 1850 + }, + { + "epoch": 1.4472243940578577, + "grad_norm": 0.3500666618347168, + "learning_rate": 2.152337152189287e-05, + "loss": 0.0617, + "step": 1851 + }, + { + "epoch": 1.4480062548866302, + "grad_norm": 0.34813258051872253, + "learning_rate": 2.1467310068220658e-05, + "loss": 0.0397, + "step": 1852 + }, + { + "epoch": 1.4487881157154026, + "grad_norm": 0.5163322687149048, + "learning_rate": 2.1411301755797293e-05, + "loss": 0.0591, + "step": 1853 + }, + { + "epoch": 1.4495699765441752, + "grad_norm": 0.37251824140548706, + "learning_rate": 2.1355346688936505e-05, + "loss": 0.0317, + "step": 1854 + }, + { + "epoch": 1.4503518373729476, + "grad_norm": 0.42122918367385864, + "learning_rate": 2.1299444971852876e-05, + "loss": 0.0363, + "step": 1855 + }, + { + "epoch": 1.45113369820172, + "grad_norm": 0.3831481337547302, + "learning_rate": 2.1243596708661618e-05, + "loss": 0.0396, + "step": 1856 + }, + { + "epoch": 1.4519155590304926, + "grad_norm": 0.40503552556037903, + "learning_rate": 2.118780200337836e-05, + "loss": 0.0491, + "step": 1857 + }, + { + "epoch": 1.452697419859265, + "grad_norm": 0.6177302598953247, + "learning_rate": 2.113206095991907e-05, + "loss": 0.0611, + "step": 1858 + }, + { + "epoch": 1.4534792806880374, + "grad_norm": 0.4156329035758972, + "learning_rate": 2.107637368209966e-05, + "loss": 0.0616, + "step": 1859 + }, + { + "epoch": 1.45426114151681, + "grad_norm": 0.41171953082084656, + "learning_rate": 2.102074027363594e-05, + "loss": 0.0472, + "step": 1860 + }, + { + "epoch": 1.4550430023455825, + "grad_norm": 0.5042744278907776, + "learning_rate": 2.096516083814346e-05, + "loss": 0.0731, + "step": 1861 + }, + { + "epoch": 1.4558248631743549, + "grad_norm": 0.36022377014160156, + "learning_rate": 2.0909635479137163e-05, + "loss": 0.0465, + "step": 1862 + }, + { + "epoch": 1.4566067240031275, + "grad_norm": 0.45423850417137146, + "learning_rate": 2.085416430003131e-05, + "loss": 0.0557, + "step": 1863 + }, + { + "epoch": 1.4573885848319, + "grad_norm": 0.46697577834129333, + "learning_rate": 2.079874740413924e-05, + "loss": 0.1132, + "step": 1864 + }, + { + "epoch": 1.4581704456606723, + "grad_norm": 0.46780556440353394, + "learning_rate": 2.074338489467322e-05, + "loss": 0.0849, + "step": 1865 + }, + { + "epoch": 1.458952306489445, + "grad_norm": 0.5109332203865051, + "learning_rate": 2.0688076874744183e-05, + "loss": 0.0803, + "step": 1866 + }, + { + "epoch": 1.4597341673182174, + "grad_norm": 0.3422967493534088, + "learning_rate": 2.0632823447361593e-05, + "loss": 0.0786, + "step": 1867 + }, + { + "epoch": 1.4605160281469898, + "grad_norm": 0.26753708720207214, + "learning_rate": 2.057762471543329e-05, + "loss": 0.0287, + "step": 1868 + }, + { + "epoch": 
1.4612978889757624, + "grad_norm": 0.4200882911682129, + "learning_rate": 2.0522480781765153e-05, + "loss": 0.0504, + "step": 1869 + }, + { + "epoch": 1.4620797498045348, + "grad_norm": 0.5091989040374756, + "learning_rate": 2.0467391749061032e-05, + "loss": 0.0666, + "step": 1870 + }, + { + "epoch": 1.4628616106333072, + "grad_norm": 0.3979477286338806, + "learning_rate": 2.0412357719922593e-05, + "loss": 0.0457, + "step": 1871 + }, + { + "epoch": 1.4636434714620798, + "grad_norm": 0.32263174653053284, + "learning_rate": 2.0357378796848968e-05, + "loss": 0.0695, + "step": 1872 + }, + { + "epoch": 1.4644253322908523, + "grad_norm": 0.32924485206604004, + "learning_rate": 2.0302455082236716e-05, + "loss": 0.07, + "step": 1873 + }, + { + "epoch": 1.4652071931196247, + "grad_norm": 0.3112393021583557, + "learning_rate": 2.0247586678379534e-05, + "loss": 0.054, + "step": 1874 + }, + { + "epoch": 1.4659890539483973, + "grad_norm": 0.3827929198741913, + "learning_rate": 2.019277368746812e-05, + "loss": 0.064, + "step": 1875 + }, + { + "epoch": 1.4667709147771697, + "grad_norm": 0.4558732509613037, + "learning_rate": 2.0138016211589972e-05, + "loss": 0.0433, + "step": 1876 + }, + { + "epoch": 1.467552775605942, + "grad_norm": 0.5064982175827026, + "learning_rate": 2.008331435272917e-05, + "loss": 0.0723, + "step": 1877 + }, + { + "epoch": 1.4683346364347147, + "grad_norm": 0.3777099549770355, + "learning_rate": 2.0028668212766283e-05, + "loss": 0.0542, + "step": 1878 + }, + { + "epoch": 1.4691164972634871, + "grad_norm": 0.3097599744796753, + "learning_rate": 1.997407789347799e-05, + "loss": 0.0495, + "step": 1879 + }, + { + "epoch": 1.4698983580922595, + "grad_norm": 0.2744178771972656, + "learning_rate": 1.991954349653707e-05, + "loss": 0.1154, + "step": 1880 + }, + { + "epoch": 1.4706802189210322, + "grad_norm": 0.3737988770008087, + "learning_rate": 1.9865065123512194e-05, + "loss": 0.0682, + "step": 1881 + }, + { + "epoch": 1.4714620797498046, + "grad_norm": 0.5198439955711365, + "learning_rate": 1.9810642875867573e-05, + "loss": 0.0913, + "step": 1882 + }, + { + "epoch": 1.472243940578577, + "grad_norm": 0.3445640802383423, + "learning_rate": 1.9756276854963002e-05, + "loss": 0.0829, + "step": 1883 + }, + { + "epoch": 1.4730258014073496, + "grad_norm": 0.3063513934612274, + "learning_rate": 1.970196716205349e-05, + "loss": 0.0344, + "step": 1884 + }, + { + "epoch": 1.473807662236122, + "grad_norm": 0.4947914481163025, + "learning_rate": 1.9647713898289154e-05, + "loss": 0.0694, + "step": 1885 + }, + { + "epoch": 1.4745895230648944, + "grad_norm": 0.35459840297698975, + "learning_rate": 1.9593517164715014e-05, + "loss": 0.054, + "step": 1886 + }, + { + "epoch": 1.475371383893667, + "grad_norm": 0.4725291132926941, + "learning_rate": 1.953937706227078e-05, + "loss": 0.0764, + "step": 1887 + }, + { + "epoch": 1.4761532447224395, + "grad_norm": 0.47036585211753845, + "learning_rate": 1.9485293691790774e-05, + "loss": 0.0679, + "step": 1888 + }, + { + "epoch": 1.4769351055512119, + "grad_norm": 0.4962479770183563, + "learning_rate": 1.943126715400353e-05, + "loss": 0.0753, + "step": 1889 + }, + { + "epoch": 1.4777169663799843, + "grad_norm": 0.33534544706344604, + "learning_rate": 1.9377297549531805e-05, + "loss": 0.0316, + "step": 1890 + }, + { + "epoch": 1.478498827208757, + "grad_norm": 0.5018200874328613, + "learning_rate": 1.9323384978892357e-05, + "loss": 0.053, + "step": 1891 + }, + { + "epoch": 1.4792806880375293, + "grad_norm": 0.3958439230918884, + "learning_rate": 
1.9269529542495605e-05, + "loss": 0.0628, + "step": 1892 + }, + { + "epoch": 1.4800625488663017, + "grad_norm": 0.40492334961891174, + "learning_rate": 1.921573134064569e-05, + "loss": 0.0504, + "step": 1893 + }, + { + "epoch": 1.4808444096950744, + "grad_norm": 0.4669480323791504, + "learning_rate": 1.9161990473540074e-05, + "loss": 0.0658, + "step": 1894 + }, + { + "epoch": 1.4816262705238468, + "grad_norm": 0.40686583518981934, + "learning_rate": 1.9108307041269418e-05, + "loss": 0.0595, + "step": 1895 + }, + { + "epoch": 1.4824081313526192, + "grad_norm": 0.37184271216392517, + "learning_rate": 1.9054681143817487e-05, + "loss": 0.0571, + "step": 1896 + }, + { + "epoch": 1.4831899921813916, + "grad_norm": 0.4182341396808624, + "learning_rate": 1.9001112881060845e-05, + "loss": 0.03, + "step": 1897 + }, + { + "epoch": 1.4839718530101642, + "grad_norm": 0.42824220657348633, + "learning_rate": 1.89476023527687e-05, + "loss": 0.0436, + "step": 1898 + }, + { + "epoch": 1.4847537138389366, + "grad_norm": 0.4151003658771515, + "learning_rate": 1.8894149658602767e-05, + "loss": 0.0515, + "step": 1899 + }, + { + "epoch": 1.485535574667709, + "grad_norm": 0.38467374444007874, + "learning_rate": 1.8840754898117e-05, + "loss": 0.0334, + "step": 1900 + }, + { + "epoch": 1.4863174354964817, + "grad_norm": 0.6196097731590271, + "learning_rate": 1.878741817075754e-05, + "loss": 0.0773, + "step": 1901 + }, + { + "epoch": 1.487099296325254, + "grad_norm": 0.3245231509208679, + "learning_rate": 1.8734139575862313e-05, + "loss": 0.0367, + "step": 1902 + }, + { + "epoch": 1.4878811571540265, + "grad_norm": 0.45020565390586853, + "learning_rate": 1.8680919212661097e-05, + "loss": 0.04, + "step": 1903 + }, + { + "epoch": 1.488663017982799, + "grad_norm": 0.5742055773735046, + "learning_rate": 1.8627757180275175e-05, + "loss": 0.0592, + "step": 1904 + }, + { + "epoch": 1.4894448788115715, + "grad_norm": 0.35358932614326477, + "learning_rate": 1.8574653577717116e-05, + "loss": 0.0777, + "step": 1905 + }, + { + "epoch": 1.490226739640344, + "grad_norm": 0.3724954426288605, + "learning_rate": 1.8521608503890787e-05, + "loss": 0.0436, + "step": 1906 + }, + { + "epoch": 1.4910086004691165, + "grad_norm": 0.4322279393672943, + "learning_rate": 1.8468622057590978e-05, + "loss": 0.0396, + "step": 1907 + }, + { + "epoch": 1.491790461297889, + "grad_norm": 0.297240287065506, + "learning_rate": 1.8415694337503297e-05, + "loss": 0.0313, + "step": 1908 + }, + { + "epoch": 1.4925723221266614, + "grad_norm": 0.2714248299598694, + "learning_rate": 1.836282544220398e-05, + "loss": 0.0375, + "step": 1909 + }, + { + "epoch": 1.493354182955434, + "grad_norm": 0.5247171521186829, + "learning_rate": 1.8310015470159685e-05, + "loss": 0.0567, + "step": 1910 + }, + { + "epoch": 1.4941360437842064, + "grad_norm": 0.30854910612106323, + "learning_rate": 1.825726451972739e-05, + "loss": 0.0451, + "step": 1911 + }, + { + "epoch": 1.4949179046129788, + "grad_norm": 0.4068269431591034, + "learning_rate": 1.820457268915404e-05, + "loss": 0.0481, + "step": 1912 + }, + { + "epoch": 1.4956997654417514, + "grad_norm": 0.39392808079719543, + "learning_rate": 1.815194007657659e-05, + "loss": 0.057, + "step": 1913 + }, + { + "epoch": 1.4964816262705238, + "grad_norm": 0.31934913992881775, + "learning_rate": 1.809936678002163e-05, + "loss": 0.0219, + "step": 1914 + }, + { + "epoch": 1.4972634870992962, + "grad_norm": 0.5628071427345276, + "learning_rate": 1.804685289740526e-05, + "loss": 0.0834, + "step": 1915 + }, + { + "epoch": 1.4980453479280689, 
+ "grad_norm": 0.3296336829662323, + "learning_rate": 1.7994398526532986e-05, + "loss": 0.0268, + "step": 1916 + }, + { + "epoch": 1.4988272087568413, + "grad_norm": 0.40787026286125183, + "learning_rate": 1.794200376509944e-05, + "loss": 0.0746, + "step": 1917 + }, + { + "epoch": 1.4996090695856137, + "grad_norm": 0.4122026860713959, + "learning_rate": 1.788966871068824e-05, + "loss": 0.0591, + "step": 1918 + }, + { + "epoch": 1.5003909304143863, + "grad_norm": 0.4837484359741211, + "learning_rate": 1.7837393460771795e-05, + "loss": 0.0713, + "step": 1919 + }, + { + "epoch": 1.5011727912431587, + "grad_norm": 0.36081263422966003, + "learning_rate": 1.7785178112711114e-05, + "loss": 0.0536, + "step": 1920 + }, + { + "epoch": 1.5019546520719311, + "grad_norm": 0.5664728879928589, + "learning_rate": 1.7733022763755725e-05, + "loss": 0.0981, + "step": 1921 + }, + { + "epoch": 1.5027365129007038, + "grad_norm": 0.46622321009635925, + "learning_rate": 1.7680927511043277e-05, + "loss": 0.0736, + "step": 1922 + }, + { + "epoch": 1.5035183737294762, + "grad_norm": 0.4234619438648224, + "learning_rate": 1.762889245159957e-05, + "loss": 0.0952, + "step": 1923 + }, + { + "epoch": 1.5043002345582486, + "grad_norm": 0.3387734591960907, + "learning_rate": 1.757691768233834e-05, + "loss": 0.0601, + "step": 1924 + }, + { + "epoch": 1.5050820953870212, + "grad_norm": 0.35551008582115173, + "learning_rate": 1.7525003300060904e-05, + "loss": 0.0887, + "step": 1925 + }, + { + "epoch": 1.5058639562157936, + "grad_norm": 0.5371754765510559, + "learning_rate": 1.747314940145624e-05, + "loss": 0.0658, + "step": 1926 + }, + { + "epoch": 1.506645817044566, + "grad_norm": 0.3989940881729126, + "learning_rate": 1.7421356083100615e-05, + "loss": 0.0381, + "step": 1927 + }, + { + "epoch": 1.5074276778733386, + "grad_norm": 0.44846925139427185, + "learning_rate": 1.7369623441457465e-05, + "loss": 0.0607, + "step": 1928 + }, + { + "epoch": 1.508209538702111, + "grad_norm": 0.3646467328071594, + "learning_rate": 1.7317951572877237e-05, + "loss": 0.0551, + "step": 1929 + }, + { + "epoch": 1.5089913995308835, + "grad_norm": 0.41458413004875183, + "learning_rate": 1.7266340573597163e-05, + "loss": 0.0536, + "step": 1930 + }, + { + "epoch": 1.509773260359656, + "grad_norm": 0.3311718702316284, + "learning_rate": 1.7214790539741167e-05, + "loss": 0.0459, + "step": 1931 + }, + { + "epoch": 1.5105551211884285, + "grad_norm": 0.3195326626300812, + "learning_rate": 1.7163301567319546e-05, + "loss": 0.0285, + "step": 1932 + }, + { + "epoch": 1.511336982017201, + "grad_norm": 0.29825884103775024, + "learning_rate": 1.7111873752228907e-05, + "loss": 0.0555, + "step": 1933 + }, + { + "epoch": 1.5121188428459735, + "grad_norm": 0.4819906949996948, + "learning_rate": 1.7060507190252012e-05, + "loss": 0.0894, + "step": 1934 + }, + { + "epoch": 1.512900703674746, + "grad_norm": 0.4403591454029083, + "learning_rate": 1.700920197705742e-05, + "loss": 0.0653, + "step": 1935 + }, + { + "epoch": 1.5136825645035183, + "grad_norm": 0.3573175370693207, + "learning_rate": 1.6957958208199548e-05, + "loss": 0.0453, + "step": 1936 + }, + { + "epoch": 1.514464425332291, + "grad_norm": 0.3273763656616211, + "learning_rate": 1.6906775979118307e-05, + "loss": 0.0469, + "step": 1937 + }, + { + "epoch": 1.5152462861610634, + "grad_norm": 0.29220449924468994, + "learning_rate": 1.6855655385139002e-05, + "loss": 0.0337, + "step": 1938 + }, + { + "epoch": 1.5160281469898358, + "grad_norm": 0.4651147425174713, + "learning_rate": 1.680459652147216e-05, + 
"loss": 0.1163, + "step": 1939 + }, + { + "epoch": 1.5168100078186084, + "grad_norm": 0.43985429406166077, + "learning_rate": 1.675359948321333e-05, + "loss": 0.0678, + "step": 1940 + }, + { + "epoch": 1.5175918686473806, + "grad_norm": 0.2881910800933838, + "learning_rate": 1.67026643653429e-05, + "loss": 0.0332, + "step": 1941 + }, + { + "epoch": 1.5183737294761532, + "grad_norm": 0.7812718152999878, + "learning_rate": 1.6651791262725958e-05, + "loss": 0.0723, + "step": 1942 + }, + { + "epoch": 1.5191555903049259, + "grad_norm": 0.4525265097618103, + "learning_rate": 1.6600980270112055e-05, + "loss": 0.05, + "step": 1943 + }, + { + "epoch": 1.519937451133698, + "grad_norm": 0.4249601662158966, + "learning_rate": 1.6550231482135136e-05, + "loss": 0.0504, + "step": 1944 + }, + { + "epoch": 1.5207193119624707, + "grad_norm": 0.40071019530296326, + "learning_rate": 1.6499544993313183e-05, + "loss": 0.0466, + "step": 1945 + }, + { + "epoch": 1.5215011727912433, + "grad_norm": 0.3151744306087494, + "learning_rate": 1.6448920898048247e-05, + "loss": 0.046, + "step": 1946 + }, + { + "epoch": 1.5222830336200155, + "grad_norm": 0.21613501012325287, + "learning_rate": 1.6398359290626135e-05, + "loss": 0.0159, + "step": 1947 + }, + { + "epoch": 1.5230648944487881, + "grad_norm": 0.38744068145751953, + "learning_rate": 1.634786026521623e-05, + "loss": 0.0569, + "step": 1948 + }, + { + "epoch": 1.5238467552775607, + "grad_norm": 0.33948633074760437, + "learning_rate": 1.629742391587144e-05, + "loss": 0.0263, + "step": 1949 + }, + { + "epoch": 1.524628616106333, + "grad_norm": 0.558428168296814, + "learning_rate": 1.6247050336527886e-05, + "loss": 0.056, + "step": 1950 + }, + { + "epoch": 1.5254104769351056, + "grad_norm": 0.3884804844856262, + "learning_rate": 1.619673962100479e-05, + "loss": 0.0611, + "step": 1951 + }, + { + "epoch": 1.5261923377638782, + "grad_norm": 0.4065593481063843, + "learning_rate": 1.6146491863004303e-05, + "loss": 0.0371, + "step": 1952 + }, + { + "epoch": 1.5269741985926504, + "grad_norm": 0.4196559190750122, + "learning_rate": 1.6096307156111312e-05, + "loss": 0.0551, + "step": 1953 + }, + { + "epoch": 1.527756059421423, + "grad_norm": 0.3707648515701294, + "learning_rate": 1.6046185593793272e-05, + "loss": 0.0424, + "step": 1954 + }, + { + "epoch": 1.5285379202501954, + "grad_norm": 0.3814994990825653, + "learning_rate": 1.5996127269400023e-05, + "loss": 0.0238, + "step": 1955 + }, + { + "epoch": 1.5293197810789678, + "grad_norm": 0.4228754937648773, + "learning_rate": 1.594613227616367e-05, + "loss": 0.0558, + "step": 1956 + }, + { + "epoch": 1.5301016419077405, + "grad_norm": 0.5330584645271301, + "learning_rate": 1.589620070719834e-05, + "loss": 0.1043, + "step": 1957 + }, + { + "epoch": 1.5308835027365129, + "grad_norm": 0.5210996866226196, + "learning_rate": 1.5846332655499972e-05, + "loss": 0.0895, + "step": 1958 + }, + { + "epoch": 1.5316653635652853, + "grad_norm": 0.5705960988998413, + "learning_rate": 1.579652821394632e-05, + "loss": 0.0708, + "step": 1959 + }, + { + "epoch": 1.532447224394058, + "grad_norm": 0.420893132686615, + "learning_rate": 1.574678747529659e-05, + "loss": 0.0559, + "step": 1960 + }, + { + "epoch": 1.5332290852228303, + "grad_norm": 0.36799362301826477, + "learning_rate": 1.5697110532191366e-05, + "loss": 0.0472, + "step": 1961 + }, + { + "epoch": 1.5340109460516027, + "grad_norm": 0.2931898832321167, + "learning_rate": 1.5647497477152406e-05, + "loss": 0.0376, + "step": 1962 + }, + { + "epoch": 1.5347928068803753, + "grad_norm": 
0.36866313219070435, + "learning_rate": 1.559794840258249e-05, + "loss": 0.0419, + "step": 1963 + }, + { + "epoch": 1.5355746677091477, + "grad_norm": 0.4287290871143341, + "learning_rate": 1.5548463400765227e-05, + "loss": 0.0564, + "step": 1964 + }, + { + "epoch": 1.5363565285379202, + "grad_norm": 0.38672366738319397, + "learning_rate": 1.549904256386488e-05, + "loss": 0.0691, + "step": 1965 + }, + { + "epoch": 1.5371383893666928, + "grad_norm": 0.4107593894004822, + "learning_rate": 1.544968598392626e-05, + "loss": 0.0998, + "step": 1966 + }, + { + "epoch": 1.5379202501954652, + "grad_norm": 0.3715873658657074, + "learning_rate": 1.5400393752874454e-05, + "loss": 0.0748, + "step": 1967 + }, + { + "epoch": 1.5387021110242376, + "grad_norm": 0.4931233525276184, + "learning_rate": 1.5351165962514675e-05, + "loss": 0.0277, + "step": 1968 + }, + { + "epoch": 1.5394839718530102, + "grad_norm": 0.4674701392650604, + "learning_rate": 1.5302002704532192e-05, + "loss": 0.0462, + "step": 1969 + }, + { + "epoch": 1.5402658326817826, + "grad_norm": 0.4781097173690796, + "learning_rate": 1.5252904070492035e-05, + "loss": 0.0506, + "step": 1970 + }, + { + "epoch": 1.541047693510555, + "grad_norm": 0.2934138774871826, + "learning_rate": 1.5203870151838884e-05, + "loss": 0.0477, + "step": 1971 + }, + { + "epoch": 1.5418295543393277, + "grad_norm": 0.5006688833236694, + "learning_rate": 1.515490103989689e-05, + "loss": 0.0495, + "step": 1972 + }, + { + "epoch": 1.5426114151681, + "grad_norm": 0.3465513586997986, + "learning_rate": 1.51059968258695e-05, + "loss": 0.0478, + "step": 1973 + }, + { + "epoch": 1.5433932759968725, + "grad_norm": 0.3733910620212555, + "learning_rate": 1.5057157600839305e-05, + "loss": 0.0645, + "step": 1974 + }, + { + "epoch": 1.5441751368256451, + "grad_norm": 0.37032419443130493, + "learning_rate": 1.5008383455767828e-05, + "loss": 0.0594, + "step": 1975 + }, + { + "epoch": 1.5449569976544175, + "grad_norm": 0.41008704900741577, + "learning_rate": 1.4959674481495412e-05, + "loss": 0.0647, + "step": 1976 + }, + { + "epoch": 1.54573885848319, + "grad_norm": 0.37868285179138184, + "learning_rate": 1.4911030768741003e-05, + "loss": 0.0403, + "step": 1977 + }, + { + "epoch": 1.5465207193119626, + "grad_norm": 0.3398186266422272, + "learning_rate": 1.4862452408101996e-05, + "loss": 0.0485, + "step": 1978 + }, + { + "epoch": 1.547302580140735, + "grad_norm": 0.37676557898521423, + "learning_rate": 1.4813939490054095e-05, + "loss": 0.0696, + "step": 1979 + }, + { + "epoch": 1.5480844409695074, + "grad_norm": 0.3662286698818207, + "learning_rate": 1.4765492104951107e-05, + "loss": 0.0509, + "step": 1980 + }, + { + "epoch": 1.54886630179828, + "grad_norm": 0.37704092264175415, + "learning_rate": 1.471711034302477e-05, + "loss": 0.0449, + "step": 1981 + }, + { + "epoch": 1.5496481626270524, + "grad_norm": 0.37568730115890503, + "learning_rate": 1.466879429438462e-05, + "loss": 0.0686, + "step": 1982 + }, + { + "epoch": 1.5504300234558248, + "grad_norm": 0.3329296112060547, + "learning_rate": 1.4620544049017787e-05, + "loss": 0.0606, + "step": 1983 + }, + { + "epoch": 1.5512118842845974, + "grad_norm": 0.47085657715797424, + "learning_rate": 1.4572359696788868e-05, + "loss": 0.0732, + "step": 1984 + }, + { + "epoch": 1.5519937451133698, + "grad_norm": 0.6710319519042969, + "learning_rate": 1.4524241327439708e-05, + "loss": 0.1023, + "step": 1985 + }, + { + "epoch": 1.5527756059421423, + "grad_norm": 0.363131046295166, + "learning_rate": 1.4476189030589287e-05, + "loss": 0.0549, + "step": 
1986 + }, + { + "epoch": 1.5535574667709149, + "grad_norm": 0.334349662065506, + "learning_rate": 1.4428202895733505e-05, + "loss": 0.0427, + "step": 1987 + }, + { + "epoch": 1.5543393275996873, + "grad_norm": 0.3576214611530304, + "learning_rate": 1.4380283012245044e-05, + "loss": 0.0437, + "step": 1988 + }, + { + "epoch": 1.5551211884284597, + "grad_norm": 0.47771093249320984, + "learning_rate": 1.4332429469373226e-05, + "loss": 0.0388, + "step": 1989 + }, + { + "epoch": 1.5559030492572323, + "grad_norm": 0.3168260157108307, + "learning_rate": 1.4284642356243765e-05, + "loss": 0.0596, + "step": 1990 + }, + { + "epoch": 1.5566849100860047, + "grad_norm": 0.38129621744155884, + "learning_rate": 1.4236921761858685e-05, + "loss": 0.0956, + "step": 1991 + }, + { + "epoch": 1.5574667709147771, + "grad_norm": 0.39686211943626404, + "learning_rate": 1.4189267775096104e-05, + "loss": 0.0273, + "step": 1992 + }, + { + "epoch": 1.5582486317435498, + "grad_norm": 0.39223000407218933, + "learning_rate": 1.4141680484710095e-05, + "loss": 0.05, + "step": 1993 + }, + { + "epoch": 1.5590304925723222, + "grad_norm": 0.3337058424949646, + "learning_rate": 1.4094159979330513e-05, + "loss": 0.0445, + "step": 1994 + }, + { + "epoch": 1.5598123534010946, + "grad_norm": 0.37635454535484314, + "learning_rate": 1.4046706347462819e-05, + "loss": 0.0341, + "step": 1995 + }, + { + "epoch": 1.5605942142298672, + "grad_norm": 0.35981684923171997, + "learning_rate": 1.3999319677487926e-05, + "loss": 0.0569, + "step": 1996 + }, + { + "epoch": 1.5613760750586394, + "grad_norm": 0.5779023170471191, + "learning_rate": 1.395200005766204e-05, + "loss": 0.079, + "step": 1997 + }, + { + "epoch": 1.562157935887412, + "grad_norm": 0.3641951382160187, + "learning_rate": 1.3904747576116462e-05, + "loss": 0.1088, + "step": 1998 + }, + { + "epoch": 1.5629397967161847, + "grad_norm": 0.5276214480400085, + "learning_rate": 1.3857562320857526e-05, + "loss": 0.072, + "step": 1999 + }, + { + "epoch": 1.5637216575449568, + "grad_norm": 0.3834344446659088, + "learning_rate": 1.3810444379766246e-05, + "loss": 0.0414, + "step": 2000 + }, + { + "epoch": 1.5637216575449568, + "eval_loss": 0.12067654728889465, + "eval_runtime": 13.4086, + "eval_samples_per_second": 3.878, + "eval_steps_per_second": 0.97, + "step": 2000 + }, + { + "epoch": 1.5645035183737295, + "grad_norm": 0.46198686957359314, + "learning_rate": 1.3763393840598338e-05, + "loss": 0.039, + "step": 2001 + }, + { + "epoch": 1.565285379202502, + "grad_norm": 0.3458808958530426, + "learning_rate": 1.3716410790983991e-05, + "loss": 0.0597, + "step": 2002 + }, + { + "epoch": 1.5660672400312743, + "grad_norm": 0.4747830033302307, + "learning_rate": 1.3669495318427666e-05, + "loss": 0.0455, + "step": 2003 + }, + { + "epoch": 1.566849100860047, + "grad_norm": 0.5279855132102966, + "learning_rate": 1.3622647510307968e-05, + "loss": 0.044, + "step": 2004 + }, + { + "epoch": 1.5676309616888195, + "grad_norm": 0.5206402540206909, + "learning_rate": 1.3575867453877488e-05, + "loss": 0.0562, + "step": 2005 + }, + { + "epoch": 1.5684128225175917, + "grad_norm": 0.44958412647247314, + "learning_rate": 1.352915523626263e-05, + "loss": 0.0669, + "step": 2006 + }, + { + "epoch": 1.5691946833463644, + "grad_norm": 0.30790039896965027, + "learning_rate": 1.3482510944463445e-05, + "loss": 0.0207, + "step": 2007 + }, + { + "epoch": 1.569976544175137, + "grad_norm": 0.4307234585285187, + "learning_rate": 1.3435934665353466e-05, + "loss": 0.0678, + "step": 2008 + }, + { + "epoch": 1.5707584050039092, + 
"grad_norm": 0.5340322852134705, + "learning_rate": 1.3389426485679607e-05, + "loss": 0.0401, + "step": 2009 + }, + { + "epoch": 1.5715402658326818, + "grad_norm": 0.3630266487598419, + "learning_rate": 1.334298649206187e-05, + "loss": 0.07, + "step": 2010 + }, + { + "epoch": 1.5723221266614542, + "grad_norm": 0.437450110912323, + "learning_rate": 1.3296614770993293e-05, + "loss": 0.0637, + "step": 2011 + }, + { + "epoch": 1.5731039874902266, + "grad_norm": 0.3910987377166748, + "learning_rate": 1.32503114088398e-05, + "loss": 0.051, + "step": 2012 + }, + { + "epoch": 1.5738858483189992, + "grad_norm": 0.38609978556632996, + "learning_rate": 1.320407649183995e-05, + "loss": 0.0498, + "step": 2013 + }, + { + "epoch": 1.5746677091477717, + "grad_norm": 0.44294193387031555, + "learning_rate": 1.3157910106104836e-05, + "loss": 0.0445, + "step": 2014 + }, + { + "epoch": 1.575449569976544, + "grad_norm": 0.6343628764152527, + "learning_rate": 1.3111812337617924e-05, + "loss": 0.0584, + "step": 2015 + }, + { + "epoch": 1.5762314308053167, + "grad_norm": 0.3056366741657257, + "learning_rate": 1.3065783272234878e-05, + "loss": 0.0639, + "step": 2016 + }, + { + "epoch": 1.577013291634089, + "grad_norm": 0.3845876157283783, + "learning_rate": 1.3019822995683395e-05, + "loss": 0.0352, + "step": 2017 + }, + { + "epoch": 1.5777951524628615, + "grad_norm": 0.40483787655830383, + "learning_rate": 1.2973931593563048e-05, + "loss": 0.0618, + "step": 2018 + }, + { + "epoch": 1.5785770132916341, + "grad_norm": 0.4752943813800812, + "learning_rate": 1.2928109151345196e-05, + "loss": 0.0547, + "step": 2019 + }, + { + "epoch": 1.5793588741204065, + "grad_norm": 0.4694390296936035, + "learning_rate": 1.2882355754372672e-05, + "loss": 0.0574, + "step": 2020 + }, + { + "epoch": 1.580140734949179, + "grad_norm": 0.38313254714012146, + "learning_rate": 1.2836671487859754e-05, + "loss": 0.05, + "step": 2021 + }, + { + "epoch": 1.5809225957779516, + "grad_norm": 0.6127452254295349, + "learning_rate": 1.2791056436892013e-05, + "loss": 0.067, + "step": 2022 + }, + { + "epoch": 1.581704456606724, + "grad_norm": 0.4264466464519501, + "learning_rate": 1.274551068642601e-05, + "loss": 0.0547, + "step": 2023 + }, + { + "epoch": 1.5824863174354964, + "grad_norm": 0.37022411823272705, + "learning_rate": 1.2700034321289333e-05, + "loss": 0.0604, + "step": 2024 + }, + { + "epoch": 1.583268178264269, + "grad_norm": 0.4471640884876251, + "learning_rate": 1.2654627426180277e-05, + "loss": 0.0732, + "step": 2025 + }, + { + "epoch": 1.5840500390930414, + "grad_norm": 0.358602374792099, + "learning_rate": 1.2609290085667785e-05, + "loss": 0.0555, + "step": 2026 + }, + { + "epoch": 1.5848318999218138, + "grad_norm": 0.49344927072525024, + "learning_rate": 1.2564022384191243e-05, + "loss": 0.0525, + "step": 2027 + }, + { + "epoch": 1.5856137607505865, + "grad_norm": 0.43709656596183777, + "learning_rate": 1.2518824406060336e-05, + "loss": 0.0855, + "step": 2028 + }, + { + "epoch": 1.5863956215793589, + "grad_norm": 0.38000303506851196, + "learning_rate": 1.2473696235454896e-05, + "loss": 0.0459, + "step": 2029 + }, + { + "epoch": 1.5871774824081313, + "grad_norm": 0.3962831497192383, + "learning_rate": 1.2428637956424743e-05, + "loss": 0.0604, + "step": 2030 + }, + { + "epoch": 1.587959343236904, + "grad_norm": 0.42037636041641235, + "learning_rate": 1.2383649652889501e-05, + "loss": 0.0399, + "step": 2031 + }, + { + "epoch": 1.5887412040656763, + "grad_norm": 0.4292720556259155, + "learning_rate": 1.2338731408638532e-05, + "loss": 
0.0737, + "step": 2032 + }, + { + "epoch": 1.5895230648944487, + "grad_norm": 0.496189683675766, + "learning_rate": 1.2293883307330622e-05, + "loss": 0.0595, + "step": 2033 + }, + { + "epoch": 1.5903049257232214, + "grad_norm": 0.49012449383735657, + "learning_rate": 1.2249105432493996e-05, + "loss": 0.0721, + "step": 2034 + }, + { + "epoch": 1.5910867865519938, + "grad_norm": 0.3142451345920563, + "learning_rate": 1.2204397867526069e-05, + "loss": 0.0889, + "step": 2035 + }, + { + "epoch": 1.5918686473807662, + "grad_norm": 0.48869624733924866, + "learning_rate": 1.2159760695693239e-05, + "loss": 0.0468, + "step": 2036 + }, + { + "epoch": 1.5926505082095388, + "grad_norm": 0.3193237781524658, + "learning_rate": 1.2115194000130903e-05, + "loss": 0.0226, + "step": 2037 + }, + { + "epoch": 1.5934323690383112, + "grad_norm": 0.37936335802078247, + "learning_rate": 1.2070697863843127e-05, + "loss": 0.048, + "step": 2038 + }, + { + "epoch": 1.5942142298670836, + "grad_norm": 0.3389032185077667, + "learning_rate": 1.202627236970259e-05, + "loss": 0.039, + "step": 2039 + }, + { + "epoch": 1.5949960906958562, + "grad_norm": 0.2734612226486206, + "learning_rate": 1.1981917600450387e-05, + "loss": 0.0353, + "step": 2040 + }, + { + "epoch": 1.5957779515246286, + "grad_norm": 0.3461781144142151, + "learning_rate": 1.1937633638695883e-05, + "loss": 0.0575, + "step": 2041 + }, + { + "epoch": 1.596559812353401, + "grad_norm": 0.3470762073993683, + "learning_rate": 1.1893420566916636e-05, + "loss": 0.0592, + "step": 2042 + }, + { + "epoch": 1.5973416731821737, + "grad_norm": 0.41323909163475037, + "learning_rate": 1.1849278467458048e-05, + "loss": 0.033, + "step": 2043 + }, + { + "epoch": 1.598123534010946, + "grad_norm": 0.3009279668331146, + "learning_rate": 1.1805207422533459e-05, + "loss": 0.0318, + "step": 2044 + }, + { + "epoch": 1.5989053948397185, + "grad_norm": 0.3985391855239868, + "learning_rate": 1.1761207514223822e-05, + "loss": 0.0451, + "step": 2045 + }, + { + "epoch": 1.5996872556684911, + "grad_norm": 0.43245643377304077, + "learning_rate": 1.1717278824477556e-05, + "loss": 0.0732, + "step": 2046 + }, + { + "epoch": 1.6004691164972635, + "grad_norm": 0.5485223531723022, + "learning_rate": 1.1673421435110522e-05, + "loss": 0.08, + "step": 2047 + }, + { + "epoch": 1.601250977326036, + "grad_norm": 0.30518728494644165, + "learning_rate": 1.162963542780573e-05, + "loss": 0.0364, + "step": 2048 + }, + { + "epoch": 1.6020328381548086, + "grad_norm": 0.32443472743034363, + "learning_rate": 1.1585920884113261e-05, + "loss": 0.0504, + "step": 2049 + }, + { + "epoch": 1.602814698983581, + "grad_norm": 0.5871180295944214, + "learning_rate": 1.1542277885450098e-05, + "loss": 0.0784, + "step": 2050 + }, + { + "epoch": 1.6035965598123534, + "grad_norm": 0.321475088596344, + "learning_rate": 1.1498706513099949e-05, + "loss": 0.0671, + "step": 2051 + }, + { + "epoch": 1.604378420641126, + "grad_norm": 0.35943475365638733, + "learning_rate": 1.1455206848213196e-05, + "loss": 0.0512, + "step": 2052 + }, + { + "epoch": 1.6051602814698982, + "grad_norm": 0.40514686703681946, + "learning_rate": 1.1411778971806558e-05, + "loss": 0.0645, + "step": 2053 + }, + { + "epoch": 1.6059421422986708, + "grad_norm": 0.3564876317977905, + "learning_rate": 1.1368422964763115e-05, + "loss": 0.0744, + "step": 2054 + }, + { + "epoch": 1.6067240031274435, + "grad_norm": 0.533642590045929, + "learning_rate": 1.1325138907832122e-05, + "loss": 0.0548, + "step": 2055 + }, + { + "epoch": 1.6075058639562156, + "grad_norm": 
0.4948970377445221, + "learning_rate": 1.1281926881628735e-05, + "loss": 0.0604, + "step": 2056 + }, + { + "epoch": 1.6082877247849883, + "grad_norm": 0.5011610984802246, + "learning_rate": 1.1238786966634052e-05, + "loss": 0.0722, + "step": 2057 + }, + { + "epoch": 1.609069585613761, + "grad_norm": 0.28366154432296753, + "learning_rate": 1.1195719243194813e-05, + "loss": 0.0627, + "step": 2058 + }, + { + "epoch": 1.609851446442533, + "grad_norm": 0.3866322636604309, + "learning_rate": 1.1152723791523318e-05, + "loss": 0.073, + "step": 2059 + }, + { + "epoch": 1.6106333072713057, + "grad_norm": 0.30931589007377625, + "learning_rate": 1.1109800691697254e-05, + "loss": 0.0321, + "step": 2060 + }, + { + "epoch": 1.6114151681000783, + "grad_norm": 0.3482475280761719, + "learning_rate": 1.1066950023659545e-05, + "loss": 0.0546, + "step": 2061 + }, + { + "epoch": 1.6121970289288505, + "grad_norm": 0.4044942259788513, + "learning_rate": 1.102417186721828e-05, + "loss": 0.0679, + "step": 2062 + }, + { + "epoch": 1.6129788897576232, + "grad_norm": 0.34607014060020447, + "learning_rate": 1.0981466302046406e-05, + "loss": 0.0416, + "step": 2063 + }, + { + "epoch": 1.6137607505863958, + "grad_norm": 0.34637823700904846, + "learning_rate": 1.0938833407681704e-05, + "loss": 0.0751, + "step": 2064 + }, + { + "epoch": 1.614542611415168, + "grad_norm": 0.43607819080352783, + "learning_rate": 1.0896273263526663e-05, + "loss": 0.0462, + "step": 2065 + }, + { + "epoch": 1.6153244722439406, + "grad_norm": 0.37277066707611084, + "learning_rate": 1.0853785948848166e-05, + "loss": 0.0649, + "step": 2066 + }, + { + "epoch": 1.616106333072713, + "grad_norm": 0.3339214324951172, + "learning_rate": 1.0811371542777571e-05, + "loss": 0.0581, + "step": 2067 + }, + { + "epoch": 1.6168881939014854, + "grad_norm": 0.398568719625473, + "learning_rate": 1.0769030124310364e-05, + "loss": 0.0612, + "step": 2068 + }, + { + "epoch": 1.617670054730258, + "grad_norm": 0.3037922978401184, + "learning_rate": 1.0726761772306137e-05, + "loss": 0.0366, + "step": 2069 + }, + { + "epoch": 1.6184519155590305, + "grad_norm": 0.41374528408050537, + "learning_rate": 1.0684566565488374e-05, + "loss": 0.0875, + "step": 2070 + }, + { + "epoch": 1.6192337763878029, + "grad_norm": 0.4728708863258362, + "learning_rate": 1.0642444582444322e-05, + "loss": 0.0492, + "step": 2071 + }, + { + "epoch": 1.6200156372165755, + "grad_norm": 0.49290919303894043, + "learning_rate": 1.060039590162491e-05, + "loss": 0.0442, + "step": 2072 + }, + { + "epoch": 1.620797498045348, + "grad_norm": 0.382946252822876, + "learning_rate": 1.055842060134446e-05, + "loss": 0.0516, + "step": 2073 + }, + { + "epoch": 1.6215793588741203, + "grad_norm": 0.4095887243747711, + "learning_rate": 1.0516518759780664e-05, + "loss": 0.0466, + "step": 2074 + }, + { + "epoch": 1.622361219702893, + "grad_norm": 0.48378729820251465, + "learning_rate": 1.0474690454974445e-05, + "loss": 0.0736, + "step": 2075 + }, + { + "epoch": 1.6231430805316653, + "grad_norm": 0.4431527554988861, + "learning_rate": 1.043293576482966e-05, + "loss": 0.082, + "step": 2076 + }, + { + "epoch": 1.6239249413604377, + "grad_norm": 0.40211930871009827, + "learning_rate": 1.0391254767113169e-05, + "loss": 0.0581, + "step": 2077 + }, + { + "epoch": 1.6247068021892104, + "grad_norm": 0.42459335923194885, + "learning_rate": 1.0349647539454532e-05, + "loss": 0.0887, + "step": 2078 + }, + { + "epoch": 1.6254886630179828, + "grad_norm": 0.4116522967815399, + "learning_rate": 1.0308114159345883e-05, + "loss": 0.0566, + 
"step": 2079 + }, + { + "epoch": 1.6262705238467552, + "grad_norm": 0.5448094010353088, + "learning_rate": 1.0266654704141888e-05, + "loss": 0.0721, + "step": 2080 + }, + { + "epoch": 1.6270523846755278, + "grad_norm": 0.45197612047195435, + "learning_rate": 1.0225269251059483e-05, + "loss": 0.0558, + "step": 2081 + }, + { + "epoch": 1.6278342455043002, + "grad_norm": 0.2840746343135834, + "learning_rate": 1.0183957877177786e-05, + "loss": 0.0485, + "step": 2082 + }, + { + "epoch": 1.6286161063330726, + "grad_norm": 0.4308018982410431, + "learning_rate": 1.0142720659437955e-05, + "loss": 0.0955, + "step": 2083 + }, + { + "epoch": 1.6293979671618453, + "grad_norm": 0.4573703110218048, + "learning_rate": 1.0101557674643003e-05, + "loss": 0.0686, + "step": 2084 + }, + { + "epoch": 1.6301798279906177, + "grad_norm": 0.4302104115486145, + "learning_rate": 1.0060468999457767e-05, + "loss": 0.0454, + "step": 2085 + }, + { + "epoch": 1.63096168881939, + "grad_norm": 0.388776034116745, + "learning_rate": 1.001945471040856e-05, + "loss": 0.0576, + "step": 2086 + }, + { + "epoch": 1.6317435496481627, + "grad_norm": 0.5159154534339905, + "learning_rate": 9.978514883883266e-06, + "loss": 0.1033, + "step": 2087 + }, + { + "epoch": 1.6325254104769351, + "grad_norm": 0.31607142090797424, + "learning_rate": 9.937649596131043e-06, + "loss": 0.0583, + "step": 2088 + }, + { + "epoch": 1.6333072713057075, + "grad_norm": 0.33039212226867676, + "learning_rate": 9.89685892326218e-06, + "loss": 0.0262, + "step": 2089 + }, + { + "epoch": 1.6340891321344801, + "grad_norm": 0.376118540763855, + "learning_rate": 9.856142941248075e-06, + "loss": 0.0573, + "step": 2090 + }, + { + "epoch": 1.6348709929632526, + "grad_norm": 0.46004632115364075, + "learning_rate": 9.815501725920972e-06, + "loss": 0.0527, + "step": 2091 + }, + { + "epoch": 1.635652853792025, + "grad_norm": 0.40226930379867554, + "learning_rate": 9.774935352973868e-06, + "loss": 0.0464, + "step": 2092 + }, + { + "epoch": 1.6364347146207976, + "grad_norm": 0.4005904197692871, + "learning_rate": 9.734443897960372e-06, + "loss": 0.0839, + "step": 2093 + }, + { + "epoch": 1.63721657544957, + "grad_norm": 0.39641985297203064, + "learning_rate": 9.694027436294567e-06, + "loss": 0.0678, + "step": 2094 + }, + { + "epoch": 1.6379984362783424, + "grad_norm": 0.6553030610084534, + "learning_rate": 9.653686043250848e-06, + "loss": 0.0562, + "step": 2095 + }, + { + "epoch": 1.638780297107115, + "grad_norm": 0.4170481860637665, + "learning_rate": 9.613419793963808e-06, + "loss": 0.0661, + "step": 2096 + }, + { + "epoch": 1.6395621579358874, + "grad_norm": 0.4887992739677429, + "learning_rate": 9.573228763428093e-06, + "loss": 0.0658, + "step": 2097 + }, + { + "epoch": 1.6403440187646599, + "grad_norm": 0.40965110063552856, + "learning_rate": 9.533113026498264e-06, + "loss": 0.0377, + "step": 2098 + }, + { + "epoch": 1.6411258795934325, + "grad_norm": 0.3160760700702667, + "learning_rate": 9.493072657888597e-06, + "loss": 0.0654, + "step": 2099 + }, + { + "epoch": 1.6419077404222049, + "grad_norm": 0.3468204140663147, + "learning_rate": 9.45310773217306e-06, + "loss": 0.0482, + "step": 2100 + }, + { + "epoch": 1.6426896012509773, + "grad_norm": 0.5230024456977844, + "learning_rate": 9.413218323785084e-06, + "loss": 0.081, + "step": 2101 + }, + { + "epoch": 1.64347146207975, + "grad_norm": 0.3241496980190277, + "learning_rate": 9.373404507017453e-06, + "loss": 0.0389, + "step": 2102 + }, + { + "epoch": 1.6442533229085223, + "grad_norm": 0.4153323173522949, + 
"learning_rate": 9.333666356022158e-06, + "loss": 0.0658, + "step": 2103 + }, + { + "epoch": 1.6450351837372947, + "grad_norm": 0.4620719254016876, + "learning_rate": 9.294003944810276e-06, + "loss": 0.0496, + "step": 2104 + }, + { + "epoch": 1.6458170445660674, + "grad_norm": 0.3673686385154724, + "learning_rate": 9.254417347251815e-06, + "loss": 0.0826, + "step": 2105 + }, + { + "epoch": 1.6465989053948398, + "grad_norm": 0.3582766056060791, + "learning_rate": 9.21490663707557e-06, + "loss": 0.0673, + "step": 2106 + }, + { + "epoch": 1.6473807662236122, + "grad_norm": 0.3266817331314087, + "learning_rate": 9.175471887869042e-06, + "loss": 0.0259, + "step": 2107 + }, + { + "epoch": 1.6481626270523848, + "grad_norm": 0.4447278082370758, + "learning_rate": 9.136113173078221e-06, + "loss": 0.0419, + "step": 2108 + }, + { + "epoch": 1.648944487881157, + "grad_norm": 0.3423357605934143, + "learning_rate": 9.096830566007452e-06, + "loss": 0.0458, + "step": 2109 + }, + { + "epoch": 1.6497263487099296, + "grad_norm": 0.41473570466041565, + "learning_rate": 9.05762413981941e-06, + "loss": 0.0686, + "step": 2110 + }, + { + "epoch": 1.6505082095387023, + "grad_norm": 0.3200038969516754, + "learning_rate": 9.018493967534835e-06, + "loss": 0.0623, + "step": 2111 + }, + { + "epoch": 1.6512900703674744, + "grad_norm": 0.29377713799476624, + "learning_rate": 8.979440122032457e-06, + "loss": 0.0449, + "step": 2112 + }, + { + "epoch": 1.652071931196247, + "grad_norm": 0.3613704442977905, + "learning_rate": 8.940462676048855e-06, + "loss": 0.0421, + "step": 2113 + }, + { + "epoch": 1.6528537920250197, + "grad_norm": 0.3492453992366791, + "learning_rate": 8.901561702178302e-06, + "loss": 0.0306, + "step": 2114 + }, + { + "epoch": 1.6536356528537919, + "grad_norm": 0.38384905457496643, + "learning_rate": 8.862737272872657e-06, + "loss": 0.042, + "step": 2115 + }, + { + "epoch": 1.6544175136825645, + "grad_norm": 0.33163630962371826, + "learning_rate": 8.823989460441217e-06, + "loss": 0.0475, + "step": 2116 + }, + { + "epoch": 1.6551993745113371, + "grad_norm": 0.40485212206840515, + "learning_rate": 8.78531833705058e-06, + "loss": 0.0485, + "step": 2117 + }, + { + "epoch": 1.6559812353401093, + "grad_norm": 0.3084823191165924, + "learning_rate": 8.746723974724507e-06, + "loss": 0.0573, + "step": 2118 + }, + { + "epoch": 1.656763096168882, + "grad_norm": 0.34011852741241455, + "learning_rate": 8.708206445343791e-06, + "loss": 0.0419, + "step": 2119 + }, + { + "epoch": 1.6575449569976546, + "grad_norm": 0.4085788428783417, + "learning_rate": 8.669765820646159e-06, + "loss": 0.0725, + "step": 2120 + }, + { + "epoch": 1.6583268178264268, + "grad_norm": 0.35617566108703613, + "learning_rate": 8.631402172226061e-06, + "loss": 0.0325, + "step": 2121 + }, + { + "epoch": 1.6591086786551994, + "grad_norm": 0.40245357155799866, + "learning_rate": 8.59311557153461e-06, + "loss": 0.0632, + "step": 2122 + }, + { + "epoch": 1.6598905394839718, + "grad_norm": 0.3330320715904236, + "learning_rate": 8.554906089879411e-06, + "loss": 0.037, + "step": 2123 + }, + { + "epoch": 1.6606724003127442, + "grad_norm": 0.33040401339530945, + "learning_rate": 8.516773798424427e-06, + "loss": 0.0518, + "step": 2124 + }, + { + "epoch": 1.6614542611415168, + "grad_norm": 0.307596892118454, + "learning_rate": 8.478718768189875e-06, + "loss": 0.0298, + "step": 2125 + }, + { + "epoch": 1.6622361219702892, + "grad_norm": 0.400358110666275, + "learning_rate": 8.440741070052067e-06, + "loss": 0.0582, + "step": 2126 + }, + { + "epoch": 
1.6630179827990617, + "grad_norm": 0.5020748376846313, + "learning_rate": 8.402840774743281e-06, + "loss": 0.0568, + "step": 2127 + }, + { + "epoch": 1.6637998436278343, + "grad_norm": 0.41084474325180054, + "learning_rate": 8.365017952851645e-06, + "loss": 0.0482, + "step": 2128 + }, + { + "epoch": 1.6645817044566067, + "grad_norm": 0.46043017506599426, + "learning_rate": 8.327272674820974e-06, + "loss": 0.0481, + "step": 2129 + }, + { + "epoch": 1.665363565285379, + "grad_norm": 0.3039909899234772, + "learning_rate": 8.289605010950703e-06, + "loss": 0.0422, + "step": 2130 + }, + { + "epoch": 1.6661454261141517, + "grad_norm": 0.4253079891204834, + "learning_rate": 8.252015031395672e-06, + "loss": 0.0292, + "step": 2131 + }, + { + "epoch": 1.6669272869429241, + "grad_norm": 0.38544708490371704, + "learning_rate": 8.214502806166052e-06, + "loss": 0.0468, + "step": 2132 + }, + { + "epoch": 1.6677091477716965, + "grad_norm": 0.30641862750053406, + "learning_rate": 8.177068405127198e-06, + "loss": 0.0376, + "step": 2133 + }, + { + "epoch": 1.6684910086004692, + "grad_norm": 0.45573583245277405, + "learning_rate": 8.139711897999514e-06, + "loss": 0.0327, + "step": 2134 + }, + { + "epoch": 1.6692728694292416, + "grad_norm": 0.4065832197666168, + "learning_rate": 8.10243335435834e-06, + "loss": 0.1065, + "step": 2135 + }, + { + "epoch": 1.670054730258014, + "grad_norm": 0.36566224694252014, + "learning_rate": 8.065232843633807e-06, + "loss": 0.0524, + "step": 2136 + }, + { + "epoch": 1.6708365910867866, + "grad_norm": 0.3225889801979065, + "learning_rate": 8.028110435110709e-06, + "loss": 0.059, + "step": 2137 + }, + { + "epoch": 1.671618451915559, + "grad_norm": 0.5427262187004089, + "learning_rate": 7.991066197928372e-06, + "loss": 0.0879, + "step": 2138 + }, + { + "epoch": 1.6724003127443314, + "grad_norm": 0.42064690589904785, + "learning_rate": 7.954100201080538e-06, + "loss": 0.0341, + "step": 2139 + }, + { + "epoch": 1.673182173573104, + "grad_norm": 0.33844882249832153, + "learning_rate": 7.91721251341525e-06, + "loss": 0.0657, + "step": 2140 + }, + { + "epoch": 1.6739640344018765, + "grad_norm": 0.41567763686180115, + "learning_rate": 7.880403203634657e-06, + "loss": 0.0418, + "step": 2141 + }, + { + "epoch": 1.6747458952306489, + "grad_norm": 0.4430031180381775, + "learning_rate": 7.84367234029495e-06, + "loss": 0.0918, + "step": 2142 + }, + { + "epoch": 1.6755277560594215, + "grad_norm": 0.4221014976501465, + "learning_rate": 7.807019991806247e-06, + "loss": 0.0791, + "step": 2143 + }, + { + "epoch": 1.676309616888194, + "grad_norm": 0.4085227847099304, + "learning_rate": 7.770446226432393e-06, + "loss": 0.0404, + "step": 2144 + }, + { + "epoch": 1.6770914777169663, + "grad_norm": 0.41148459911346436, + "learning_rate": 7.733951112290894e-06, + "loss": 0.0617, + "step": 2145 + }, + { + "epoch": 1.677873338545739, + "grad_norm": 0.3634051978588104, + "learning_rate": 7.69753471735276e-06, + "loss": 0.0581, + "step": 2146 + }, + { + "epoch": 1.6786551993745114, + "grad_norm": 0.5589910745620728, + "learning_rate": 7.661197109442409e-06, + "loss": 0.0699, + "step": 2147 + }, + { + "epoch": 1.6794370602032838, + "grad_norm": 0.321353942155838, + "learning_rate": 7.624938356237493e-06, + "loss": 0.0262, + "step": 2148 + }, + { + "epoch": 1.6802189210320564, + "grad_norm": 0.43340280652046204, + "learning_rate": 7.588758525268808e-06, + "loss": 0.0697, + "step": 2149 + }, + { + "epoch": 1.6810007818608288, + "grad_norm": 0.42228564620018005, + "learning_rate": 7.5526576839202e-06, + "loss": 
0.0596, + "step": 2150 + }, + { + "epoch": 1.6817826426896012, + "grad_norm": 0.39023756980895996, + "learning_rate": 7.516635899428331e-06, + "loss": 0.0439, + "step": 2151 + }, + { + "epoch": 1.6825645035183738, + "grad_norm": 0.4297489523887634, + "learning_rate": 7.480693238882658e-06, + "loss": 0.0642, + "step": 2152 + }, + { + "epoch": 1.6833463643471462, + "grad_norm": 0.3283405601978302, + "learning_rate": 7.444829769225286e-06, + "loss": 0.0585, + "step": 2153 + }, + { + "epoch": 1.6841282251759186, + "grad_norm": 0.3524952232837677, + "learning_rate": 7.4090455572508044e-06, + "loss": 0.0281, + "step": 2154 + }, + { + "epoch": 1.6849100860046913, + "grad_norm": 0.34404295682907104, + "learning_rate": 7.373340669606205e-06, + "loss": 0.0488, + "step": 2155 + }, + { + "epoch": 1.6856919468334637, + "grad_norm": 0.3423098623752594, + "learning_rate": 7.337715172790721e-06, + "loss": 0.0451, + "step": 2156 + }, + { + "epoch": 1.686473807662236, + "grad_norm": 0.3290472626686096, + "learning_rate": 7.30216913315574e-06, + "loss": 0.0489, + "step": 2157 + }, + { + "epoch": 1.6872556684910087, + "grad_norm": 0.38891029357910156, + "learning_rate": 7.266702616904653e-06, + "loss": 0.0571, + "step": 2158 + }, + { + "epoch": 1.6880375293197811, + "grad_norm": 0.36488309502601624, + "learning_rate": 7.231315690092733e-06, + "loss": 0.0782, + "step": 2159 + }, + { + "epoch": 1.6888193901485535, + "grad_norm": 0.3920009136199951, + "learning_rate": 7.19600841862707e-06, + "loss": 0.0322, + "step": 2160 + }, + { + "epoch": 1.6896012509773262, + "grad_norm": 0.3348820209503174, + "learning_rate": 7.1607808682663315e-06, + "loss": 0.0661, + "step": 2161 + }, + { + "epoch": 1.6903831118060983, + "grad_norm": 0.3700244426727295, + "learning_rate": 7.125633104620722e-06, + "loss": 0.0241, + "step": 2162 + }, + { + "epoch": 1.691164972634871, + "grad_norm": 0.32485297322273254, + "learning_rate": 7.090565193151905e-06, + "loss": 0.0433, + "step": 2163 + }, + { + "epoch": 1.6919468334636436, + "grad_norm": 0.411578506231308, + "learning_rate": 7.05557719917273e-06, + "loss": 0.0608, + "step": 2164 + }, + { + "epoch": 1.6927286942924158, + "grad_norm": 0.2988610863685608, + "learning_rate": 7.020669187847278e-06, + "loss": 0.0583, + "step": 2165 + }, + { + "epoch": 1.6935105551211884, + "grad_norm": 0.31292635202407837, + "learning_rate": 6.985841224190625e-06, + "loss": 0.0471, + "step": 2166 + }, + { + "epoch": 1.694292415949961, + "grad_norm": 0.3985885679721832, + "learning_rate": 6.951093373068779e-06, + "loss": 0.036, + "step": 2167 + }, + { + "epoch": 1.6950742767787332, + "grad_norm": 0.3803912401199341, + "learning_rate": 6.916425699198526e-06, + "loss": 0.0593, + "step": 2168 + }, + { + "epoch": 1.6958561376075059, + "grad_norm": 0.5552504658699036, + "learning_rate": 6.881838267147334e-06, + "loss": 0.0904, + "step": 2169 + }, + { + "epoch": 1.6966379984362785, + "grad_norm": 0.32160112261772156, + "learning_rate": 6.847331141333224e-06, + "loss": 0.0445, + "step": 2170 + }, + { + "epoch": 1.6974198592650507, + "grad_norm": 0.33420655131340027, + "learning_rate": 6.812904386024644e-06, + "loss": 0.0653, + "step": 2171 + }, + { + "epoch": 1.6982017200938233, + "grad_norm": 0.43210169672966003, + "learning_rate": 6.77855806534034e-06, + "loss": 0.0518, + "step": 2172 + }, + { + "epoch": 1.698983580922596, + "grad_norm": 0.3932843804359436, + "learning_rate": 6.744292243249306e-06, + "loss": 0.0666, + "step": 2173 + }, + { + "epoch": 1.6997654417513681, + "grad_norm": 0.37392768263816833, + 
"learning_rate": 6.71010698357053e-06, + "loss": 0.0488, + "step": 2174 + }, + { + "epoch": 1.7005473025801408, + "grad_norm": 0.4128730595111847, + "learning_rate": 6.676002349973027e-06, + "loss": 0.0503, + "step": 2175 + }, + { + "epoch": 1.7013291634089132, + "grad_norm": 0.511454164981842, + "learning_rate": 6.641978405975618e-06, + "loss": 0.0639, + "step": 2176 + }, + { + "epoch": 1.7021110242376856, + "grad_norm": 0.4749528169631958, + "learning_rate": 6.608035214946806e-06, + "loss": 0.0356, + "step": 2177 + }, + { + "epoch": 1.7028928850664582, + "grad_norm": 0.35844314098358154, + "learning_rate": 6.574172840104764e-06, + "loss": 0.0618, + "step": 2178 + }, + { + "epoch": 1.7036747458952306, + "grad_norm": 0.361444890499115, + "learning_rate": 6.540391344517105e-06, + "loss": 0.0326, + "step": 2179 + }, + { + "epoch": 1.704456606724003, + "grad_norm": 0.4428097605705261, + "learning_rate": 6.5066907911008165e-06, + "loss": 0.0527, + "step": 2180 + }, + { + "epoch": 1.7052384675527756, + "grad_norm": 0.3516511023044586, + "learning_rate": 6.473071242622131e-06, + "loss": 0.0325, + "step": 2181 + }, + { + "epoch": 1.706020328381548, + "grad_norm": 0.3570480942726135, + "learning_rate": 6.439532761696399e-06, + "loss": 0.0347, + "step": 2182 + }, + { + "epoch": 1.7068021892103205, + "grad_norm": 0.49399954080581665, + "learning_rate": 6.406075410788037e-06, + "loss": 0.0473, + "step": 2183 + }, + { + "epoch": 1.707584050039093, + "grad_norm": 0.36502349376678467, + "learning_rate": 6.372699252210274e-06, + "loss": 0.0332, + "step": 2184 + }, + { + "epoch": 1.7083659108678655, + "grad_norm": 0.30636587738990784, + "learning_rate": 6.339404348125205e-06, + "loss": 0.0461, + "step": 2185 + }, + { + "epoch": 1.709147771696638, + "grad_norm": 0.4163917899131775, + "learning_rate": 6.3061907605435565e-06, + "loss": 0.0454, + "step": 2186 + }, + { + "epoch": 1.7099296325254105, + "grad_norm": 0.33639055490493774, + "learning_rate": 6.273058551324568e-06, + "loss": 0.0515, + "step": 2187 + }, + { + "epoch": 1.710711493354183, + "grad_norm": 0.3798970580101013, + "learning_rate": 6.240007782175983e-06, + "loss": 0.0692, + "step": 2188 + }, + { + "epoch": 1.7114933541829553, + "grad_norm": 0.5719696283340454, + "learning_rate": 6.207038514653818e-06, + "loss": 0.0819, + "step": 2189 + }, + { + "epoch": 1.712275215011728, + "grad_norm": 0.36644482612609863, + "learning_rate": 6.174150810162315e-06, + "loss": 0.0527, + "step": 2190 + }, + { + "epoch": 1.7130570758405004, + "grad_norm": 0.34109747409820557, + "learning_rate": 6.141344729953802e-06, + "loss": 0.0265, + "step": 2191 + }, + { + "epoch": 1.7138389366692728, + "grad_norm": 0.46717700362205505, + "learning_rate": 6.108620335128573e-06, + "loss": 0.0647, + "step": 2192 + }, + { + "epoch": 1.7146207974980454, + "grad_norm": 0.28352048993110657, + "learning_rate": 6.075977686634831e-06, + "loss": 0.0276, + "step": 2193 + }, + { + "epoch": 1.7154026583268178, + "grad_norm": 0.4177485406398773, + "learning_rate": 6.043416845268457e-06, + "loss": 0.0571, + "step": 2194 + }, + { + "epoch": 1.7161845191555902, + "grad_norm": 0.4173346161842346, + "learning_rate": 6.010937871673017e-06, + "loss": 0.0509, + "step": 2195 + }, + { + "epoch": 1.7169663799843629, + "grad_norm": 0.3966216444969177, + "learning_rate": 5.978540826339607e-06, + "loss": 0.0503, + "step": 2196 + }, + { + "epoch": 1.7177482408131353, + "grad_norm": 0.36653560400009155, + "learning_rate": 5.94622576960669e-06, + "loss": 0.0518, + "step": 2197 + }, + { + "epoch": 
1.7185301016419077, + "grad_norm": 0.31493353843688965, + "learning_rate": 5.913992761660081e-06, + "loss": 0.0784, + "step": 2198 + }, + { + "epoch": 1.7193119624706803, + "grad_norm": 0.5782929062843323, + "learning_rate": 5.88184186253275e-06, + "loss": 0.0672, + "step": 2199 + }, + { + "epoch": 1.7200938232994527, + "grad_norm": 0.3939312696456909, + "learning_rate": 5.849773132104741e-06, + "loss": 0.0375, + "step": 2200 + }, + { + "epoch": 1.7200938232994527, + "eval_loss": 0.12024590373039246, + "eval_runtime": 13.3539, + "eval_samples_per_second": 3.894, + "eval_steps_per_second": 0.973, + "step": 2200 + }, + { + "epoch": 1.7208756841282251, + "grad_norm": 0.4152418375015259, + "learning_rate": 5.817786630103067e-06, + "loss": 0.0504, + "step": 2201 + }, + { + "epoch": 1.7216575449569977, + "grad_norm": 0.4392826557159424, + "learning_rate": 5.785882416101595e-06, + "loss": 0.0873, + "step": 2202 + }, + { + "epoch": 1.7224394057857702, + "grad_norm": 0.4337979853153229, + "learning_rate": 5.754060549520956e-06, + "loss": 0.0365, + "step": 2203 + }, + { + "epoch": 1.7232212666145426, + "grad_norm": 0.4514532685279846, + "learning_rate": 5.722321089628363e-06, + "loss": 0.059, + "step": 2204 + }, + { + "epoch": 1.7240031274433152, + "grad_norm": 0.5406986474990845, + "learning_rate": 5.690664095537568e-06, + "loss": 0.0937, + "step": 2205 + }, + { + "epoch": 1.7247849882720876, + "grad_norm": 0.470359742641449, + "learning_rate": 5.659089626208769e-06, + "loss": 0.0601, + "step": 2206 + }, + { + "epoch": 1.72556684910086, + "grad_norm": 0.39090287685394287, + "learning_rate": 5.627597740448398e-06, + "loss": 0.0379, + "step": 2207 + }, + { + "epoch": 1.7263487099296326, + "grad_norm": 0.4238806664943695, + "learning_rate": 5.59618849690915e-06, + "loss": 0.0821, + "step": 2208 + }, + { + "epoch": 1.727130570758405, + "grad_norm": 0.41596564650535583, + "learning_rate": 5.5648619540897395e-06, + "loss": 0.0316, + "step": 2209 + }, + { + "epoch": 1.7279124315871774, + "grad_norm": 0.47083380818367004, + "learning_rate": 5.533618170334898e-06, + "loss": 0.0437, + "step": 2210 + }, + { + "epoch": 1.72869429241595, + "grad_norm": 0.3975255787372589, + "learning_rate": 5.502457203835187e-06, + "loss": 0.0536, + "step": 2211 + }, + { + "epoch": 1.7294761532447225, + "grad_norm": 0.3355897068977356, + "learning_rate": 5.47137911262694e-06, + "loss": 0.0505, + "step": 2212 + }, + { + "epoch": 1.7302580140734949, + "grad_norm": 0.3098110854625702, + "learning_rate": 5.4403839545921595e-06, + "loss": 0.0384, + "step": 2213 + }, + { + "epoch": 1.7310398749022675, + "grad_norm": 0.5140456557273865, + "learning_rate": 5.409471787458342e-06, + "loss": 0.0551, + "step": 2214 + }, + { + "epoch": 1.73182173573104, + "grad_norm": 0.3814539611339569, + "learning_rate": 5.378642668798428e-06, + "loss": 0.0835, + "step": 2215 + }, + { + "epoch": 1.7326035965598123, + "grad_norm": 0.4870288670063019, + "learning_rate": 5.347896656030738e-06, + "loss": 0.0863, + "step": 2216 + }, + { + "epoch": 1.733385457388585, + "grad_norm": 0.406101793050766, + "learning_rate": 5.317233806418708e-06, + "loss": 0.0615, + "step": 2217 + }, + { + "epoch": 1.7341673182173571, + "grad_norm": 0.3656727373600006, + "learning_rate": 5.286654177070988e-06, + "loss": 0.05, + "step": 2218 + }, + { + "epoch": 1.7349491790461298, + "grad_norm": 0.5171136856079102, + "learning_rate": 5.2561578249411824e-06, + "loss": 0.0475, + "step": 2219 + }, + { + "epoch": 1.7357310398749024, + "grad_norm": 0.5292237997055054, + "learning_rate": 
5.2257448068277705e-06, + "loss": 0.047, + "step": 2220 + }, + { + "epoch": 1.7365129007036746, + "grad_norm": 0.40704137086868286, + "learning_rate": 5.19541517937408e-06, + "loss": 0.0386, + "step": 2221 + }, + { + "epoch": 1.7372947615324472, + "grad_norm": 0.36376097798347473, + "learning_rate": 5.165168999068098e-06, + "loss": 0.0565, + "step": 2222 + }, + { + "epoch": 1.7380766223612198, + "grad_norm": 0.5038173794746399, + "learning_rate": 5.135006322242386e-06, + "loss": 0.0557, + "step": 2223 + }, + { + "epoch": 1.738858483189992, + "grad_norm": 0.3662266433238983, + "learning_rate": 5.1049272050740005e-06, + "loss": 0.0541, + "step": 2224 + }, + { + "epoch": 1.7396403440187647, + "grad_norm": 0.483214795589447, + "learning_rate": 5.074931703584352e-06, + "loss": 0.038, + "step": 2225 + }, + { + "epoch": 1.7404222048475373, + "grad_norm": 0.31406158208847046, + "learning_rate": 5.045019873639162e-06, + "loss": 0.0234, + "step": 2226 + }, + { + "epoch": 1.7412040656763095, + "grad_norm": 0.4109937250614166, + "learning_rate": 5.015191770948241e-06, + "loss": 0.0489, + "step": 2227 + }, + { + "epoch": 1.741985926505082, + "grad_norm": 0.30827614665031433, + "learning_rate": 4.985447451065528e-06, + "loss": 0.0388, + "step": 2228 + }, + { + "epoch": 1.7427677873338547, + "grad_norm": 0.44419851899147034, + "learning_rate": 4.955786969388909e-06, + "loss": 0.0542, + "step": 2229 + }, + { + "epoch": 1.743549648162627, + "grad_norm": 0.3911994695663452, + "learning_rate": 4.926210381160074e-06, + "loss": 0.0371, + "step": 2230 + }, + { + "epoch": 1.7443315089913995, + "grad_norm": 0.30713513493537903, + "learning_rate": 4.896717741464524e-06, + "loss": 0.0444, + "step": 2231 + }, + { + "epoch": 1.745113369820172, + "grad_norm": 0.3942602276802063, + "learning_rate": 4.8673091052313826e-06, + "loss": 0.0603, + "step": 2232 + }, + { + "epoch": 1.7458952306489444, + "grad_norm": 0.36613914370536804, + "learning_rate": 4.837984527233314e-06, + "loss": 0.0444, + "step": 2233 + }, + { + "epoch": 1.746677091477717, + "grad_norm": 0.32284703850746155, + "learning_rate": 4.808744062086446e-06, + "loss": 0.0366, + "step": 2234 + }, + { + "epoch": 1.7474589523064894, + "grad_norm": 0.39046886563301086, + "learning_rate": 4.77958776425022e-06, + "loss": 0.0509, + "step": 2235 + }, + { + "epoch": 1.7482408131352618, + "grad_norm": 0.5282332301139832, + "learning_rate": 4.750515688027351e-06, + "loss": 0.0912, + "step": 2236 + }, + { + "epoch": 1.7490226739640344, + "grad_norm": 0.490951806306839, + "learning_rate": 4.721527887563659e-06, + "loss": 0.0695, + "step": 2237 + }, + { + "epoch": 1.7498045347928068, + "grad_norm": 0.3287993371486664, + "learning_rate": 4.69262441684804e-06, + "loss": 0.0414, + "step": 2238 + }, + { + "epoch": 1.7505863956215792, + "grad_norm": 0.45448336005210876, + "learning_rate": 4.663805329712318e-06, + "loss": 0.0385, + "step": 2239 + }, + { + "epoch": 1.7513682564503519, + "grad_norm": 0.49318641424179077, + "learning_rate": 4.635070679831116e-06, + "loss": 0.0635, + "step": 2240 + }, + { + "epoch": 1.7521501172791243, + "grad_norm": 0.38292163610458374, + "learning_rate": 4.6064205207218546e-06, + "loss": 0.0357, + "step": 2241 + }, + { + "epoch": 1.7529319781078967, + "grad_norm": 0.3284981846809387, + "learning_rate": 4.5778549057445556e-06, + "loss": 0.024, + "step": 2242 + }, + { + "epoch": 1.7537138389366693, + "grad_norm": 0.35669028759002686, + "learning_rate": 4.549373888101793e-06, + "loss": 0.0276, + "step": 2243 + }, + { + "epoch": 1.7544956997654417, + 
"grad_norm": 0.4276807904243469, + "learning_rate": 4.5209775208385704e-06, + "loss": 0.039, + "step": 2244 + }, + { + "epoch": 1.7552775605942141, + "grad_norm": 0.36494016647338867, + "learning_rate": 4.492665856842249e-06, + "loss": 0.0509, + "step": 2245 + }, + { + "epoch": 1.7560594214229868, + "grad_norm": 0.30909621715545654, + "learning_rate": 4.464438948842414e-06, + "loss": 0.0242, + "step": 2246 + }, + { + "epoch": 1.7568412822517592, + "grad_norm": 0.3083038032054901, + "learning_rate": 4.4362968494108145e-06, + "loss": 0.0909, + "step": 2247 + }, + { + "epoch": 1.7576231430805316, + "grad_norm": 0.35672706365585327, + "learning_rate": 4.408239610961223e-06, + "loss": 0.0487, + "step": 2248 + }, + { + "epoch": 1.7584050039093042, + "grad_norm": 0.40354442596435547, + "learning_rate": 4.3802672857494006e-06, + "loss": 0.0621, + "step": 2249 + }, + { + "epoch": 1.7591868647380766, + "grad_norm": 0.532529354095459, + "learning_rate": 4.352379925872901e-06, + "loss": 0.0866, + "step": 2250 + }, + { + "epoch": 1.759968725566849, + "grad_norm": 0.39215683937072754, + "learning_rate": 4.324577583271089e-06, + "loss": 0.0339, + "step": 2251 + }, + { + "epoch": 1.7607505863956217, + "grad_norm": 0.383793443441391, + "learning_rate": 4.29686030972496e-06, + "loss": 0.0491, + "step": 2252 + }, + { + "epoch": 1.761532447224394, + "grad_norm": 0.49607357382774353, + "learning_rate": 4.269228156857069e-06, + "loss": 0.0486, + "step": 2253 + }, + { + "epoch": 1.7623143080531665, + "grad_norm": 0.43889257311820984, + "learning_rate": 4.241681176131451e-06, + "loss": 0.0564, + "step": 2254 + }, + { + "epoch": 1.763096168881939, + "grad_norm": 0.3303699195384979, + "learning_rate": 4.2142194188534934e-06, + "loss": 0.046, + "step": 2255 + }, + { + "epoch": 1.7638780297107115, + "grad_norm": 0.3569354712963104, + "learning_rate": 4.186842936169877e-06, + "loss": 0.0357, + "step": 2256 + }, + { + "epoch": 1.764659890539484, + "grad_norm": 0.3961658179759979, + "learning_rate": 4.159551779068438e-06, + "loss": 0.0538, + "step": 2257 + }, + { + "epoch": 1.7654417513682565, + "grad_norm": 0.38139376044273376, + "learning_rate": 4.132345998378112e-06, + "loss": 0.0565, + "step": 2258 + }, + { + "epoch": 1.766223612197029, + "grad_norm": 0.38264214992523193, + "learning_rate": 4.1052256447688285e-06, + "loss": 0.0461, + "step": 2259 + }, + { + "epoch": 1.7670054730258014, + "grad_norm": 0.34434980154037476, + "learning_rate": 4.078190768751389e-06, + "loss": 0.0373, + "step": 2260 + }, + { + "epoch": 1.767787333854574, + "grad_norm": 0.4508064091205597, + "learning_rate": 4.051241420677427e-06, + "loss": 0.0995, + "step": 2261 + }, + { + "epoch": 1.7685691946833464, + "grad_norm": 0.6263595223426819, + "learning_rate": 4.024377650739264e-06, + "loss": 0.0725, + "step": 2262 + }, + { + "epoch": 1.7693510555121188, + "grad_norm": 0.3575042486190796, + "learning_rate": 3.997599508969829e-06, + "loss": 0.0541, + "step": 2263 + }, + { + "epoch": 1.7701329163408914, + "grad_norm": 0.5154090523719788, + "learning_rate": 3.970907045242583e-06, + "loss": 0.0544, + "step": 2264 + }, + { + "epoch": 1.7709147771696638, + "grad_norm": 0.362200528383255, + "learning_rate": 3.9443003092714095e-06, + "loss": 0.0471, + "step": 2265 + }, + { + "epoch": 1.7716966379984362, + "grad_norm": 0.4568684995174408, + "learning_rate": 3.9177793506105285e-06, + "loss": 0.0969, + "step": 2266 + }, + { + "epoch": 1.7724784988272089, + "grad_norm": 0.3077591359615326, + "learning_rate": 3.891344218654403e-06, + "loss": 0.0279, + 
"step": 2267 + }, + { + "epoch": 1.7732603596559813, + "grad_norm": 0.4141885042190552, + "learning_rate": 3.864994962637647e-06, + "loss": 0.0481, + "step": 2268 + }, + { + "epoch": 1.7740422204847537, + "grad_norm": 0.4456418454647064, + "learning_rate": 3.8387316316349285e-06, + "loss": 0.0602, + "step": 2269 + }, + { + "epoch": 1.7748240813135263, + "grad_norm": 0.32831570506095886, + "learning_rate": 3.8125542745608756e-06, + "loss": 0.0255, + "step": 2270 + }, + { + "epoch": 1.7756059421422987, + "grad_norm": 0.429839164018631, + "learning_rate": 3.7864629401700214e-06, + "loss": 0.0412, + "step": 2271 + }, + { + "epoch": 1.7763878029710711, + "grad_norm": 0.4667169749736786, + "learning_rate": 3.7604576770566724e-06, + "loss": 0.0715, + "step": 2272 + }, + { + "epoch": 1.7771696637998438, + "grad_norm": 0.34892037510871887, + "learning_rate": 3.7345385336547856e-06, + "loss": 0.0806, + "step": 2273 + }, + { + "epoch": 1.777951524628616, + "grad_norm": 0.5732429623603821, + "learning_rate": 3.7087055582379957e-06, + "loss": 0.062, + "step": 2274 + }, + { + "epoch": 1.7787333854573886, + "grad_norm": 0.44712191820144653, + "learning_rate": 3.682958798919406e-06, + "loss": 0.0449, + "step": 2275 + }, + { + "epoch": 1.7795152462861612, + "grad_norm": 0.28738516569137573, + "learning_rate": 3.6572983036515572e-06, + "loss": 0.058, + "step": 2276 + }, + { + "epoch": 1.7802971071149334, + "grad_norm": 0.401889443397522, + "learning_rate": 3.631724120226321e-06, + "loss": 0.0501, + "step": 2277 + }, + { + "epoch": 1.781078967943706, + "grad_norm": 0.37569504976272583, + "learning_rate": 3.6062362962748176e-06, + "loss": 0.0419, + "step": 2278 + }, + { + "epoch": 1.7818608287724786, + "grad_norm": 0.39430519938468933, + "learning_rate": 3.5808348792673364e-06, + "loss": 0.0551, + "step": 2279 + }, + { + "epoch": 1.7826426896012508, + "grad_norm": 0.3892035484313965, + "learning_rate": 3.5555199165132168e-06, + "loss": 0.0325, + "step": 2280 + }, + { + "epoch": 1.7834245504300235, + "grad_norm": 0.4657302796840668, + "learning_rate": 3.5302914551608112e-06, + "loss": 0.0612, + "step": 2281 + }, + { + "epoch": 1.784206411258796, + "grad_norm": 0.4596342444419861, + "learning_rate": 3.5051495421973223e-06, + "loss": 0.0566, + "step": 2282 + }, + { + "epoch": 1.7849882720875683, + "grad_norm": 0.39085909724235535, + "learning_rate": 3.480094224448788e-06, + "loss": 0.0394, + "step": 2283 + }, + { + "epoch": 1.785770132916341, + "grad_norm": 0.4541622996330261, + "learning_rate": 3.4551255485799627e-06, + "loss": 0.0432, + "step": 2284 + }, + { + "epoch": 1.7865519937451135, + "grad_norm": 0.4123796224594116, + "learning_rate": 3.4302435610942372e-06, + "loss": 0.0525, + "step": 2285 + }, + { + "epoch": 1.7873338545738857, + "grad_norm": 0.5204493403434753, + "learning_rate": 3.4054483083335255e-06, + "loss": 0.0732, + "step": 2286 + }, + { + "epoch": 1.7881157154026583, + "grad_norm": 0.41093361377716064, + "learning_rate": 3.3807398364782307e-06, + "loss": 0.04, + "step": 2287 + }, + { + "epoch": 1.7888975762314308, + "grad_norm": 0.4056856334209442, + "learning_rate": 3.3561181915471028e-06, + "loss": 0.0627, + "step": 2288 + }, + { + "epoch": 1.7896794370602032, + "grad_norm": 0.3035411238670349, + "learning_rate": 3.331583419397194e-06, + "loss": 0.0433, + "step": 2289 + }, + { + "epoch": 1.7904612978889758, + "grad_norm": 0.42910125851631165, + "learning_rate": 3.307135565723746e-06, + "loss": 0.067, + "step": 2290 + }, + { + "epoch": 1.7912431587177482, + "grad_norm": 0.38780465722084045, 
+ "learning_rate": 3.2827746760601573e-06, + "loss": 0.0511, + "step": 2291 + }, + { + "epoch": 1.7920250195465206, + "grad_norm": 0.3865264654159546, + "learning_rate": 3.2585007957777967e-06, + "loss": 0.0544, + "step": 2292 + }, + { + "epoch": 1.7928068803752932, + "grad_norm": 0.354108065366745, + "learning_rate": 3.2343139700860168e-06, + "loss": 0.0591, + "step": 2293 + }, + { + "epoch": 1.7935887412040656, + "grad_norm": 0.37514495849609375, + "learning_rate": 3.2102142440320447e-06, + "loss": 0.0441, + "step": 2294 + }, + { + "epoch": 1.794370602032838, + "grad_norm": 0.38472533226013184, + "learning_rate": 3.186201662500865e-06, + "loss": 0.0433, + "step": 2295 + }, + { + "epoch": 1.7951524628616107, + "grad_norm": 0.41590240597724915, + "learning_rate": 3.1622762702151585e-06, + "loss": 0.0644, + "step": 2296 + }, + { + "epoch": 1.795934323690383, + "grad_norm": 0.43187642097473145, + "learning_rate": 3.138438111735231e-06, + "loss": 0.0474, + "step": 2297 + }, + { + "epoch": 1.7967161845191555, + "grad_norm": 0.4443909525871277, + "learning_rate": 3.1146872314589127e-06, + "loss": 0.0495, + "step": 2298 + }, + { + "epoch": 1.7974980453479281, + "grad_norm": 0.38686707615852356, + "learning_rate": 3.0910236736214794e-06, + "loss": 0.0621, + "step": 2299 + }, + { + "epoch": 1.7982799061767005, + "grad_norm": 0.47035327553749084, + "learning_rate": 3.067447482295577e-06, + "loss": 0.0824, + "step": 2300 + }, + { + "epoch": 1.799061767005473, + "grad_norm": 0.4474782645702362, + "learning_rate": 3.043958701391114e-06, + "loss": 0.0807, + "step": 2301 + }, + { + "epoch": 1.7998436278342456, + "grad_norm": 0.3506847023963928, + "learning_rate": 3.0205573746552364e-06, + "loss": 0.0351, + "step": 2302 + }, + { + "epoch": 1.800625488663018, + "grad_norm": 0.39736175537109375, + "learning_rate": 2.9972435456721627e-06, + "loss": 0.0495, + "step": 2303 + }, + { + "epoch": 1.8014073494917904, + "grad_norm": 0.35211628675460815, + "learning_rate": 2.9740172578632096e-06, + "loss": 0.0339, + "step": 2304 + }, + { + "epoch": 1.802189210320563, + "grad_norm": 0.42447611689567566, + "learning_rate": 2.9508785544865856e-06, + "loss": 0.0452, + "step": 2305 + }, + { + "epoch": 1.8029710711493354, + "grad_norm": 0.426352858543396, + "learning_rate": 2.9278274786374237e-06, + "loss": 0.043, + "step": 2306 + }, + { + "epoch": 1.8037529319781078, + "grad_norm": 0.4002751111984253, + "learning_rate": 2.9048640732476317e-06, + "loss": 0.0583, + "step": 2307 + }, + { + "epoch": 1.8045347928068804, + "grad_norm": 0.4455183148384094, + "learning_rate": 2.8819883810858428e-06, + "loss": 0.065, + "step": 2308 + }, + { + "epoch": 1.8053166536356529, + "grad_norm": 0.432157039642334, + "learning_rate": 2.8592004447573207e-06, + "loss": 0.0337, + "step": 2309 + }, + { + "epoch": 1.8060985144644253, + "grad_norm": 0.3376258611679077, + "learning_rate": 2.836500306703882e-06, + "loss": 0.0523, + "step": 2310 + }, + { + "epoch": 1.806880375293198, + "grad_norm": 0.3891136050224304, + "learning_rate": 2.813888009203841e-06, + "loss": 0.0566, + "step": 2311 + }, + { + "epoch": 1.8076622361219703, + "grad_norm": 0.48112839460372925, + "learning_rate": 2.791363594371893e-06, + "loss": 0.0487, + "step": 2312 + }, + { + "epoch": 1.8084440969507427, + "grad_norm": 0.422264039516449, + "learning_rate": 2.768927104159058e-06, + "loss": 0.0889, + "step": 2313 + }, + { + "epoch": 1.8092259577795153, + "grad_norm": 0.37410929799079895, + "learning_rate": 2.7465785803526155e-06, + "loss": 0.0477, + "step": 2314 + }, + { + 
"epoch": 1.8100078186082877, + "grad_norm": 0.4331490099430084, + "learning_rate": 2.724318064575976e-06, + "loss": 0.0385, + "step": 2315 + }, + { + "epoch": 1.8107896794370602, + "grad_norm": 0.3773462772369385, + "learning_rate": 2.7021455982886747e-06, + "loss": 0.0225, + "step": 2316 + }, + { + "epoch": 1.8115715402658328, + "grad_norm": 0.40394389629364014, + "learning_rate": 2.6800612227862453e-06, + "loss": 0.0407, + "step": 2317 + }, + { + "epoch": 1.8123534010946052, + "grad_norm": 0.6558087468147278, + "learning_rate": 2.658064979200131e-06, + "loss": 0.0767, + "step": 2318 + }, + { + "epoch": 1.8131352619233776, + "grad_norm": 0.359089732170105, + "learning_rate": 2.6361569084976723e-06, + "loss": 0.0374, + "step": 2319 + }, + { + "epoch": 1.8139171227521502, + "grad_norm": 0.47283199429512024, + "learning_rate": 2.6143370514819634e-06, + "loss": 0.0693, + "step": 2320 + }, + { + "epoch": 1.8146989835809226, + "grad_norm": 0.35094255208969116, + "learning_rate": 2.592605448791807e-06, + "loss": 0.0371, + "step": 2321 + }, + { + "epoch": 1.815480844409695, + "grad_norm": 0.2955813705921173, + "learning_rate": 2.5709621409016394e-06, + "loss": 0.0491, + "step": 2322 + }, + { + "epoch": 1.8162627052384677, + "grad_norm": 0.46730920672416687, + "learning_rate": 2.5494071681214483e-06, + "loss": 0.0713, + "step": 2323 + }, + { + "epoch": 1.81704456606724, + "grad_norm": 0.36728736758232117, + "learning_rate": 2.5279405705967097e-06, + "loss": 0.0405, + "step": 2324 + }, + { + "epoch": 1.8178264268960125, + "grad_norm": 0.42313656210899353, + "learning_rate": 2.5065623883082867e-06, + "loss": 0.0345, + "step": 2325 + }, + { + "epoch": 1.818608287724785, + "grad_norm": 0.33062589168548584, + "learning_rate": 2.4852726610723743e-06, + "loss": 0.0648, + "step": 2326 + }, + { + "epoch": 1.8193901485535575, + "grad_norm": 0.37844690680503845, + "learning_rate": 2.4640714285404544e-06, + "loss": 0.0443, + "step": 2327 + }, + { + "epoch": 1.82017200938233, + "grad_norm": 0.4094715118408203, + "learning_rate": 2.442958730199124e-06, + "loss": 0.0486, + "step": 2328 + }, + { + "epoch": 1.8209538702111026, + "grad_norm": 0.4287872016429901, + "learning_rate": 2.421934605370163e-06, + "loss": 0.0509, + "step": 2329 + }, + { + "epoch": 1.8217357310398747, + "grad_norm": 0.38323545455932617, + "learning_rate": 2.4009990932103322e-06, + "loss": 0.0498, + "step": 2330 + }, + { + "epoch": 1.8225175918686474, + "grad_norm": 0.45034465193748474, + "learning_rate": 2.3801522327113802e-06, + "loss": 0.0921, + "step": 2331 + }, + { + "epoch": 1.82329945269742, + "grad_norm": 0.4322207272052765, + "learning_rate": 2.359394062699932e-06, + "loss": 0.0529, + "step": 2332 + }, + { + "epoch": 1.8240813135261922, + "grad_norm": 0.5762638449668884, + "learning_rate": 2.338724621837435e-06, + "loss": 0.0752, + "step": 2333 + }, + { + "epoch": 1.8248631743549648, + "grad_norm": 0.5516983270645142, + "learning_rate": 2.318143948620094e-06, + "loss": 0.0571, + "step": 2334 + }, + { + "epoch": 1.8256450351837374, + "grad_norm": 0.3905304968357086, + "learning_rate": 2.2976520813787594e-06, + "loss": 0.0457, + "step": 2335 + }, + { + "epoch": 1.8264268960125096, + "grad_norm": 0.4931924343109131, + "learning_rate": 2.2772490582788965e-06, + "loss": 0.0542, + "step": 2336 + }, + { + "epoch": 1.8272087568412823, + "grad_norm": 0.42153871059417725, + "learning_rate": 2.2569349173205133e-06, + "loss": 0.0591, + "step": 2337 + }, + { + "epoch": 1.8279906176700549, + "grad_norm": 0.35189369320869446, + "learning_rate": 
2.236709696338052e-06, + "loss": 0.0484, + "step": 2338 + }, + { + "epoch": 1.828772478498827, + "grad_norm": 0.4322154223918915, + "learning_rate": 2.2165734330003686e-06, + "loss": 0.0376, + "step": 2339 + }, + { + "epoch": 1.8295543393275997, + "grad_norm": 0.3702199459075928, + "learning_rate": 2.1965261648106305e-06, + "loss": 0.045, + "step": 2340 + }, + { + "epoch": 1.8303362001563723, + "grad_norm": 0.3591911792755127, + "learning_rate": 2.176567929106249e-06, + "loss": 0.0333, + "step": 2341 + }, + { + "epoch": 1.8311180609851445, + "grad_norm": 0.4833166003227234, + "learning_rate": 2.156698763058812e-06, + "loss": 0.0701, + "step": 2342 + }, + { + "epoch": 1.8318999218139171, + "grad_norm": 0.38630276918411255, + "learning_rate": 2.1369187036740235e-06, + "loss": 0.0843, + "step": 2343 + }, + { + "epoch": 1.8326817826426895, + "grad_norm": 0.33287540078163147, + "learning_rate": 2.117227787791648e-06, + "loss": 0.0382, + "step": 2344 + }, + { + "epoch": 1.833463643471462, + "grad_norm": 0.3747866451740265, + "learning_rate": 2.0976260520853886e-06, + "loss": 0.0516, + "step": 2345 + }, + { + "epoch": 1.8342455043002346, + "grad_norm": 0.502683699131012, + "learning_rate": 2.078113533062864e-06, + "loss": 0.0794, + "step": 2346 + }, + { + "epoch": 1.835027365129007, + "grad_norm": 0.40196695923805237, + "learning_rate": 2.0586902670655606e-06, + "loss": 0.0402, + "step": 2347 + }, + { + "epoch": 1.8358092259577794, + "grad_norm": 0.3394660949707031, + "learning_rate": 2.0393562902686737e-06, + "loss": 0.0576, + "step": 2348 + }, + { + "epoch": 1.836591086786552, + "grad_norm": 0.3652074337005615, + "learning_rate": 2.02011163868116e-06, + "loss": 0.0459, + "step": 2349 + }, + { + "epoch": 1.8373729476153244, + "grad_norm": 0.3400147557258606, + "learning_rate": 2.0009563481455775e-06, + "loss": 0.0484, + "step": 2350 + }, + { + "epoch": 1.8381548084440968, + "grad_norm": 0.43868163228034973, + "learning_rate": 1.9818904543380643e-06, + "loss": 0.0645, + "step": 2351 + }, + { + "epoch": 1.8389366692728695, + "grad_norm": 0.5077487826347351, + "learning_rate": 1.962913992768245e-06, + "loss": 0.0913, + "step": 2352 + }, + { + "epoch": 1.8397185301016419, + "grad_norm": 0.34455615282058716, + "learning_rate": 1.9440269987791914e-06, + "loss": 0.0661, + "step": 2353 + }, + { + "epoch": 1.8405003909304143, + "grad_norm": 0.3783716857433319, + "learning_rate": 1.92522950754736e-06, + "loss": 0.0284, + "step": 2354 + }, + { + "epoch": 1.841282251759187, + "grad_norm": 0.5611537098884583, + "learning_rate": 1.90652155408248e-06, + "loss": 0.0454, + "step": 2355 + }, + { + "epoch": 1.8420641125879593, + "grad_norm": 0.39524754881858826, + "learning_rate": 1.8879031732275231e-06, + "loss": 0.0629, + "step": 2356 + }, + { + "epoch": 1.8428459734167317, + "grad_norm": 0.5189679265022278, + "learning_rate": 1.8693743996586742e-06, + "loss": 0.0333, + "step": 2357 + }, + { + "epoch": 1.8436278342455044, + "grad_norm": 0.2966693341732025, + "learning_rate": 1.8509352678851732e-06, + "loss": 0.0427, + "step": 2358 + }, + { + "epoch": 1.8444096950742768, + "grad_norm": 0.38795343041419983, + "learning_rate": 1.8325858122493432e-06, + "loss": 0.0576, + "step": 2359 + }, + { + "epoch": 1.8451915559030492, + "grad_norm": 0.44317272305488586, + "learning_rate": 1.814326066926475e-06, + "loss": 0.0593, + "step": 2360 + }, + { + "epoch": 1.8459734167318218, + "grad_norm": 0.3784867525100708, + "learning_rate": 1.7961560659247646e-06, + "loss": 0.0499, + "step": 2361 + }, + { + "epoch": 
1.8467552775605942, + "grad_norm": 0.39091184735298157, + "learning_rate": 1.7780758430852917e-06, + "loss": 0.0553, + "step": 2362 + }, + { + "epoch": 1.8475371383893666, + "grad_norm": 0.3573996424674988, + "learning_rate": 1.7600854320819038e-06, + "loss": 0.0552, + "step": 2363 + }, + { + "epoch": 1.8483189992181392, + "grad_norm": 0.40562674403190613, + "learning_rate": 1.7421848664211759e-06, + "loss": 0.0969, + "step": 2364 + }, + { + "epoch": 1.8491008600469117, + "grad_norm": 0.35688963532447815, + "learning_rate": 1.7243741794423619e-06, + "loss": 0.0608, + "step": 2365 + }, + { + "epoch": 1.849882720875684, + "grad_norm": 0.44463032484054565, + "learning_rate": 1.7066534043173098e-06, + "loss": 0.0487, + "step": 2366 + }, + { + "epoch": 1.8506645817044567, + "grad_norm": 0.44885513186454773, + "learning_rate": 1.6890225740504251e-06, + "loss": 0.0674, + "step": 2367 + }, + { + "epoch": 1.851446442533229, + "grad_norm": 0.4468289911746979, + "learning_rate": 1.6714817214785572e-06, + "loss": 0.0468, + "step": 2368 + }, + { + "epoch": 1.8522283033620015, + "grad_norm": 0.3201262652873993, + "learning_rate": 1.6540308792710235e-06, + "loss": 0.0447, + "step": 2369 + }, + { + "epoch": 1.8530101641907741, + "grad_norm": 0.4483521282672882, + "learning_rate": 1.63667007992947e-06, + "loss": 0.0535, + "step": 2370 + }, + { + "epoch": 1.8537920250195465, + "grad_norm": 0.39991673827171326, + "learning_rate": 1.6193993557878317e-06, + "loss": 0.0374, + "step": 2371 + }, + { + "epoch": 1.854573885848319, + "grad_norm": 0.33672747015953064, + "learning_rate": 1.6022187390123067e-06, + "loss": 0.0369, + "step": 2372 + }, + { + "epoch": 1.8553557466770916, + "grad_norm": 0.4697551131248474, + "learning_rate": 1.5851282616012653e-06, + "loss": 0.0679, + "step": 2373 + }, + { + "epoch": 1.856137607505864, + "grad_norm": 0.3470526933670044, + "learning_rate": 1.5681279553851846e-06, + "loss": 0.0429, + "step": 2374 + }, + { + "epoch": 1.8569194683346364, + "grad_norm": 0.340726375579834, + "learning_rate": 1.5512178520266096e-06, + "loss": 0.0374, + "step": 2375 + }, + { + "epoch": 1.857701329163409, + "grad_norm": 0.4430573582649231, + "learning_rate": 1.5343979830200695e-06, + "loss": 0.0436, + "step": 2376 + }, + { + "epoch": 1.8584831899921814, + "grad_norm": 0.44432032108306885, + "learning_rate": 1.5176683796920721e-06, + "loss": 0.0379, + "step": 2377 + }, + { + "epoch": 1.8592650508209538, + "grad_norm": 0.6266927719116211, + "learning_rate": 1.5010290732009658e-06, + "loss": 0.0946, + "step": 2378 + }, + { + "epoch": 1.8600469116497265, + "grad_norm": 0.40188395977020264, + "learning_rate": 1.4844800945369498e-06, + "loss": 0.0716, + "step": 2379 + }, + { + "epoch": 1.8608287724784989, + "grad_norm": 0.35556766390800476, + "learning_rate": 1.46802147452198e-06, + "loss": 0.0546, + "step": 2380 + }, + { + "epoch": 1.8616106333072713, + "grad_norm": 0.4273186922073364, + "learning_rate": 1.4516532438097196e-06, + "loss": 0.0794, + "step": 2381 + }, + { + "epoch": 1.862392494136044, + "grad_norm": 0.3742946982383728, + "learning_rate": 1.435375432885505e-06, + "loss": 0.0438, + "step": 2382 + }, + { + "epoch": 1.8631743549648163, + "grad_norm": 0.32148027420043945, + "learning_rate": 1.419188072066241e-06, + "loss": 0.044, + "step": 2383 + }, + { + "epoch": 1.8639562157935887, + "grad_norm": 0.4895723760128021, + "learning_rate": 1.403091191500383e-06, + "loss": 0.0544, + "step": 2384 + }, + { + "epoch": 1.8647380766223614, + "grad_norm": 0.390939325094223, + "learning_rate": 
1.3870848211678833e-06, + "loss": 0.0558, + "step": 2385 + }, + { + "epoch": 1.8655199374511335, + "grad_norm": 0.35615217685699463, + "learning_rate": 1.371168990880095e-06, + "loss": 0.0446, + "step": 2386 + }, + { + "epoch": 1.8663017982799062, + "grad_norm": 0.3843987286090851, + "learning_rate": 1.3553437302797733e-06, + "loss": 0.0667, + "step": 2387 + }, + { + "epoch": 1.8670836591086788, + "grad_norm": 0.3831058442592621, + "learning_rate": 1.339609068840958e-06, + "loss": 0.0407, + "step": 2388 + }, + { + "epoch": 1.867865519937451, + "grad_norm": 0.4777224361896515, + "learning_rate": 1.3239650358689737e-06, + "loss": 0.0642, + "step": 2389 + }, + { + "epoch": 1.8686473807662236, + "grad_norm": 0.2945762574672699, + "learning_rate": 1.3084116605003582e-06, + "loss": 0.0595, + "step": 2390 + }, + { + "epoch": 1.8694292415949962, + "grad_norm": 0.35123440623283386, + "learning_rate": 1.292948971702773e-06, + "loss": 0.0378, + "step": 2391 + }, + { + "epoch": 1.8702111024237684, + "grad_norm": 0.287575900554657, + "learning_rate": 1.2775769982750041e-06, + "loss": 0.0515, + "step": 2392 + }, + { + "epoch": 1.870992963252541, + "grad_norm": 0.3514576256275177, + "learning_rate": 1.2622957688468717e-06, + "loss": 0.083, + "step": 2393 + }, + { + "epoch": 1.8717748240813137, + "grad_norm": 0.45359691977500916, + "learning_rate": 1.247105311879193e-06, + "loss": 0.0826, + "step": 2394 + }, + { + "epoch": 1.8725566849100859, + "grad_norm": 0.3990536332130432, + "learning_rate": 1.2320056556637205e-06, + "loss": 0.0373, + "step": 2395 + }, + { + "epoch": 1.8733385457388585, + "grad_norm": 0.5825475454330444, + "learning_rate": 1.216996828323097e-06, + "loss": 0.0875, + "step": 2396 + }, + { + "epoch": 1.8741204065676311, + "grad_norm": 0.3500322997570038, + "learning_rate": 1.2020788578107956e-06, + "loss": 0.0368, + "step": 2397 + }, + { + "epoch": 1.8749022673964033, + "grad_norm": 0.4058021605014801, + "learning_rate": 1.1872517719110743e-06, + "loss": 0.0564, + "step": 2398 + }, + { + "epoch": 1.875684128225176, + "grad_norm": 0.551173746585846, + "learning_rate": 1.1725155982389158e-06, + "loss": 0.0547, + "step": 2399 + }, + { + "epoch": 1.8764659890539483, + "grad_norm": 0.3550439476966858, + "learning_rate": 1.1578703642399935e-06, + "loss": 0.0512, + "step": 2400 + }, + { + "epoch": 1.8764659890539483, + "eval_loss": 0.12009644508361816, + "eval_runtime": 13.4244, + "eval_samples_per_second": 3.874, + "eval_steps_per_second": 0.968, + "step": 2400 + }, + { + "epoch": 1.8772478498827208, + "grad_norm": 0.4831383526325226, + "learning_rate": 1.1433160971905942e-06, + "loss": 0.0737, + "step": 2401 + }, + { + "epoch": 1.8780297107114934, + "grad_norm": 0.3398386836051941, + "learning_rate": 1.1288528241975904e-06, + "loss": 0.0464, + "step": 2402 + }, + { + "epoch": 1.8788115715402658, + "grad_norm": 0.31560376286506653, + "learning_rate": 1.1144805721983841e-06, + "loss": 0.0351, + "step": 2403 + }, + { + "epoch": 1.8795934323690382, + "grad_norm": 0.47334128618240356, + "learning_rate": 1.1001993679608414e-06, + "loss": 0.0692, + "step": 2404 + }, + { + "epoch": 1.8803752931978108, + "grad_norm": 0.4987127482891083, + "learning_rate": 1.0860092380832632e-06, + "loss": 0.067, + "step": 2405 + }, + { + "epoch": 1.8811571540265832, + "grad_norm": 0.4054931402206421, + "learning_rate": 1.0719102089943257e-06, + "loss": 0.0585, + "step": 2406 + }, + { + "epoch": 1.8819390148553556, + "grad_norm": 0.5126636624336243, + "learning_rate": 1.0579023069530346e-06, + "loss": 0.0557, + "step": 
2407 + }, + { + "epoch": 1.8827208756841283, + "grad_norm": 0.3793666958808899, + "learning_rate": 1.0439855580486714e-06, + "loss": 0.0457, + "step": 2408 + }, + { + "epoch": 1.8835027365129007, + "grad_norm": 0.5941369533538818, + "learning_rate": 1.0301599882007462e-06, + "loss": 0.0597, + "step": 2409 + }, + { + "epoch": 1.884284597341673, + "grad_norm": 0.396608829498291, + "learning_rate": 1.0164256231589563e-06, + "loss": 0.072, + "step": 2410 + }, + { + "epoch": 1.8850664581704457, + "grad_norm": 0.3238108456134796, + "learning_rate": 1.0027824885031288e-06, + "loss": 0.0526, + "step": 2411 + }, + { + "epoch": 1.8858483189992181, + "grad_norm": 0.664912760257721, + "learning_rate": 9.892306096431826e-07, + "loss": 0.0993, + "step": 2412 + }, + { + "epoch": 1.8866301798279905, + "grad_norm": 0.38779711723327637, + "learning_rate": 9.75770011819066e-07, + "loss": 0.0276, + "step": 2413 + }, + { + "epoch": 1.8874120406567632, + "grad_norm": 0.42406222224235535, + "learning_rate": 9.624007201007257e-07, + "loss": 0.0347, + "step": 2414 + }, + { + "epoch": 1.8881939014855356, + "grad_norm": 0.4085008502006531, + "learning_rate": 9.491227593880492e-07, + "loss": 0.0391, + "step": 2415 + }, + { + "epoch": 1.888975762314308, + "grad_norm": 0.3823298513889313, + "learning_rate": 9.359361544108324e-07, + "loss": 0.0489, + "step": 2416 + }, + { + "epoch": 1.8897576231430806, + "grad_norm": 0.5838673114776611, + "learning_rate": 9.228409297287132e-07, + "loss": 0.0562, + "step": 2417 + }, + { + "epoch": 1.890539483971853, + "grad_norm": 0.41118496656417847, + "learning_rate": 9.098371097311431e-07, + "loss": 0.0428, + "step": 2418 + }, + { + "epoch": 1.8913213448006254, + "grad_norm": 0.3399001955986023, + "learning_rate": 8.96924718637332e-07, + "loss": 0.0582, + "step": 2419 + }, + { + "epoch": 1.892103205629398, + "grad_norm": 0.4081033170223236, + "learning_rate": 8.841037804962094e-07, + "loss": 0.0682, + "step": 2420 + }, + { + "epoch": 1.8928850664581705, + "grad_norm": 0.4025924801826477, + "learning_rate": 8.713743191863633e-07, + "loss": 0.0282, + "step": 2421 + }, + { + "epoch": 1.8936669272869429, + "grad_norm": 0.36863648891448975, + "learning_rate": 8.587363584160401e-07, + "loss": 0.0491, + "step": 2422 + }, + { + "epoch": 1.8944487881157155, + "grad_norm": 0.42005249857902527, + "learning_rate": 8.46189921723034e-07, + "loss": 0.0637, + "step": 2423 + }, + { + "epoch": 1.895230648944488, + "grad_norm": 0.48608678579330444, + "learning_rate": 8.337350324746917e-07, + "loss": 0.0535, + "step": 2424 + }, + { + "epoch": 1.8960125097732603, + "grad_norm": 0.3172799348831177, + "learning_rate": 8.21371713867869e-07, + "loss": 0.0356, + "step": 2425 + }, + { + "epoch": 1.896794370602033, + "grad_norm": 0.49111539125442505, + "learning_rate": 8.090999889288576e-07, + "loss": 0.0542, + "step": 2426 + }, + { + "epoch": 1.8975762314308053, + "grad_norm": 0.38987061381340027, + "learning_rate": 7.969198805133638e-07, + "loss": 0.0634, + "step": 2427 + }, + { + "epoch": 1.8983580922595777, + "grad_norm": 0.3516485393047333, + "learning_rate": 7.848314113064636e-07, + "loss": 0.0278, + "step": 2428 + }, + { + "epoch": 1.8991399530883504, + "grad_norm": 0.32461583614349365, + "learning_rate": 7.728346038225475e-07, + "loss": 0.0242, + "step": 2429 + }, + { + "epoch": 1.8999218139171228, + "grad_norm": 0.8025784492492676, + "learning_rate": 7.609294804053091e-07, + "loss": 0.0712, + "step": 2430 + }, + { + "epoch": 1.9007036747458952, + "grad_norm": 0.33313748240470886, + "learning_rate": 
7.491160632276562e-07, + "loss": 0.0563, + "step": 2431 + }, + { + "epoch": 1.9014855355746678, + "grad_norm": 0.39373156428337097, + "learning_rate": 7.373943742917277e-07, + "loss": 0.0381, + "step": 2432 + }, + { + "epoch": 1.9022673964034402, + "grad_norm": 0.37192046642303467, + "learning_rate": 7.25764435428794e-07, + "loss": 0.0945, + "step": 2433 + }, + { + "epoch": 1.9030492572322126, + "grad_norm": 0.31872573494911194, + "learning_rate": 7.142262682992562e-07, + "loss": 0.0275, + "step": 2434 + }, + { + "epoch": 1.9038311180609853, + "grad_norm": 0.5576297044754028, + "learning_rate": 7.027798943925967e-07, + "loss": 0.0466, + "step": 2435 + }, + { + "epoch": 1.9046129788897577, + "grad_norm": 0.4052869975566864, + "learning_rate": 6.914253350273292e-07, + "loss": 0.0395, + "step": 2436 + }, + { + "epoch": 1.90539483971853, + "grad_norm": 0.30455952882766724, + "learning_rate": 6.801626113509651e-07, + "loss": 0.0558, + "step": 2437 + }, + { + "epoch": 1.9061767005473027, + "grad_norm": 0.6548801064491272, + "learning_rate": 6.689917443399862e-07, + "loss": 0.0499, + "step": 2438 + }, + { + "epoch": 1.9069585613760751, + "grad_norm": 0.3970543444156647, + "learning_rate": 6.579127547997721e-07, + "loss": 0.0741, + "step": 2439 + }, + { + "epoch": 1.9077404222048475, + "grad_norm": 0.4393087327480316, + "learning_rate": 6.469256633646059e-07, + "loss": 0.0449, + "step": 2440 + }, + { + "epoch": 1.9085222830336201, + "grad_norm": 0.319876492023468, + "learning_rate": 6.360304904976022e-07, + "loss": 0.0605, + "step": 2441 + }, + { + "epoch": 1.9093041438623923, + "grad_norm": 0.4261433482170105, + "learning_rate": 6.252272564906848e-07, + "loss": 0.0418, + "step": 2442 + }, + { + "epoch": 1.910086004691165, + "grad_norm": 0.44413477182388306, + "learning_rate": 6.145159814645362e-07, + "loss": 0.0762, + "step": 2443 + }, + { + "epoch": 1.9108678655199376, + "grad_norm": 0.3411540985107422, + "learning_rate": 6.038966853685712e-07, + "loss": 0.0413, + "step": 2444 + }, + { + "epoch": 1.9116497263487098, + "grad_norm": 0.4325558543205261, + "learning_rate": 5.933693879809132e-07, + "loss": 0.0458, + "step": 2445 + }, + { + "epoch": 1.9124315871774824, + "grad_norm": 0.5270968675613403, + "learning_rate": 5.829341089083118e-07, + "loss": 0.0636, + "step": 2446 + }, + { + "epoch": 1.913213448006255, + "grad_norm": 0.43004176020622253, + "learning_rate": 5.725908675861535e-07, + "loss": 0.0581, + "step": 2447 + }, + { + "epoch": 1.9139953088350272, + "grad_norm": 0.5290845036506653, + "learning_rate": 5.623396832784123e-07, + "loss": 0.0673, + "step": 2448 + }, + { + "epoch": 1.9147771696637998, + "grad_norm": 0.3586988151073456, + "learning_rate": 5.521805750775877e-07, + "loss": 0.0387, + "step": 2449 + }, + { + "epoch": 1.9155590304925725, + "grad_norm": 0.4046856760978699, + "learning_rate": 5.421135619047113e-07, + "loss": 0.0678, + "step": 2450 + }, + { + "epoch": 1.9163408913213447, + "grad_norm": 0.3092903196811676, + "learning_rate": 5.32138662509285e-07, + "loss": 0.0489, + "step": 2451 + }, + { + "epoch": 1.9171227521501173, + "grad_norm": 0.42022499442100525, + "learning_rate": 5.222558954692424e-07, + "loss": 0.0447, + "step": 2452 + }, + { + "epoch": 1.91790461297889, + "grad_norm": 0.4458214044570923, + "learning_rate": 5.124652791909324e-07, + "loss": 0.0447, + "step": 2453 + }, + { + "epoch": 1.918686473807662, + "grad_norm": 0.37331241369247437, + "learning_rate": 5.027668319090739e-07, + "loss": 0.0407, + "step": 2454 + }, + { + "epoch": 1.9194683346364347, + 
"grad_norm": 0.37612366676330566, + "learning_rate": 4.931605716867293e-07, + "loss": 0.0404, + "step": 2455 + }, + { + "epoch": 1.9202501954652071, + "grad_norm": 0.4228704273700714, + "learning_rate": 4.836465164152426e-07, + "loss": 0.0311, + "step": 2456 + }, + { + "epoch": 1.9210320562939796, + "grad_norm": 0.4672371745109558, + "learning_rate": 4.742246838142672e-07, + "loss": 0.0624, + "step": 2457 + }, + { + "epoch": 1.9218139171227522, + "grad_norm": 0.40904104709625244, + "learning_rate": 4.6489509143166633e-07, + "loss": 0.0491, + "step": 2458 + }, + { + "epoch": 1.9225957779515246, + "grad_norm": 0.5268110632896423, + "learning_rate": 4.5565775664351275e-07, + "loss": 0.0688, + "step": 2459 + }, + { + "epoch": 1.923377638780297, + "grad_norm": 0.3935900628566742, + "learning_rate": 4.465126966540556e-07, + "loss": 0.0488, + "step": 2460 + }, + { + "epoch": 1.9241594996090696, + "grad_norm": 0.4083102345466614, + "learning_rate": 4.3745992849568707e-07, + "loss": 0.0442, + "step": 2461 + }, + { + "epoch": 1.924941360437842, + "grad_norm": 0.3193533718585968, + "learning_rate": 4.28499469028909e-07, + "loss": 0.0385, + "step": 2462 + }, + { + "epoch": 1.9257232212666144, + "grad_norm": 0.48666051030158997, + "learning_rate": 4.196313349422942e-07, + "loss": 0.068, + "step": 2463 + }, + { + "epoch": 1.926505082095387, + "grad_norm": 0.42172297835350037, + "learning_rate": 4.1085554275246965e-07, + "loss": 0.0441, + "step": 2464 + }, + { + "epoch": 1.9272869429241595, + "grad_norm": 0.47432318329811096, + "learning_rate": 4.021721088040775e-07, + "loss": 0.0652, + "step": 2465 + }, + { + "epoch": 1.9280688037529319, + "grad_norm": 0.3693954646587372, + "learning_rate": 3.9358104926974225e-07, + "loss": 0.0727, + "step": 2466 + }, + { + "epoch": 1.9288506645817045, + "grad_norm": 0.5284464359283447, + "learning_rate": 3.8508238015003697e-07, + "loss": 0.0715, + "step": 2467 + }, + { + "epoch": 1.929632525410477, + "grad_norm": 0.43904706835746765, + "learning_rate": 3.7667611727348894e-07, + "loss": 0.0329, + "step": 2468 + }, + { + "epoch": 1.9304143862392493, + "grad_norm": 0.33485591411590576, + "learning_rate": 3.6836227629648e-07, + "loss": 0.0525, + "step": 2469 + }, + { + "epoch": 1.931196247068022, + "grad_norm": 0.34851041436195374, + "learning_rate": 3.6014087270329064e-07, + "loss": 0.0236, + "step": 2470 + }, + { + "epoch": 1.9319781078967944, + "grad_norm": 0.4184808135032654, + "learning_rate": 3.520119218060336e-07, + "loss": 0.0586, + "step": 2471 + }, + { + "epoch": 1.9327599687255668, + "grad_norm": 0.3958164155483246, + "learning_rate": 3.4397543874462037e-07, + "loss": 0.0626, + "step": 2472 + }, + { + "epoch": 1.9335418295543394, + "grad_norm": 0.3597150146961212, + "learning_rate": 3.360314384867558e-07, + "loss": 0.0873, + "step": 2473 + }, + { + "epoch": 1.9343236903831118, + "grad_norm": 0.4362090229988098, + "learning_rate": 3.281799358278881e-07, + "loss": 0.0284, + "step": 2474 + }, + { + "epoch": 1.9351055512118842, + "grad_norm": 0.4249899387359619, + "learning_rate": 3.2042094539120883e-07, + "loss": 0.0444, + "step": 2475 + }, + { + "epoch": 1.9358874120406568, + "grad_norm": 0.38876742124557495, + "learning_rate": 3.127544816275807e-07, + "loss": 0.0519, + "step": 2476 + }, + { + "epoch": 1.9366692728694292, + "grad_norm": 0.29063284397125244, + "learning_rate": 3.051805588155654e-07, + "loss": 0.0363, + "step": 2477 + }, + { + "epoch": 1.9374511336982017, + "grad_norm": 0.3670947849750519, + "learning_rate": 2.976991910613569e-07, + "loss": 0.0454, 
+ "step": 2478 + }, + { + "epoch": 1.9382329945269743, + "grad_norm": 0.4057813286781311, + "learning_rate": 2.903103922987649e-07, + "loss": 0.0766, + "step": 2479 + }, + { + "epoch": 1.9390148553557467, + "grad_norm": 0.319303035736084, + "learning_rate": 2.8301417628920357e-07, + "loss": 0.0424, + "step": 2480 + }, + { + "epoch": 1.939796716184519, + "grad_norm": 0.33699148893356323, + "learning_rate": 2.7581055662164736e-07, + "loss": 0.057, + "step": 2481 + }, + { + "epoch": 1.9405785770132917, + "grad_norm": 0.3684520125389099, + "learning_rate": 2.6869954671261963e-07, + "loss": 0.0534, + "step": 2482 + }, + { + "epoch": 1.9413604378420641, + "grad_norm": 0.42329129576683044, + "learning_rate": 2.6168115980614303e-07, + "loss": 0.0476, + "step": 2483 + }, + { + "epoch": 1.9421422986708365, + "grad_norm": 0.34844499826431274, + "learning_rate": 2.547554089737503e-07, + "loss": 0.031, + "step": 2484 + }, + { + "epoch": 1.9429241594996092, + "grad_norm": 0.3760523796081543, + "learning_rate": 2.4792230711444544e-07, + "loss": 0.0544, + "step": 2485 + }, + { + "epoch": 1.9437060203283816, + "grad_norm": 0.45071330666542053, + "learning_rate": 2.411818669546595e-07, + "loss": 0.0371, + "step": 2486 + }, + { + "epoch": 1.944487881157154, + "grad_norm": 0.3400191068649292, + "learning_rate": 2.3453410104825046e-07, + "loss": 0.0458, + "step": 2487 + }, + { + "epoch": 1.9452697419859266, + "grad_norm": 0.45273348689079285, + "learning_rate": 2.2797902177648657e-07, + "loss": 0.0435, + "step": 2488 + }, + { + "epoch": 1.946051602814699, + "grad_norm": 0.49700218439102173, + "learning_rate": 2.2151664134799076e-07, + "loss": 0.0761, + "step": 2489 + }, + { + "epoch": 1.9468334636434714, + "grad_norm": 0.3172673285007477, + "learning_rate": 2.1514697179875198e-07, + "loss": 0.0512, + "step": 2490 + }, + { + "epoch": 1.947615324472244, + "grad_norm": 0.367340087890625, + "learning_rate": 2.0887002499207498e-07, + "loss": 0.0585, + "step": 2491 + }, + { + "epoch": 1.9483971853010165, + "grad_norm": 0.3881520926952362, + "learning_rate": 2.02685812618586e-07, + "loss": 0.042, + "step": 2492 + }, + { + "epoch": 1.9491790461297889, + "grad_norm": 0.5710947513580322, + "learning_rate": 1.9659434619617723e-07, + "loss": 0.0846, + "step": 2493 + }, + { + "epoch": 1.9499609069585615, + "grad_norm": 0.5186334252357483, + "learning_rate": 1.9059563707002903e-07, + "loss": 0.052, + "step": 2494 + }, + { + "epoch": 1.950742767787334, + "grad_norm": 0.3576108515262604, + "learning_rate": 1.846896964125433e-07, + "loss": 0.0623, + "step": 2495 + }, + { + "epoch": 1.9515246286161063, + "grad_norm": 0.3158591389656067, + "learning_rate": 1.78876535223349e-07, + "loss": 0.054, + "step": 2496 + }, + { + "epoch": 1.952306489444879, + "grad_norm": 0.3423866331577301, + "learning_rate": 1.731561643292856e-07, + "loss": 0.0535, + "step": 2497 + }, + { + "epoch": 1.9530883502736511, + "grad_norm": 0.4671078622341156, + "learning_rate": 1.6752859438435854e-07, + "loss": 0.0585, + "step": 2498 + }, + { + "epoch": 1.9538702111024238, + "grad_norm": 0.4123114049434662, + "learning_rate": 1.6199383586975037e-07, + "loss": 0.0384, + "step": 2499 + }, + { + "epoch": 1.9546520719311964, + "grad_norm": 0.4386757016181946, + "learning_rate": 1.5655189909377088e-07, + "loss": 0.0864, + "step": 2500 + }, + { + "epoch": 1.9554339327599686, + "grad_norm": 0.4988419711589813, + "learning_rate": 1.5120279419185701e-07, + "loss": 0.0741, + "step": 2501 + }, + { + "epoch": 1.9562157935887412, + "grad_norm": 0.37020736932754517, + 
"learning_rate": 1.459465311265562e-07, + "loss": 0.0434, + "step": 2502 + }, + { + "epoch": 1.9569976544175138, + "grad_norm": 0.32374125719070435, + "learning_rate": 1.4078311968749313e-07, + "loss": 0.0402, + "step": 2503 + }, + { + "epoch": 1.957779515246286, + "grad_norm": 0.4434984028339386, + "learning_rate": 1.357125694913586e-07, + "loss": 0.0483, + "step": 2504 + }, + { + "epoch": 1.9585613760750586, + "grad_norm": 0.3723717927932739, + "learning_rate": 1.307348899818983e-07, + "loss": 0.0529, + "step": 2505 + }, + { + "epoch": 1.9593432369038313, + "grad_norm": 0.45597562193870544, + "learning_rate": 1.2585009042987982e-07, + "loss": 0.0595, + "step": 2506 + }, + { + "epoch": 1.9601250977326035, + "grad_norm": 0.4516230821609497, + "learning_rate": 1.2105817993309786e-07, + "loss": 0.0558, + "step": 2507 + }, + { + "epoch": 1.960906958561376, + "grad_norm": 0.39071428775787354, + "learning_rate": 1.1635916741633557e-07, + "loss": 0.059, + "step": 2508 + }, + { + "epoch": 1.9616888193901487, + "grad_norm": 0.38830170035362244, + "learning_rate": 1.1175306163135335e-07, + "loss": 0.037, + "step": 2509 + }, + { + "epoch": 1.962470680218921, + "grad_norm": 0.50648432970047, + "learning_rate": 1.0723987115688339e-07, + "loss": 0.0698, + "step": 2510 + }, + { + "epoch": 1.9632525410476935, + "grad_norm": 0.5163297653198242, + "learning_rate": 1.0281960439860739e-07, + "loss": 0.0494, + "step": 2511 + }, + { + "epoch": 1.964034401876466, + "grad_norm": 0.3927709460258484, + "learning_rate": 9.849226958912328e-08, + "loss": 0.068, + "step": 2512 + }, + { + "epoch": 1.9648162627052383, + "grad_norm": 0.3887207806110382, + "learning_rate": 9.425787478796744e-08, + "loss": 0.0552, + "step": 2513 + }, + { + "epoch": 1.965598123534011, + "grad_norm": 0.35503289103507996, + "learning_rate": 9.011642788156472e-08, + "loss": 0.0488, + "step": 2514 + }, + { + "epoch": 1.9663799843627834, + "grad_norm": 0.5193012952804565, + "learning_rate": 8.606793658323398e-08, + "loss": 0.093, + "step": 2515 + }, + { + "epoch": 1.9671618451915558, + "grad_norm": 0.4710843861103058, + "learning_rate": 8.211240843316037e-08, + "loss": 0.0571, + "step": 2516 + }, + { + "epoch": 1.9679437060203284, + "grad_norm": 0.288714200258255, + "learning_rate": 7.824985079839531e-08, + "loss": 0.0294, + "step": 2517 + }, + { + "epoch": 1.9687255668491008, + "grad_norm": 0.5777271389961243, + "learning_rate": 7.448027087283426e-08, + "loss": 0.0718, + "step": 2518 + }, + { + "epoch": 1.9695074276778732, + "grad_norm": 0.4299383759498596, + "learning_rate": 7.08036756771946e-08, + "loss": 0.0661, + "step": 2519 + }, + { + "epoch": 1.9702892885066459, + "grad_norm": 0.3604923486709595, + "learning_rate": 6.722007205903213e-08, + "loss": 0.0648, + "step": 2520 + }, + { + "epoch": 1.9710711493354183, + "grad_norm": 0.3248717784881592, + "learning_rate": 6.372946669269131e-08, + "loss": 0.0364, + "step": 2521 + }, + { + "epoch": 1.9718530101641907, + "grad_norm": 0.4972895085811615, + "learning_rate": 6.033186607931062e-08, + "loss": 0.0711, + "step": 2522 + }, + { + "epoch": 1.9726348709929633, + "grad_norm": 0.39111319184303284, + "learning_rate": 5.702727654682272e-08, + "loss": 0.0584, + "step": 2523 + }, + { + "epoch": 1.9734167318217357, + "grad_norm": 0.3655970096588135, + "learning_rate": 5.381570424992655e-08, + "loss": 0.0478, + "step": 2524 + }, + { + "epoch": 1.9741985926505081, + "grad_norm": 0.4365689754486084, + "learning_rate": 5.069715517007079e-08, + "loss": 0.0401, + "step": 2525 + }, + { + "epoch": 
1.9749804534792808, + "grad_norm": 0.6145164966583252, + "learning_rate": 4.7671635115453805e-08, + "loss": 0.0792, + "step": 2526 + }, + { + "epoch": 1.9757623143080532, + "grad_norm": 0.3456154763698578, + "learning_rate": 4.473914972101256e-08, + "loss": 0.0381, + "step": 2527 + }, + { + "epoch": 1.9765441751368256, + "grad_norm": 0.47349321842193604, + "learning_rate": 4.189970444841707e-08, + "loss": 0.0817, + "step": 2528 + }, + { + "epoch": 1.9773260359655982, + "grad_norm": 0.40150561928749084, + "learning_rate": 3.9153304586042605e-08, + "loss": 0.0846, + "step": 2529 + }, + { + "epoch": 1.9781078967943706, + "grad_norm": 0.49519282579421997, + "learning_rate": 3.649995524896976e-08, + "loss": 0.0631, + "step": 2530 + }, + { + "epoch": 1.978889757623143, + "grad_norm": 0.4056538939476013, + "learning_rate": 3.393966137898441e-08, + "loss": 0.0373, + "step": 2531 + }, + { + "epoch": 1.9796716184519156, + "grad_norm": 0.35286810994148254, + "learning_rate": 3.147242774454995e-08, + "loss": 0.0364, + "step": 2532 + }, + { + "epoch": 1.980453479280688, + "grad_norm": 0.3447190523147583, + "learning_rate": 2.9098258940818414e-08, + "loss": 0.0326, + "step": 2533 + }, + { + "epoch": 1.9812353401094605, + "grad_norm": 0.3452252745628357, + "learning_rate": 2.6817159389597167e-08, + "loss": 0.0362, + "step": 2534 + }, + { + "epoch": 1.982017200938233, + "grad_norm": 0.4439004361629486, + "learning_rate": 2.4629133339371113e-08, + "loss": 0.0566, + "step": 2535 + }, + { + "epoch": 1.9827990617670055, + "grad_norm": 0.37382808327674866, + "learning_rate": 2.253418486526382e-08, + "loss": 0.0406, + "step": 2536 + }, + { + "epoch": 1.983580922595778, + "grad_norm": 0.327451229095459, + "learning_rate": 2.0532317869059735e-08, + "loss": 0.0353, + "step": 2537 + }, + { + "epoch": 1.9843627834245505, + "grad_norm": 0.3801063001155853, + "learning_rate": 1.862353607917089e-08, + "loss": 0.0353, + "step": 2538 + }, + { + "epoch": 1.985144644253323, + "grad_norm": 0.45267173647880554, + "learning_rate": 1.6807843050636874e-08, + "loss": 0.0283, + "step": 2539 + }, + { + "epoch": 1.9859265050820953, + "grad_norm": 0.41038188338279724, + "learning_rate": 1.508524216514151e-08, + "loss": 0.0328, + "step": 2540 + }, + { + "epoch": 1.986708365910868, + "grad_norm": 0.3300861716270447, + "learning_rate": 1.345573663096289e-08, + "loss": 0.0451, + "step": 2541 + }, + { + "epoch": 1.9874902267396404, + "grad_norm": 0.32301145792007446, + "learning_rate": 1.1919329483017772e-08, + "loss": 0.0481, + "step": 2542 + }, + { + "epoch": 1.9882720875684128, + "grad_norm": 0.3235101103782654, + "learning_rate": 1.0476023582806083e-08, + "loss": 0.0572, + "step": 2543 + }, + { + "epoch": 1.9890539483971854, + "grad_norm": 0.3280138373374939, + "learning_rate": 9.12582161844422e-09, + "loss": 0.0518, + "step": 2544 + }, + { + "epoch": 1.9898358092259578, + "grad_norm": 0.4068671762943268, + "learning_rate": 7.868726104642844e-09, + "loss": 0.0492, + "step": 2545 + }, + { + "epoch": 1.9906176700547302, + "grad_norm": 0.4065364897251129, + "learning_rate": 6.704739382701331e-09, + "loss": 0.048, + "step": 2546 + }, + { + "epoch": 1.9913995308835029, + "grad_norm": 0.35321515798568726, + "learning_rate": 5.633863620507773e-09, + "loss": 0.0613, + "step": 2547 + }, + { + "epoch": 1.9921813917122753, + "grad_norm": 0.3954537510871887, + "learning_rate": 4.656100812538977e-09, + "loss": 0.0329, + "step": 2548 + }, + { + "epoch": 1.9929632525410477, + "grad_norm": 0.32833099365234375, + "learning_rate": 
3.7714527798438095e-09, + "loss": 0.0346, + "step": 2549 + }, + { + "epoch": 1.9937451133698203, + "grad_norm": 0.4241968095302582, + "learning_rate": 2.9799211700543005e-09, + "loss": 0.0452, + "step": 2550 + }, + { + "epoch": 1.9945269741985927, + "grad_norm": 0.4431023597717285, + "learning_rate": 2.2815074573745434e-09, + "loss": 0.0458, + "step": 2551 + }, + { + "epoch": 1.9953088350273651, + "grad_norm": 0.42436906695365906, + "learning_rate": 1.6762129425751394e-09, + "loss": 0.0471, + "step": 2552 + }, + { + "epoch": 1.9960906958561377, + "grad_norm": 0.4154678285121918, + "learning_rate": 1.164038752998753e-09, + "loss": 0.0637, + "step": 2553 + }, + { + "epoch": 1.99687255668491, + "grad_norm": 0.3614688813686371, + "learning_rate": 7.449858425545575e-10, + "loss": 0.0862, + "step": 2554 + }, + { + "epoch": 1.9976544175136826, + "grad_norm": 0.3825763165950775, + "learning_rate": 4.1905499171268626e-10, + "loss": 0.0959, + "step": 2555 + }, + { + "epoch": 1.9984362783424552, + "grad_norm": 0.3862949311733246, + "learning_rate": 1.8624680751533342e-10, + "loss": 0.0502, + "step": 2556 + }, + { + "epoch": 1.9992181391712274, + "grad_norm": 0.36724698543548584, + "learning_rate": 4.6561723560101245e-11, + "loss": 0.0472, + "step": 2557 + }, + { + "epoch": 2.0, + "grad_norm": 0.4372091293334961, + "learning_rate": 0.0, + "loss": 0.0345, + "step": 2558 + }, + { + "epoch": 2.0, + "step": 2558, + "total_flos": 2.7985345273774735e+18, + "train_loss": 0.11478913314727902, + "train_runtime": 9036.0615, + "train_samples_per_second": 1.132, + "train_steps_per_second": 0.283 + } + ], + "logging_steps": 1, + "max_steps": 2558, + "num_input_tokens_seen": 0, + "num_train_epochs": 2, + "save_steps": 1000, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 2.7985345273774735e+18, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}
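Since the trainer state above is plain JSON, the loss curve and the end-of-run summary can be extracted with the standard library alone. The sketch below is a minimal, hypothetical post-processing script, not part of the training run: only the file name `trainer_state.json` and the keys visible in this log (`log_history`, `loss`, `eval_loss`, `step`, `train_loss`, `train_runtime`) are taken from the file; everything else is an assumption.

```python
# Minimal sketch: summarize a trainer_state.json like the one in this diff.
# Assumes the keys shown in the log above; the script is illustrative only.
import json

with open("trainer_state.json") as f:  # hypothetical local path to this file
    state = json.load(f)

# Per-step training losses and the periodic eval_loss entries (eval_steps = 200).
train_points = [(e["step"], e["loss"]) for e in state["log_history"] if "loss" in e]
eval_points = [(e["step"], e["eval_loss"]) for e in state["log_history"] if "eval_loss" in e]

print(f"logged train steps: {len(train_points)}, eval points: {len(eval_points)}")
print("last train losses:", [round(loss, 4) for _, loss in train_points[-5:]])
print("eval_loss by step:", eval_points)

# The final log_history entry carries the aggregate statistics written at the end of training.
summary = state["log_history"][-1]
print("train_loss:", summary.get("train_loss"), "runtime (s):", summary.get("train_runtime"))
```

Run against this checkpoint, the last print would report the aggregate `train_loss` (~0.1148) and `train_runtime` (~9036 s) recorded in the final summary entry.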