{
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.9990426041168023, |
|
"eval_steps": 500, |
|
"global_step": 348, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0057443752991862135, |
|
"grad_norm": 2.3415639400482178, |
|
"learning_rate": 5.714285714285715e-07, |
|
"loss": 0.9401, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.011488750598372427, |
|
"grad_norm": 2.4453043937683105, |
|
"learning_rate": 1.142857142857143e-06, |
|
"loss": 0.9748, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01723312589755864, |
|
"grad_norm": 2.4993081092834473, |
|
"learning_rate": 1.7142857142857145e-06, |
|
"loss": 1.0062, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.022977501196744854, |
|
"grad_norm": 2.389991283416748, |
|
"learning_rate": 2.285714285714286e-06, |
|
"loss": 0.9669, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.028721876495931067, |
|
"grad_norm": 2.3091812133789062, |
|
"learning_rate": 2.8571428571428573e-06, |
|
"loss": 0.9599, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.03446625179511728, |
|
"grad_norm": 2.33378005027771, |
|
"learning_rate": 3.428571428571429e-06, |
|
"loss": 0.9387, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.040210627094303494, |
|
"grad_norm": 1.7867075204849243, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 0.9544, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.04595500239348971, |
|
"grad_norm": 1.6889110803604126, |
|
"learning_rate": 4.571428571428572e-06, |
|
"loss": 0.9329, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.05169937769267592, |
|
"grad_norm": 1.312399983406067, |
|
"learning_rate": 5.142857142857142e-06, |
|
"loss": 0.8812, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.057443752991862135, |
|
"grad_norm": 1.330713152885437, |
|
"learning_rate": 5.7142857142857145e-06, |
|
"loss": 0.9434, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.06318812829104835, |
|
"grad_norm": 1.222688913345337, |
|
"learning_rate": 6.285714285714286e-06, |
|
"loss": 0.9299, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.06893250359023456, |
|
"grad_norm": 0.9597654938697815, |
|
"learning_rate": 6.857142857142858e-06, |
|
"loss": 0.8267, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.07467687888942078, |
|
"grad_norm": 1.4648733139038086, |
|
"learning_rate": 7.428571428571429e-06, |
|
"loss": 0.9062, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.08042125418860699, |
|
"grad_norm": 1.3538185358047485, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.8824, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.0861656294877932, |
|
"grad_norm": 1.204816460609436, |
|
"learning_rate": 8.571428571428571e-06, |
|
"loss": 0.8973, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.09191000478697942, |
|
"grad_norm": 0.9612676501274109, |
|
"learning_rate": 9.142857142857144e-06, |
|
"loss": 0.8962, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.09765438008616563, |
|
"grad_norm": 1.2647408246994019, |
|
"learning_rate": 9.714285714285715e-06, |
|
"loss": 0.8473, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.10339875538535184, |
|
"grad_norm": 1.111392855644226, |
|
"learning_rate": 1.0285714285714285e-05, |
|
"loss": 0.8617, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.10914313068453806, |
|
"grad_norm": 0.9279037714004517, |
|
"learning_rate": 1.0857142857142858e-05, |
|
"loss": 0.7841, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.11488750598372427, |
|
"grad_norm": 0.8200046420097351, |
|
"learning_rate": 1.1428571428571429e-05, |
|
"loss": 0.823, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.12063188128291048, |
|
"grad_norm": 0.7917659282684326, |
|
"learning_rate": 1.2e-05, |
|
"loss": 0.8065, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.1263762565820967, |
|
"grad_norm": 0.7997102737426758, |
|
"learning_rate": 1.2571428571428572e-05, |
|
"loss": 0.7953, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.13212063188128292, |
|
"grad_norm": 0.7637982368469238, |
|
"learning_rate": 1.3142857142857145e-05, |
|
"loss": 0.7559, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.13786500718046912, |
|
"grad_norm": 0.6843277215957642, |
|
"learning_rate": 1.3714285714285716e-05, |
|
"loss": 0.7994, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.14360938247965535, |
|
"grad_norm": 0.5938118100166321, |
|
"learning_rate": 1.4285714285714287e-05, |
|
"loss": 0.7868, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.14935375777884155, |
|
"grad_norm": 0.6095629930496216, |
|
"learning_rate": 1.4857142857142858e-05, |
|
"loss": 0.7764, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.15509813307802778, |
|
"grad_norm": 0.7267497777938843, |
|
"learning_rate": 1.542857142857143e-05, |
|
"loss": 0.82, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.16084250837721398, |
|
"grad_norm": 0.6032124757766724, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 0.777, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.1665868836764002, |
|
"grad_norm": 0.5352451801300049, |
|
"learning_rate": 1.6571428571428574e-05, |
|
"loss": 0.7643, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.1723312589755864, |
|
"grad_norm": 0.6523836851119995, |
|
"learning_rate": 1.7142857142857142e-05, |
|
"loss": 0.7727, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.17807563427477263, |
|
"grad_norm": 0.5683100819587708, |
|
"learning_rate": 1.7714285714285717e-05, |
|
"loss": 0.7241, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.18382000957395883, |
|
"grad_norm": 0.5439429879188538, |
|
"learning_rate": 1.8285714285714288e-05, |
|
"loss": 0.7011, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.18956438487314506, |
|
"grad_norm": 0.5817928314208984, |
|
"learning_rate": 1.885714285714286e-05, |
|
"loss": 0.7521, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.19530876017233126, |
|
"grad_norm": 0.5671186447143555, |
|
"learning_rate": 1.942857142857143e-05, |
|
"loss": 0.7757, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.20105313547151749, |
|
"grad_norm": 0.5008598566055298, |
|
"learning_rate": 2e-05, |
|
"loss": 0.7618, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.20679751077070369, |
|
"grad_norm": 0.4747866988182068, |
|
"learning_rate": 1.9999496293646753e-05, |
|
"loss": 0.7546, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.2125418860698899, |
|
"grad_norm": 0.5479198694229126, |
|
"learning_rate": 1.999798522533102e-05, |
|
"loss": 0.7362, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.2182862613690761, |
|
"grad_norm": 0.4488674998283386, |
|
"learning_rate": 1.9995466947279753e-05, |
|
"loss": 0.751, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.22403063666826234, |
|
"grad_norm": 0.4446718692779541, |
|
"learning_rate": 1.9991941713187477e-05, |
|
"loss": 0.7554, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.22977501196744854, |
|
"grad_norm": 0.49354538321495056, |
|
"learning_rate": 1.9987409878190752e-05, |
|
"loss": 0.7234, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.23551938726663477, |
|
"grad_norm": 0.4710958003997803, |
|
"learning_rate": 1.99818718988324e-05, |
|
"loss": 0.7789, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.24126376256582097, |
|
"grad_norm": 0.43605947494506836, |
|
"learning_rate": 1.9975328333015497e-05, |
|
"loss": 0.7061, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.2470081378650072, |
|
"grad_norm": 0.45247551798820496, |
|
"learning_rate": 1.9967779839947172e-05, |
|
"loss": 0.7491, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.2527525131641934, |
|
"grad_norm": 0.46377840638160706, |
|
"learning_rate": 1.9959227180072216e-05, |
|
"loss": 0.7646, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.2584968884633796, |
|
"grad_norm": 0.43657904863357544, |
|
"learning_rate": 1.9949671214996448e-05, |
|
"loss": 0.7471, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.26424126376256585, |
|
"grad_norm": 0.45506250858306885, |
|
"learning_rate": 1.993911290739993e-05, |
|
"loss": 0.6914, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.26998563906175205, |
|
"grad_norm": 0.43989095091819763, |
|
"learning_rate": 1.992755332093999e-05, |
|
"loss": 0.7263, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.27573001436093825, |
|
"grad_norm": 0.42011386156082153, |
|
"learning_rate": 1.9914993620144055e-05, |
|
"loss": 0.7128, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.28147438966012445, |
|
"grad_norm": 0.5503424406051636, |
|
"learning_rate": 1.990143507029234e-05, |
|
"loss": 0.7515, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.2872187649593107, |
|
"grad_norm": 0.40828776359558105, |
|
"learning_rate": 1.9886879037290385e-05, |
|
"loss": 0.7013, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.2929631402584969, |
|
"grad_norm": 0.476153165102005, |
|
"learning_rate": 1.9871326987531453e-05, |
|
"loss": 0.6848, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.2987075155576831, |
|
"grad_norm": 0.49468863010406494, |
|
"learning_rate": 1.98547804877488e-05, |
|
"loss": 0.7401, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.3044518908568693, |
|
"grad_norm": 0.42131319642066956, |
|
"learning_rate": 1.983724120485783e-05, |
|
"loss": 0.7085, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.31019626615605556, |
|
"grad_norm": 0.4671567976474762, |
|
"learning_rate": 1.9818710905788195e-05, |
|
"loss": 0.6934, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.31594064145524176, |
|
"grad_norm": 0.4545610249042511, |
|
"learning_rate": 1.9799191457305767e-05, |
|
"loss": 0.6978, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.32168501675442795, |
|
"grad_norm": 0.4409993290901184, |
|
"learning_rate": 1.977868482582459e-05, |
|
"loss": 0.7105, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.32742939205361415, |
|
"grad_norm": 0.42373812198638916, |
|
"learning_rate": 1.9757193077208776e-05, |
|
"loss": 0.7167, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.3331737673528004, |
|
"grad_norm": 0.47674381732940674, |
|
"learning_rate": 1.9734718376564386e-05, |
|
"loss": 0.714, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.3389181426519866, |
|
"grad_norm": 0.42494723200798035, |
|
"learning_rate": 1.9711262988021322e-05, |
|
"loss": 0.684, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.3446625179511728, |
|
"grad_norm": 0.42938312888145447, |
|
"learning_rate": 1.968682927450523e-05, |
|
"loss": 0.6904, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.350406893250359, |
|
"grad_norm": 0.4486340880393982, |
|
"learning_rate": 1.9661419697499455e-05, |
|
"loss": 0.7125, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.35615126854954526, |
|
"grad_norm": 0.39026889204978943, |
|
"learning_rate": 1.9635036816797072e-05, |
|
"loss": 0.7237, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.36189564384873146, |
|
"grad_norm": 0.39467936754226685, |
|
"learning_rate": 1.960768329024301e-05, |
|
"loss": 0.7073, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.36764001914791766, |
|
"grad_norm": 0.48663580417633057, |
|
"learning_rate": 1.957936187346628e-05, |
|
"loss": 0.6869, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.37338439444710386, |
|
"grad_norm": 0.39954885840415955, |
|
"learning_rate": 1.955007541960241e-05, |
|
"loss": 0.7127, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.3791287697462901, |
|
"grad_norm": 0.4239981770515442, |
|
"learning_rate": 1.9519826879005964e-05, |
|
"loss": 0.7028, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.3848731450454763, |
|
"grad_norm": 0.40746739506721497, |
|
"learning_rate": 1.948861929895336e-05, |
|
"loss": 0.6578, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.3906175203446625, |
|
"grad_norm": 0.4423011541366577, |
|
"learning_rate": 1.945645582333587e-05, |
|
"loss": 0.6866, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.3963618956438487, |
|
"grad_norm": 0.45761850476264954, |
|
"learning_rate": 1.9423339692342885e-05, |
|
"loss": 0.7173, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.40210627094303497, |
|
"grad_norm": 0.3713398575782776, |
|
"learning_rate": 1.9389274242135528e-05, |
|
"loss": 0.6318, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.40785064624222117, |
|
"grad_norm": 0.43260371685028076, |
|
"learning_rate": 1.9354262904510544e-05, |
|
"loss": 0.6907, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.41359502154140737, |
|
"grad_norm": 0.3918673098087311, |
|
"learning_rate": 1.9318309206554567e-05, |
|
"loss": 0.666, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.41933939684059357, |
|
"grad_norm": 0.4002845585346222, |
|
"learning_rate": 1.9281416770288806e-05, |
|
"loss": 0.6948, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.4250837721397798, |
|
"grad_norm": 0.4247376024723053, |
|
"learning_rate": 1.924358931230418e-05, |
|
"loss": 0.696, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.430828147438966, |
|
"grad_norm": 0.3774103820323944, |
|
"learning_rate": 1.920483064338687e-05, |
|
"loss": 0.7083, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.4365725227381522, |
|
"grad_norm": 0.4032825231552124, |
|
"learning_rate": 1.9165144668134426e-05, |
|
"loss": 0.6791, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.4423168980373384, |
|
"grad_norm": 0.3765489459037781, |
|
"learning_rate": 1.9124535384562423e-05, |
|
"loss": 0.7115, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.4480612733365247, |
|
"grad_norm": 0.3774646520614624, |
|
"learning_rate": 1.9083006883701688e-05, |
|
"loss": 0.6839, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.4538056486357109, |
|
"grad_norm": 0.4303993880748749, |
|
"learning_rate": 1.904056334918617e-05, |
|
"loss": 0.6983, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.4595500239348971, |
|
"grad_norm": 0.37852737307548523, |
|
"learning_rate": 1.8997209056831462e-05, |
|
"loss": 0.673, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.4652943992340833, |
|
"grad_norm": 0.4081466794013977, |
|
"learning_rate": 1.8952948374204066e-05, |
|
"loss": 0.6806, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.47103877453326953, |
|
"grad_norm": 0.4459618330001831, |
|
"learning_rate": 1.8907785760181392e-05, |
|
"loss": 0.7013, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.47678314983245573, |
|
"grad_norm": 0.4105455279350281, |
|
"learning_rate": 1.8861725764502557e-05, |
|
"loss": 0.6836, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.48252752513164193, |
|
"grad_norm": 0.4865868091583252, |
|
"learning_rate": 1.881477302731006e-05, |
|
"loss": 0.7153, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.48827190043082813, |
|
"grad_norm": 0.38876765966415405, |
|
"learning_rate": 1.87669322786823e-05, |
|
"loss": 0.6666, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.4940162757300144, |
|
"grad_norm": 0.4097835421562195, |
|
"learning_rate": 1.8718208338157082e-05, |
|
"loss": 0.686, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.4997606510292006, |
|
"grad_norm": 0.3842048645019531, |
|
"learning_rate": 1.866860611424609e-05, |
|
"loss": 0.7049, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.5055050263283868, |
|
"grad_norm": 0.4164787530899048, |
|
"learning_rate": 1.8618130603940386e-05, |
|
"loss": 0.6875, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.511249401627573, |
|
"grad_norm": 0.3905528783798218, |
|
"learning_rate": 1.856678689220701e-05, |
|
"loss": 0.6863, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.5169937769267592, |
|
"grad_norm": 0.42154574394226074, |
|
"learning_rate": 1.851458015147673e-05, |
|
"loss": 0.696, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.5227381522259454, |
|
"grad_norm": 0.43495407700538635, |
|
"learning_rate": 1.846151564112294e-05, |
|
"loss": 0.67, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.5284825275251317, |
|
"grad_norm": 0.4206148684024811, |
|
"learning_rate": 1.840759870693184e-05, |
|
"loss": 0.6794, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.5342269028243178, |
|
"grad_norm": 0.37400227785110474, |
|
"learning_rate": 1.8352834780563888e-05, |
|
"loss": 0.6633, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.5399712781235041, |
|
"grad_norm": 0.43993040919303894, |
|
"learning_rate": 1.8297229379006614e-05, |
|
"loss": 0.6946, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.5457156534226902, |
|
"grad_norm": 0.42568662762641907, |
|
"learning_rate": 1.8240788104018824e-05, |
|
"loss": 0.685, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.5514600287218765, |
|
"grad_norm": 0.39264383912086487, |
|
"learning_rate": 1.8183516641566278e-05, |
|
"loss": 0.6972, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.5572044040210627, |
|
"grad_norm": 0.497275173664093, |
|
"learning_rate": 1.8125420761248878e-05, |
|
"loss": 0.6646, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.5629487793202489, |
|
"grad_norm": 0.47043195366859436, |
|
"learning_rate": 1.806650631571943e-05, |
|
"loss": 0.7161, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.5686931546194351, |
|
"grad_norm": 0.3922593295574188, |
|
"learning_rate": 1.8006779240094024e-05, |
|
"loss": 0.6633, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.5744375299186214, |
|
"grad_norm": 0.5541552305221558, |
|
"learning_rate": 1.7946245551354156e-05, |
|
"loss": 0.6671, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.5801819052178075, |
|
"grad_norm": 0.4806561768054962, |
|
"learning_rate": 1.7884911347740556e-05, |
|
"loss": 0.7065, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.5859262805169938, |
|
"grad_norm": 0.5252166986465454, |
|
"learning_rate": 1.782278280813882e-05, |
|
"loss": 0.6993, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.59167065581618, |
|
"grad_norm": 0.3711872696876526, |
|
"learning_rate": 1.775986619145697e-05, |
|
"loss": 0.6805, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.5974150311153662, |
|
"grad_norm": 0.4423963129520416, |
|
"learning_rate": 1.7696167835994927e-05, |
|
"loss": 0.6613, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.6031594064145525, |
|
"grad_norm": 0.455920934677124, |
|
"learning_rate": 1.7631694158805945e-05, |
|
"loss": 0.6647, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.6089037817137386, |
|
"grad_norm": 0.46443215012550354, |
|
"learning_rate": 1.7566451655050197e-05, |
|
"loss": 0.6931, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.6146481570129249, |
|
"grad_norm": 0.4322095811367035, |
|
"learning_rate": 1.7500446897340408e-05, |
|
"loss": 0.6849, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.6203925323121111, |
|
"grad_norm": 0.4423225522041321, |
|
"learning_rate": 1.7433686535079736e-05, |
|
"loss": 0.7008, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.6261369076112973, |
|
"grad_norm": 0.47950470447540283, |
|
"learning_rate": 1.736617729379191e-05, |
|
"loss": 0.6424, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.6318812829104835, |
|
"grad_norm": 0.4083796441555023, |
|
"learning_rate": 1.7297925974443675e-05, |
|
"loss": 0.7132, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.6376256582096697, |
|
"grad_norm": 0.5103936791419983, |
|
"learning_rate": 1.7228939452759666e-05, |
|
"loss": 0.6938, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.6433700335088559, |
|
"grad_norm": 0.44686976075172424, |
|
"learning_rate": 1.7159224678529734e-05, |
|
"loss": 0.7058, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.6491144088080422, |
|
"grad_norm": 0.43743202090263367, |
|
"learning_rate": 1.7088788674908817e-05, |
|
"loss": 0.6647, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.6548587841072283, |
|
"grad_norm": 0.4688310921192169, |
|
"learning_rate": 1.7017638537709426e-05, |
|
"loss": 0.6925, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.6606031594064146, |
|
"grad_norm": 0.46285468339920044, |
|
"learning_rate": 1.6945781434686783e-05, |
|
"loss": 0.7014, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.6663475347056008, |
|
"grad_norm": 0.45641931891441345, |
|
"learning_rate": 1.6873224604816753e-05, |
|
"loss": 0.6573, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.672091910004787, |
|
"grad_norm": 0.4532499611377716, |
|
"learning_rate": 1.679997535756657e-05, |
|
"loss": 0.6824, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.6778362853039732, |
|
"grad_norm": 0.4781695306301117, |
|
"learning_rate": 1.672604107215848e-05, |
|
"loss": 0.7109, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.6835806606031594, |
|
"grad_norm": 0.4657115638256073, |
|
"learning_rate": 1.6651429196826337e-05, |
|
"loss": 0.7126, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.6893250359023456, |
|
"grad_norm": 0.42937028408050537, |
|
"learning_rate": 1.6576147248065268e-05, |
|
"loss": 0.6648, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.6950694112015319, |
|
"grad_norm": 0.40193477272987366, |
|
"learning_rate": 1.6500202809874446e-05, |
|
"loss": 0.6911, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.700813786500718, |
|
"grad_norm": 0.40844428539276123, |
|
"learning_rate": 1.6423603532993074e-05, |
|
"loss": 0.6647, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.7065581617999043, |
|
"grad_norm": 0.4080415666103363, |
|
"learning_rate": 1.634635713412964e-05, |
|
"loss": 0.6576, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.7123025370990905, |
|
"grad_norm": 0.42746731638908386, |
|
"learning_rate": 1.626847139518452e-05, |
|
"loss": 0.7106, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.7180469123982767, |
|
"grad_norm": 0.43120649456977844, |
|
"learning_rate": 1.618995416246601e-05, |
|
"loss": 0.7052, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.7237912876974629, |
|
"grad_norm": 0.4337042570114136, |
|
"learning_rate": 1.6110813345899914e-05, |
|
"loss": 0.6595, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.7295356629966491, |
|
"grad_norm": 0.44476258754730225, |
|
"learning_rate": 1.6031056918232642e-05, |
|
"loss": 0.6587, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.7352800382958353, |
|
"grad_norm": 0.4917714297771454, |
|
"learning_rate": 1.595069291422807e-05, |
|
"loss": 0.6742, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.7410244135950216, |
|
"grad_norm": 0.4178856909275055, |
|
"learning_rate": 1.586972942985807e-05, |
|
"loss": 0.6836, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.7467687888942077, |
|
"grad_norm": 0.44966086745262146, |
|
"learning_rate": 1.5788174621486936e-05, |
|
"loss": 0.7005, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.752513164193394, |
|
"grad_norm": 0.3832118511199951, |
|
"learning_rate": 1.570603670504969e-05, |
|
"loss": 0.6582, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.7582575394925802, |
|
"grad_norm": 0.4839877188205719, |
|
"learning_rate": 1.5623323955224404e-05, |
|
"loss": 0.6633, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.7640019147917664, |
|
"grad_norm": 0.4487728774547577, |
|
"learning_rate": 1.5540044704598588e-05, |
|
"loss": 0.6549, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.7697462900909526, |
|
"grad_norm": 0.4354808032512665, |
|
"learning_rate": 1.5456207342829777e-05, |
|
"loss": 0.646, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.7754906653901388, |
|
"grad_norm": 0.5101595520973206, |
|
"learning_rate": 1.5371820315800316e-05, |
|
"loss": 0.6699, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.781235040689325, |
|
"grad_norm": 0.4810461103916168, |
|
"learning_rate": 1.5286892124766546e-05, |
|
"loss": 0.6794, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.7869794159885113, |
|
"grad_norm": 0.4083465039730072, |
|
"learning_rate": 1.5201431325502332e-05, |
|
"loss": 0.6381, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.7927237912876974, |
|
"grad_norm": 0.41611915826797485, |
|
"learning_rate": 1.5115446527437193e-05, |
|
"loss": 0.6881, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.7984681665868837, |
|
"grad_norm": 0.5111805200576782, |
|
"learning_rate": 1.5028946392788934e-05, |
|
"loss": 0.6979, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.8042125418860699, |
|
"grad_norm": 0.4130695164203644, |
|
"learning_rate": 1.4941939635691036e-05, |
|
"loss": 0.7042, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.8099569171852561, |
|
"grad_norm": 0.3808434307575226, |
|
"learning_rate": 1.4854435021314766e-05, |
|
"loss": 0.6561, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.8157012924844423, |
|
"grad_norm": 0.4564625918865204, |
|
"learning_rate": 1.4766441364986162e-05, |
|
"loss": 0.6984, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.8214456677836285, |
|
"grad_norm": 0.42112821340560913, |
|
"learning_rate": 1.467796753129797e-05, |
|
"loss": 0.6481, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.8271900430828147, |
|
"grad_norm": 0.4193544387817383, |
|
"learning_rate": 1.4589022433216616e-05, |
|
"loss": 0.6925, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.832934418382001, |
|
"grad_norm": 0.4384324252605438, |
|
"learning_rate": 1.4499615031184297e-05, |
|
"loss": 0.6748, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.8386787936811871, |
|
"grad_norm": 0.4146939218044281, |
|
"learning_rate": 1.4409754332216303e-05, |
|
"loss": 0.6733, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.8444231689803734, |
|
"grad_norm": 0.43513917922973633, |
|
"learning_rate": 1.431944938899363e-05, |
|
"loss": 0.6427, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.8501675442795597, |
|
"grad_norm": 0.4932457208633423, |
|
"learning_rate": 1.4228709298950998e-05, |
|
"loss": 0.6716, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.8559119195787458, |
|
"grad_norm": 0.41004639863967896, |
|
"learning_rate": 1.4137543203360382e-05, |
|
"loss": 0.6462, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.861656294877932, |
|
"grad_norm": 0.4532361328601837, |
|
"learning_rate": 1.4045960286410093e-05, |
|
"loss": 0.6759, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.8674006701771182, |
|
"grad_norm": 0.43148860335350037, |
|
"learning_rate": 1.395396977427955e-05, |
|
"loss": 0.6963, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.8731450454763044, |
|
"grad_norm": 0.49120664596557617, |
|
"learning_rate": 1.3861580934209832e-05, |
|
"loss": 0.6123, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.8788894207754907, |
|
"grad_norm": 0.41906511783599854, |
|
"learning_rate": 1.376880307357009e-05, |
|
"loss": 0.6903, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.8846337960746768, |
|
"grad_norm": 0.510985791683197, |
|
"learning_rate": 1.3675645538919884e-05, |
|
"loss": 0.6423, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.8903781713738631, |
|
"grad_norm": 0.4236752986907959, |
|
"learning_rate": 1.3582117715067628e-05, |
|
"loss": 0.6567, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.8961225466730494, |
|
"grad_norm": 0.4509469270706177, |
|
"learning_rate": 1.3488229024125142e-05, |
|
"loss": 0.6772, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.9018669219722355, |
|
"grad_norm": 0.43794822692871094, |
|
"learning_rate": 1.3393988924558445e-05, |
|
"loss": 0.6629, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.9076112972714218, |
|
"grad_norm": 0.4324384331703186, |
|
"learning_rate": 1.3299406910234917e-05, |
|
"loss": 0.6906, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.9133556725706079, |
|
"grad_norm": 0.43841367959976196, |
|
"learning_rate": 1.3204492509466862e-05, |
|
"loss": 0.6552, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.9191000478697942, |
|
"grad_norm": 0.4490503966808319, |
|
"learning_rate": 1.3109255284051615e-05, |
|
"loss": 0.6611, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.9248444231689804, |
|
"grad_norm": 0.42710503935813904, |
|
"learning_rate": 1.3013704828308276e-05, |
|
"loss": 0.6859, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.9305887984681666, |
|
"grad_norm": 0.43584081530570984, |
|
"learning_rate": 1.2917850768111171e-05, |
|
"loss": 0.6075, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.9363331737673528, |
|
"grad_norm": 0.5099417567253113, |
|
"learning_rate": 1.282170275992012e-05, |
|
"loss": 0.6776, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.9420775490665391, |
|
"grad_norm": 0.408748984336853, |
|
"learning_rate": 1.2725270489807637e-05, |
|
"loss": 0.6948, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.9478219243657252, |
|
"grad_norm": 0.4724796712398529, |
|
"learning_rate": 1.2628563672483147e-05, |
|
"loss": 0.6895, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.9535662996649115, |
|
"grad_norm": 0.46162188053131104, |
|
"learning_rate": 1.2531592050314308e-05, |
|
"loss": 0.6331, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.9593106749640976, |
|
"grad_norm": 0.5146743059158325, |
|
"learning_rate": 1.2434365392345553e-05, |
|
"loss": 0.6513, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.9650550502632839, |
|
"grad_norm": 0.45443448424339294, |
|
"learning_rate": 1.2336893493313946e-05, |
|
"loss": 0.6486, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.9707994255624701, |
|
"grad_norm": 0.45461684465408325, |
|
"learning_rate": 1.223918617266245e-05, |
|
"loss": 0.6673, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.9765438008616563, |
|
"grad_norm": 0.48310863971710205, |
|
"learning_rate": 1.2141253273550698e-05, |
|
"loss": 0.6593, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.9822881761608425, |
|
"grad_norm": 0.3722151517868042, |
|
"learning_rate": 1.2043104661863386e-05, |
|
"loss": 0.7001, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.9880325514600288, |
|
"grad_norm": 0.4841504395008087, |
|
"learning_rate": 1.1944750225216363e-05, |
|
"loss": 0.6606, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.9937769267592149, |
|
"grad_norm": 0.4420206546783447, |
|
"learning_rate": 1.1846199871960557e-05, |
|
"loss": 0.7026, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.9995213020584012, |
|
"grad_norm": 0.4380941689014435, |
|
"learning_rate": 1.1747463530183781e-05, |
|
"loss": 0.6817, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 1.0052656773575874, |
|
"grad_norm": 1.3084274530410767, |
|
"learning_rate": 1.1648551146710557e-05, |
|
"loss": 1.1567, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.0110100526567736, |
|
"grad_norm": 0.38088297843933105, |
|
"learning_rate": 1.1549472686100079e-05, |
|
"loss": 0.6346, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 1.0167544279559597, |
|
"grad_norm": 0.4739629924297333, |
|
"learning_rate": 1.145023812964237e-05, |
|
"loss": 0.67, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 1.022498803255146, |
|
"grad_norm": 0.4583199620246887, |
|
"learning_rate": 1.1350857474352734e-05, |
|
"loss": 0.5971, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 1.0282431785543322, |
|
"grad_norm": 0.473290354013443, |
|
"learning_rate": 1.1251340731964664e-05, |
|
"loss": 0.5754, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 1.0339875538535184, |
|
"grad_norm": 0.5124115943908691, |
|
"learning_rate": 1.1151697927921242e-05, |
|
"loss": 0.6753, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.0397319291527047, |
|
"grad_norm": 0.46393120288848877, |
|
"learning_rate": 1.1051939100365154e-05, |
|
"loss": 0.6188, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 1.0454763044518909, |
|
"grad_norm": 0.45003098249435425, |
|
"learning_rate": 1.0952074299127451e-05, |
|
"loss": 0.612, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 1.051220679751077, |
|
"grad_norm": 0.5510356426239014, |
|
"learning_rate": 1.0852113584715103e-05, |
|
"loss": 0.6337, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 1.0569650550502634, |
|
"grad_norm": 0.4436226785182953, |
|
"learning_rate": 1.0752067027297486e-05, |
|
"loss": 0.6242, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 1.0627094303494495, |
|
"grad_norm": 0.5315257906913757, |
|
"learning_rate": 1.065194470569193e-05, |
|
"loss": 0.6423, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.0684538056486357, |
|
"grad_norm": 0.5305011868476868, |
|
"learning_rate": 1.0551756706348331e-05, |
|
"loss": 0.6507, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 1.0741981809478218, |
|
"grad_norm": 0.40931978821754456, |
|
"learning_rate": 1.0451513122333042e-05, |
|
"loss": 0.6502, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 1.0799425562470082, |
|
"grad_norm": 0.5216091871261597, |
|
"learning_rate": 1.035122405231209e-05, |
|
"loss": 0.6465, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 1.0856869315461943, |
|
"grad_norm": 0.4610161781311035, |
|
"learning_rate": 1.0250899599533833e-05, |
|
"loss": 0.5979, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.0914313068453805, |
|
"grad_norm": 0.4795741140842438, |
|
"learning_rate": 1.0150549870811108e-05, |
|
"loss": 0.6318, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.0971756821445668, |
|
"grad_norm": 0.5046423673629761, |
|
"learning_rate": 1.0050184975503104e-05, |
|
"loss": 0.6334, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 1.102920057443753, |
|
"grad_norm": 0.41843080520629883, |
|
"learning_rate": 9.949815024496901e-06, |
|
"loss": 0.5911, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 1.1086644327429391, |
|
"grad_norm": 0.4864475131034851, |
|
"learning_rate": 9.849450129188895e-06, |
|
"loss": 0.6293, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 1.1144088080421255, |
|
"grad_norm": 0.45488062500953674, |
|
"learning_rate": 9.74910040046617e-06, |
|
"loss": 0.6293, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 1.1201531833413116, |
|
"grad_norm": 0.5488528609275818, |
|
"learning_rate": 9.648775947687914e-06, |
|
"loss": 0.6601, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.1258975586404978, |
|
"grad_norm": 0.42538103461265564, |
|
"learning_rate": 9.548486877666963e-06, |
|
"loss": 0.5716, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.1316419339396842, |
|
"grad_norm": 0.4884817898273468, |
|
"learning_rate": 9.448243293651676e-06, |
|
"loss": 0.6445, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 1.1373863092388703, |
|
"grad_norm": 0.3766753375530243, |
|
"learning_rate": 9.348055294308074e-06, |
|
"loss": 0.6133, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.1431306845380564, |
|
"grad_norm": 0.37008681893348694, |
|
"learning_rate": 9.247932972702514e-06, |
|
"loss": 0.5788, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 1.1488750598372426, |
|
"grad_norm": 0.47232893109321594, |
|
"learning_rate": 9.147886415284903e-06, |
|
"loss": 0.6026, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.154619435136429, |
|
"grad_norm": 0.4648306667804718, |
|
"learning_rate": 9.047925700872552e-06, |
|
"loss": 0.6373, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 1.160363810435615, |
|
"grad_norm": 0.4872569739818573, |
|
"learning_rate": 8.948060899634846e-06, |
|
"loss": 0.6569, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 1.1661081857348012, |
|
"grad_norm": 0.4655154049396515, |
|
"learning_rate": 8.848302072078762e-06, |
|
"loss": 0.6041, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 1.1718525610339876, |
|
"grad_norm": 0.4330590069293976, |
|
"learning_rate": 8.748659268035339e-06, |
|
"loss": 0.6465, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 1.1775969363331737, |
|
"grad_norm": 0.5113588571548462, |
|
"learning_rate": 8.649142525647271e-06, |
|
"loss": 0.6658, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.18334131163236, |
|
"grad_norm": 0.4262521266937256, |
|
"learning_rate": 8.549761870357633e-06, |
|
"loss": 0.6033, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.1890856869315463, |
|
"grad_norm": 0.4430098831653595, |
|
"learning_rate": 8.450527313899923e-06, |
|
"loss": 0.6045, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.1948300622307324, |
|
"grad_norm": 0.4243098199367523, |
|
"learning_rate": 8.351448853289448e-06, |
|
"loss": 0.6256, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.2005744375299185, |
|
"grad_norm": 0.43001773953437805, |
|
"learning_rate": 8.25253646981622e-06, |
|
"loss": 0.588, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.206318812829105, |
|
"grad_norm": 0.41961541771888733, |
|
"learning_rate": 8.153800128039441e-06, |
|
"loss": 0.6633, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.212063188128291, |
|
"grad_norm": 0.4193902909755707, |
|
"learning_rate": 8.05524977478364e-06, |
|
"loss": 0.6335, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.2178075634274772, |
|
"grad_norm": 0.3996220529079437, |
|
"learning_rate": 7.956895338136618e-06, |
|
"loss": 0.6314, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.2235519387266636, |
|
"grad_norm": 0.4009437561035156, |
|
"learning_rate": 7.858746726449309e-06, |
|
"loss": 0.5935, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.2292963140258497, |
|
"grad_norm": 0.42584890127182007, |
|
"learning_rate": 7.760813827337555e-06, |
|
"loss": 0.5841, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.2350406893250359, |
|
"grad_norm": 0.47986069321632385, |
|
"learning_rate": 7.663106506686057e-06, |
|
"loss": 0.6383, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.2407850646242222, |
|
"grad_norm": 0.46123167872428894, |
|
"learning_rate": 7.565634607654453e-06, |
|
"loss": 0.6486, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.2465294399234084, |
|
"grad_norm": 0.41050809621810913, |
|
"learning_rate": 7.468407949685695e-06, |
|
"loss": 0.5783, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.2522738152225945, |
|
"grad_norm": 0.4934438169002533, |
|
"learning_rate": 7.371436327516854e-06, |
|
"loss": 0.623, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.2580181905217809, |
|
"grad_norm": 0.46547457575798035, |
|
"learning_rate": 7.274729510192367e-06, |
|
"loss": 0.6213, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.263762565820967, |
|
"grad_norm": 0.47149044275283813, |
|
"learning_rate": 7.1782972400798825e-06, |
|
"loss": 0.6722, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.2695069411201532, |
|
"grad_norm": 0.4751822054386139, |
|
"learning_rate": 7.082149231888833e-06, |
|
"loss": 0.6478, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.2752513164193395, |
|
"grad_norm": 0.5404048562049866, |
|
"learning_rate": 6.986295171691727e-06, |
|
"loss": 0.6141, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.2809956917185257, |
|
"grad_norm": 0.4342450201511383, |
|
"learning_rate": 6.890744715948388e-06, |
|
"loss": 0.6657, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.2867400670177118, |
|
"grad_norm": 0.47704020142555237, |
|
"learning_rate": 6.795507490533142e-06, |
|
"loss": 0.6069, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.292484442316898, |
|
"grad_norm": 0.44684454798698425, |
|
"learning_rate": 6.700593089765086e-06, |
|
"loss": 0.5871, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.2982288176160843, |
|
"grad_norm": 0.4461020827293396, |
|
"learning_rate": 6.606011075441556e-06, |
|
"loss": 0.599, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.3039731929152705, |
|
"grad_norm": 0.5140913724899292, |
|
"learning_rate": 6.511770975874862e-06, |
|
"loss": 0.6503, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.3097175682144566, |
|
"grad_norm": 0.48571696877479553, |
|
"learning_rate": 6.417882284932373e-06, |
|
"loss": 0.6956, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.3154619435136428, |
|
"grad_norm": 0.4735938012599945, |
|
"learning_rate": 6.324354461080121e-06, |
|
"loss": 0.6384, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.3212063188128291, |
|
"grad_norm": 0.42931807041168213, |
|
"learning_rate": 6.231196926429913e-06, |
|
"loss": 0.6187, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.3269506941120153, |
|
"grad_norm": 0.43366044759750366, |
|
"learning_rate": 6.138419065790169e-06, |
|
"loss": 0.5666, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.3326950694112014, |
|
"grad_norm": 0.5050100684165955, |
|
"learning_rate": 6.046030225720456e-06, |
|
"loss": 0.6884, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.3384394447103878, |
|
"grad_norm": 0.4541442394256592, |
|
"learning_rate": 5.95403971358991e-06, |
|
"loss": 0.6237, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.344183820009574, |
|
"grad_norm": 0.4040675163269043, |
|
"learning_rate": 5.86245679663962e-06, |
|
"loss": 0.6313, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.34992819530876, |
|
"grad_norm": 0.40203991532325745, |
|
"learning_rate": 5.7712907010490036e-06, |
|
"loss": 0.6074, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.3556725706079464, |
|
"grad_norm": 0.42576372623443604, |
|
"learning_rate": 5.680550611006372e-06, |
|
"loss": 0.6887, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.3614169459071326, |
|
"grad_norm": 0.41085827350616455, |
|
"learning_rate": 5.590245667783701e-06, |
|
"loss": 0.585, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.3671613212063187, |
|
"grad_norm": 0.4034533202648163, |
|
"learning_rate": 5.5003849688157075e-06, |
|
"loss": 0.6249, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.372905696505505, |
|
"grad_norm": 0.3949970006942749, |
|
"learning_rate": 5.4109775667833866e-06, |
|
"loss": 0.6494, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.3786500718046912, |
|
"grad_norm": 0.3907564580440521, |
|
"learning_rate": 5.322032468702037e-06, |
|
"loss": 0.6499, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.3843944471038774, |
|
"grad_norm": 0.3859156370162964, |
|
"learning_rate": 5.233558635013842e-06, |
|
"loss": 0.6042, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.3901388224030637, |
|
"grad_norm": 0.39453139901161194, |
|
"learning_rate": 5.145564978685234e-06, |
|
"loss": 0.5661, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.39588319770225, |
|
"grad_norm": 0.4323676526546478, |
|
"learning_rate": 5.058060364308965e-06, |
|
"loss": 0.6489, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.401627573001436, |
|
"grad_norm": 0.4358120560646057, |
|
"learning_rate": 4.971053607211069e-06, |
|
"loss": 0.6446, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.4073719483006224, |
|
"grad_norm": 0.47731679677963257, |
|
"learning_rate": 4.884553472562809e-06, |
|
"loss": 0.6493, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.4131163235998085, |
|
"grad_norm": 0.4385620355606079, |
|
"learning_rate": 4.7985686744976714e-06, |
|
"loss": 0.6128, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.4188606988989947, |
|
"grad_norm": 0.49015676975250244, |
|
"learning_rate": 4.713107875233459e-06, |
|
"loss": 0.6496, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.424605074198181, |
|
"grad_norm": 0.4335099160671234, |
|
"learning_rate": 4.628179684199685e-06, |
|
"loss": 0.6078, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.4303494494973672, |
|
"grad_norm": 0.4025087058544159, |
|
"learning_rate": 4.543792657170228e-06, |
|
"loss": 0.6226, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.4360938247965533, |
|
"grad_norm": 0.37157928943634033, |
|
"learning_rate": 4.459955295401415e-06, |
|
"loss": 0.606, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.4418382000957397, |
|
"grad_norm": 0.46268367767333984, |
|
"learning_rate": 4.376676044775601e-06, |
|
"loss": 0.6484, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.4475825753949259, |
|
"grad_norm": 0.3992196321487427, |
|
"learning_rate": 4.293963294950313e-06, |
|
"loss": 0.6726, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.453326950694112, |
|
"grad_norm": 0.3783876597881317, |
|
"learning_rate": 4.211825378513066e-06, |
|
"loss": 0.5912, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.4590713259932984, |
|
"grad_norm": 0.4121813178062439, |
|
"learning_rate": 4.130270570141931e-06, |
|
"loss": 0.6252, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.4648157012924845, |
|
"grad_norm": 0.363212913274765, |
|
"learning_rate": 4.0493070857719305e-06, |
|
"loss": 0.6383, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.4705600765916707, |
|
"grad_norm": 0.37992483377456665, |
|
"learning_rate": 3.968943081767358e-06, |
|
"loss": 0.6271, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.4763044518908568, |
|
"grad_norm": 0.4617908000946045, |
|
"learning_rate": 3.889186654100089e-06, |
|
"loss": 0.6601, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.4820488271900432, |
|
"grad_norm": 0.3424414396286011, |
|
"learning_rate": 3.81004583753399e-06, |
|
"loss": 0.5911, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.4877932024892293, |
|
"grad_norm": 0.41951489448547363, |
|
"learning_rate": 3.7315286048154862e-06, |
|
"loss": 0.6659, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.4935375777884154, |
|
"grad_norm": 0.42662620544433594, |
|
"learning_rate": 3.6536428658703594e-06, |
|
"loss": 0.6263, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.4992819530876016, |
|
"grad_norm": 0.3791210651397705, |
|
"learning_rate": 3.576396467006925e-06, |
|
"loss": 0.5559, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.505026328386788, |
|
"grad_norm": 0.3816741406917572, |
|
"learning_rate": 3.4997971901255588e-06, |
|
"loss": 0.5874, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.510770703685974, |
|
"grad_norm": 0.41983887553215027, |
|
"learning_rate": 3.4238527519347353e-06, |
|
"loss": 0.6062, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.5165150789851602, |
|
"grad_norm": 0.4243004620075226, |
|
"learning_rate": 3.3485708031736698e-06, |
|
"loss": 0.6598, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.5222594542843466, |
|
"grad_norm": 0.38482630252838135, |
|
"learning_rate": 3.2739589278415252e-06, |
|
"loss": 0.5887, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.5280038295835328, |
|
"grad_norm": 0.44671764969825745, |
|
"learning_rate": 3.2000246424334315e-06, |
|
"loss": 0.6041, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.533748204882719, |
|
"grad_norm": 0.4712686836719513, |
|
"learning_rate": 3.1267753951832523e-06, |
|
"loss": 0.6144, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.5394925801819053, |
|
"grad_norm": 0.4231863021850586, |
|
"learning_rate": 3.0542185653132216e-06, |
|
"loss": 0.63, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.5452369554810914, |
|
"grad_norm": 0.4306733310222626, |
|
"learning_rate": 2.982361462290575e-06, |
|
"loss": 0.6251, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.5509813307802776, |
|
"grad_norm": 0.3925183117389679, |
|
"learning_rate": 2.9112113250911844e-06, |
|
"loss": 0.6039, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.556725706079464, |
|
"grad_norm": 0.3975939452648163, |
|
"learning_rate": 2.8407753214702694e-06, |
|
"loss": 0.7012, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.56247008137865, |
|
"grad_norm": 0.42296043038368225, |
|
"learning_rate": 2.7710605472403373e-06, |
|
"loss": 0.6065, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.5682144566778362, |
|
"grad_norm": 0.3874426782131195, |
|
"learning_rate": 2.702074025556327e-06, |
|
"loss": 0.6414, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.5739588319770226, |
|
"grad_norm": 0.4664260745048523, |
|
"learning_rate": 2.6338227062080924e-06, |
|
"loss": 0.6497, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.5797032072762087, |
|
"grad_norm": 0.4342009127140045, |
|
"learning_rate": 2.566313464920265e-06, |
|
"loss": 0.6364, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.5854475825753949, |
|
"grad_norm": 0.47467392683029175, |
|
"learning_rate": 2.4995531026595952e-06, |
|
"loss": 0.6311, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.5911919578745812, |
|
"grad_norm": 0.42795056104660034, |
|
"learning_rate": 2.4335483449498053e-06, |
|
"loss": 0.6331, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.5969363331737674, |
|
"grad_norm": 0.41573217511177063, |
|
"learning_rate": 2.3683058411940563e-06, |
|
"loss": 0.545, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.6026807084729535, |
|
"grad_norm": 0.475724995136261, |
|
"learning_rate": 2.3038321640050763e-06, |
|
"loss": 0.6338, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.6084250837721399, |
|
"grad_norm": 0.41712650656700134, |
|
"learning_rate": 2.2401338085430326e-06, |
|
"loss": 0.6337, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.614169459071326, |
|
"grad_norm": 0.4464361071586609, |
|
"learning_rate": 2.177217191861183e-06, |
|
"loss": 0.6455, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.6199138343705122, |
|
"grad_norm": 0.3937479555606842, |
|
"learning_rate": 2.115088652259446e-06, |
|
"loss": 0.6257, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.6256582096696985, |
|
"grad_norm": 0.41509386897087097, |
|
"learning_rate": 2.053754448645846e-06, |
|
"loss": 0.6399, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 1.6314025849688847, |
|
"grad_norm": 0.39400720596313477, |
|
"learning_rate": 1.9932207599059782e-06, |
|
"loss": 0.6712, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 1.6371469602680708, |
|
"grad_norm": 0.4166524112224579, |
|
"learning_rate": 1.933493684280574e-06, |
|
"loss": 0.596, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.6428913355672572, |
|
"grad_norm": 0.39237356185913086, |
|
"learning_rate": 1.8745792387511241e-06, |
|
"loss": 0.6154, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.6486357108664431, |
|
"grad_norm": 0.44719427824020386, |
|
"learning_rate": 1.8164833584337216e-06, |
|
"loss": 0.6634, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 1.6543800861656295, |
|
"grad_norm": 0.4033436179161072, |
|
"learning_rate": 1.75921189598118e-06, |
|
"loss": 0.5736, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.6601244614648158, |
|
"grad_norm": 0.3653382956981659, |
|
"learning_rate": 1.7027706209933903e-06, |
|
"loss": 0.6452, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 1.6658688367640018, |
|
"grad_norm": 0.47097504138946533, |
|
"learning_rate": 1.6471652194361131e-06, |
|
"loss": 0.6236, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.6716132120631881, |
|
"grad_norm": 0.401380330324173, |
|
"learning_rate": 1.5924012930681643e-06, |
|
"loss": 0.5874, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 1.6773575873623745, |
|
"grad_norm": 0.48601004481315613, |
|
"learning_rate": 1.5384843588770626e-06, |
|
"loss": 0.6091, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 1.6831019626615604, |
|
"grad_norm": 0.3870960772037506, |
|
"learning_rate": 1.4854198485232696e-06, |
|
"loss": 0.6612, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 1.6888463379607468, |
|
"grad_norm": 0.4852856993675232, |
|
"learning_rate": 1.433213107792991e-06, |
|
"loss": 0.6921, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 1.694590713259933, |
|
"grad_norm": 0.404511958360672, |
|
"learning_rate": 1.3818693960596186e-06, |
|
"loss": 0.6112, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.700335088559119, |
|
"grad_norm": 0.37528789043426514, |
|
"learning_rate": 1.3313938857539133e-06, |
|
"loss": 0.5827, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.7060794638583054, |
|
"grad_norm": 0.44467613101005554, |
|
"learning_rate": 1.2817916618429194e-06, |
|
"loss": 0.6202, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.7118238391574916, |
|
"grad_norm": 0.4253291189670563, |
|
"learning_rate": 1.2330677213177034e-06, |
|
"loss": 0.6565, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 1.7175682144566777, |
|
"grad_norm": 0.4275934100151062, |
|
"learning_rate": 1.1852269726899423e-06, |
|
"loss": 0.6204, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 1.723312589755864, |
|
"grad_norm": 0.3519365191459656, |
|
"learning_rate": 1.138274235497443e-06, |
|
"loss": 0.5743, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.7290569650550502, |
|
"grad_norm": 0.4690391719341278, |
|
"learning_rate": 1.0922142398186097e-06, |
|
"loss": 0.6739, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 1.7348013403542364, |
|
"grad_norm": 0.34954187273979187, |
|
"learning_rate": 1.0470516257959351e-06, |
|
"loss": 0.5869, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 1.7405457156534228, |
|
"grad_norm": 0.4536847174167633, |
|
"learning_rate": 1.00279094316854e-06, |
|
"loss": 0.5976, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 1.746290090952609, |
|
"grad_norm": 0.46374931931495667, |
|
"learning_rate": 9.594366508138352e-07, |
|
"loss": 0.6411, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 1.752034466251795, |
|
"grad_norm": 0.3895288109779358, |
|
"learning_rate": 9.169931162983137e-07, |
|
"loss": 0.6613, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.7577788415509814, |
|
"grad_norm": 0.49768176674842834, |
|
"learning_rate": 8.754646154375801e-07, |
|
"loss": 0.626, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 1.7635232168501676, |
|
"grad_norm": 0.40564650297164917, |
|
"learning_rate": 8.348553318655795e-07, |
|
"loss": 0.5868, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 1.7692675921493537, |
|
"grad_norm": 0.42296889424324036, |
|
"learning_rate": 7.951693566131325e-07, |
|
"loss": 0.6417, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.77501196744854, |
|
"grad_norm": 0.4061867892742157, |
|
"learning_rate": 7.564106876958188e-07, |
|
"loss": 0.5981, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.7807563427477262, |
|
"grad_norm": 0.39801332354545593, |
|
"learning_rate": 7.185832297111939e-07, |
|
"loss": 0.5885, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.7865007180469124, |
|
"grad_norm": 0.4562993049621582, |
|
"learning_rate": 6.816907934454353e-07, |
|
"loss": 0.6611, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 1.7922450933460987, |
|
"grad_norm": 0.426230788230896, |
|
"learning_rate": 6.457370954894582e-07, |
|
"loss": 0.6083, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.7979894686452849, |
|
"grad_norm": 0.432153582572937, |
|
"learning_rate": 6.107257578644721e-07, |
|
"loss": 0.5977, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.803733843944471, |
|
"grad_norm": 0.3961557447910309, |
|
"learning_rate": 5.766603076571164e-07, |
|
"loss": 0.5776, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.8094782192436574, |
|
"grad_norm": 0.406272828578949, |
|
"learning_rate": 5.43544176664137e-07, |
|
"loss": 0.6229, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.8152225945428435, |
|
"grad_norm": 0.4153190851211548, |
|
"learning_rate": 5.113807010466432e-07, |
|
"loss": 0.6814, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 1.8209669698420297, |
|
"grad_norm": 0.467479407787323, |
|
"learning_rate": 4.801731209940375e-07, |
|
"loss": 0.624, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 1.826711345141216, |
|
"grad_norm": 0.40130171179771423, |
|
"learning_rate": 4.499245803975927e-07, |
|
"loss": 0.6028, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 1.832455720440402, |
|
"grad_norm": 0.40247321128845215, |
|
"learning_rate": 4.206381265337189e-07, |
|
"loss": 0.6034, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 1.8382000957395883, |
|
"grad_norm": 0.5517602562904358, |
|
"learning_rate": 3.9231670975699354e-07, |
|
"loss": 0.6168, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.8439444710387747, |
|
"grad_norm": 0.3919602036476135, |
|
"learning_rate": 3.649631832029288e-07, |
|
"loss": 0.6464, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 1.8496888463379606, |
|
"grad_norm": 0.4361954927444458, |
|
"learning_rate": 3.385803025005463e-07, |
|
"loss": 0.5725, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 1.855433221637147, |
|
"grad_norm": 0.4158506393432617, |
|
"learning_rate": 3.1317072549477246e-07, |
|
"loss": 0.6419, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 1.8611775969363333, |
|
"grad_norm": 0.40699517726898193, |
|
"learning_rate": 2.887370119786792e-07, |
|
"loss": 0.6496, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 1.8669219722355193, |
|
"grad_norm": 0.42238280177116394, |
|
"learning_rate": 2.6528162343561593e-07, |
|
"loss": 0.6192, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.8726663475347056, |
|
"grad_norm": 0.4099232256412506, |
|
"learning_rate": 2.4280692279122554e-07, |
|
"loss": 0.6027, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 1.8784107228338918, |
|
"grad_norm": 0.4579300284385681, |
|
"learning_rate": 2.2131517417540937e-07, |
|
"loss": 0.6208, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 1.884155098133078, |
|
"grad_norm": 0.4043782353401184, |
|
"learning_rate": 2.00808542694233e-07, |
|
"loss": 0.6455, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 1.8898994734322643, |
|
"grad_norm": 0.4796655476093292, |
|
"learning_rate": 1.8128909421180506e-07, |
|
"loss": 0.5887, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 1.8956438487314504, |
|
"grad_norm": 0.42188167572021484, |
|
"learning_rate": 1.6275879514217052e-07, |
|
"loss": 0.59, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.9013882240306366, |
|
"grad_norm": 0.44038787484169006, |
|
"learning_rate": 1.4521951225120345e-07, |
|
"loss": 0.6184, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 1.907132599329823, |
|
"grad_norm": 0.43046262860298157, |
|
"learning_rate": 1.2867301246854757e-07, |
|
"loss": 0.559, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 1.912876974629009, |
|
"grad_norm": 0.37012651562690735, |
|
"learning_rate": 1.1312096270961525e-07, |
|
"loss": 0.5971, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.9186213499281952, |
|
"grad_norm": 0.4588046967983246, |
|
"learning_rate": 9.856492970766296e-08, |
|
"loss": 0.6449, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.9243657252273816, |
|
"grad_norm": 0.3975079357624054, |
|
"learning_rate": 8.50063798559475e-08, |
|
"loss": 0.6467, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.9301101005265677, |
|
"grad_norm": 0.4464259445667267, |
|
"learning_rate": 7.244667906001202e-08, |
|
"loss": 0.5781, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.9358544758257539, |
|
"grad_norm": 0.3905969262123108, |
|
"learning_rate": 6.088709260007153e-08, |
|
"loss": 0.5607, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 1.9415988511249402, |
|
"grad_norm": 0.3919637203216553, |
|
"learning_rate": 5.032878500355498e-08, |
|
"loss": 0.592, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.9473432264241264, |
|
"grad_norm": 0.41406241059303284, |
|
"learning_rate": 4.07728199277857e-08, |
|
"loss": 0.6728, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.9530876017233125, |
|
"grad_norm": 0.40807828307151794, |
|
"learning_rate": 3.2220160052828245e-08, |
|
"loss": 0.622, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.958831977022499, |
|
"grad_norm": 0.4522593319416046, |
|
"learning_rate": 2.467166698450485e-08, |
|
"loss": 0.6369, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.964576352321685, |
|
"grad_norm": 0.46903252601623535, |
|
"learning_rate": 1.812810116760044e-08, |
|
"loss": 0.6429, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.9703207276208712, |
|
"grad_norm": 0.429757684469223, |
|
"learning_rate": 1.2590121809247235e-08, |
|
"loss": 0.5928, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.9760651029200575, |
|
"grad_norm": 0.4194485545158386, |
|
"learning_rate": 8.05828681252452e-09, |
|
"loss": 0.6151, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.9818094782192437, |
|
"grad_norm": 0.4075304865837097, |
|
"learning_rate": 4.5330527202480656e-09, |
|
"loss": 0.6248, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.9875538535184298, |
|
"grad_norm": 0.3920026421546936, |
|
"learning_rate": 2.014774668979147e-09, |
|
"loss": 0.6467, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.9932982288176162, |
|
"grad_norm": 0.4989790916442871, |
|
"learning_rate": 5.037063532498109e-10, |
|
"loss": 0.6511, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 1.9990426041168023, |
|
"grad_norm": 0.4017556607723236, |
|
"learning_rate": 0.0, |
|
"loss": 0.5548, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.9990426041168023, |
|
"step": 348, |
|
"total_flos": 155889590697984.0, |
|
"train_loss": 0.45673759161740884, |
|
"train_runtime": 8023.7095, |
|
"train_samples_per_second": 4.165, |
|
"train_steps_per_second": 0.043 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 348, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 155889590697984.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
}