|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9756244357508277,
  "eval_steps": 200,
  "global_step": 309,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.000000000000001e-06,
      "loss": 3.5752,
      "step": 1
    },
    {
      "epoch": 0.02,
      "learning_rate": 8.000000000000001e-06,
      "loss": 3.4389,
      "step": 2
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.2e-05,
      "loss": 2.2415,
      "step": 3
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.6566,
      "step": 4
    },
    {
      "epoch": 0.05,
      "learning_rate": 2e-05,
      "loss": 1.5019,
      "step": 5
    },
    {
      "epoch": 0.06,
      "learning_rate": 2.4e-05,
      "loss": 1.409,
      "step": 6
    },
    {
      "epoch": 0.07,
      "learning_rate": 2.8e-05,
      "loss": 1.4669,
      "step": 7
    },
    {
      "epoch": 0.08,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 1.9948,
      "step": 8
    },
    {
      "epoch": 0.09,
      "learning_rate": 3.6e-05,
      "loss": 1.3069,
      "step": 9
    },
    {
      "epoch": 0.1,
      "learning_rate": 4e-05,
      "loss": 1.276,
      "step": 10
    },
    {
      "epoch": 0.11,
      "learning_rate": 3.9998896039909675e-05,
      "loss": 1.2514,
      "step": 11
    },
    {
      "epoch": 0.12,
      "learning_rate": 3.999558428151149e-05,
      "loss": 1.4888,
      "step": 12
    },
    {
      "epoch": 0.13,
      "learning_rate": 3.999006509041036e-05,
      "loss": 1.4359,
      "step": 13
    },
    {
      "epoch": 0.13,
      "learning_rate": 3.998233907590294e-05,
      "loss": 1.225,
      "step": 14
    },
    {
      "epoch": 0.14,
      "learning_rate": 3.997240709091041e-05,
      "loss": 1.2955,
      "step": 15
    },
    {
      "epoch": 0.15,
      "learning_rate": 3.996027023188427e-05,
      "loss": 1.2106,
      "step": 16
    },
    {
      "epoch": 0.16,
      "learning_rate": 3.994592983868533e-05,
      "loss": 1.1727,
      "step": 17
    },
    {
      "epoch": 0.17,
      "learning_rate": 3.9929387494435746e-05,
      "loss": 1.21,
      "step": 18
    },
    {
      "epoch": 0.18,
      "learning_rate": 3.9910645025344324e-05,
      "loss": 1.1913,
      "step": 19
    },
    {
      "epoch": 0.19,
      "learning_rate": 3.988970450050483e-05,
      "loss": 1.1689,
      "step": 20
    },
    {
      "epoch": 0.2,
      "learning_rate": 3.986656823166766e-05,
      "loss": 1.1868,
      "step": 21
    },
    {
      "epoch": 0.21,
      "learning_rate": 3.984123877298454e-05,
      "loss": 1.139,
      "step": 22
    },
    {
      "epoch": 0.22,
      "learning_rate": 3.981371892072661e-05,
      "loss": 1.1681,
      "step": 23
    },
    {
      "epoch": 0.23,
      "learning_rate": 3.978401171297576e-05,
      "loss": 1.1059,
      "step": 24
    },
    {
      "epoch": 0.24,
      "learning_rate": 3.9752120429289135e-05,
      "loss": 1.1138,
      "step": 25
    },
    {
      "epoch": 0.25,
      "learning_rate": 3.9718048590337186e-05,
      "loss": 1.0705,
      "step": 26
    },
    {
      "epoch": 0.26,
      "learning_rate": 3.9681799957514965e-05,
      "loss": 1.1247,
      "step": 27
    },
    {
      "epoch": 0.27,
      "learning_rate": 3.964337853252685e-05,
      "loss": 1.1524,
      "step": 28
    },
    {
      "epoch": 0.28,
      "learning_rate": 3.960278855694484e-05,
      "loss": 1.0491,
      "step": 29
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.956003451174023e-05,
      "loss": 1.1492,
      "step": 30
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.9515121116788985e-05,
      "loss": 1.1623,
      "step": 31
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.946805333035067e-05,
      "loss": 1.1024,
      "step": 32
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.9418836348521045e-05,
      "loss": 1.0908,
      "step": 33
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.9367475604658485e-05,
      "loss": 1.1505,
      "step": 34
    },
    {
      "epoch": 0.34,
      "learning_rate": 3.931397676878415e-05,
      "loss": 1.0795,
      "step": 35
    },
    {
      "epoch": 0.35,
      "learning_rate": 3.925834574695599e-05,
      "loss": 1.0738,
      "step": 36
    },
    {
      "epoch": 0.36,
      "learning_rate": 3.92005886806168e-05,
      "loss": 1.0591,
      "step": 37
    },
    {
      "epoch": 0.37,
      "learning_rate": 3.9140711945916197e-05,
      "loss": 1.0694,
      "step": 38
    },
    {
      "epoch": 0.38,
      "learning_rate": 3.907872215300672e-05,
      "loss": 1.0729,
      "step": 39
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.9014626145314115e-05,
      "loss": 1.1091,
      "step": 40
    },
    {
      "epoch": 0.39,
      "learning_rate": 3.8948430998781824e-05,
      "loss": 1.0049,
      "step": 41
    },
    {
      "epoch": 0.4,
      "learning_rate": 3.888014402108984e-05,
      "loss": 1.0333,
      "step": 42
    },
    {
      "epoch": 0.41,
      "learning_rate": 3.8809772750847964e-05,
      "loss": 1.0432,
      "step": 43
    },
    {
      "epoch": 0.42,
      "learning_rate": 3.87373249567636e-05,
      "loss": 0.9931,
      "step": 44
    },
    {
      "epoch": 0.43,
      "learning_rate": 3.8662808636784056e-05,
      "loss": 1.0425,
      "step": 45
    },
    {
      "epoch": 0.44,
      "learning_rate": 3.8586232017213675e-05,
      "loss": 1.0287,
      "step": 46
    },
    {
      "epoch": 0.45,
      "learning_rate": 3.850760355180564e-05,
      "loss": 0.9849,
      "step": 47
    },
    {
      "epoch": 0.46,
      "learning_rate": 3.842693192082874e-05,
      "loss": 1.0096,
      "step": 48
    },
    {
      "epoch": 0.47,
      "learning_rate": 3.834422603010906e-05,
      "loss": 0.9763,
      "step": 49
    },
    {
      "epoch": 0.48,
      "learning_rate": 3.8259495010046876e-05,
      "loss": 0.9852,
      "step": 50
    },
    {
      "epoch": 0.49,
      "learning_rate": 3.8172748214608624e-05,
      "loss": 1.0338,
      "step": 51
    },
    {
      "epoch": 0.5,
      "learning_rate": 3.808399522029433e-05,
      "loss": 0.9748,
      "step": 52
    },
    {
      "epoch": 0.51,
      "learning_rate": 3.7993245825080364e-05,
      "loss": 0.9784,
      "step": 53
    },
    {
      "epoch": 0.52,
      "learning_rate": 3.790051004733775e-05,
      "loss": 0.9736,
      "step": 54
    },
    {
      "epoch": 0.53,
      "learning_rate": 3.7805798124726283e-05,
      "loss": 1.0221,
      "step": 55
    },
    {
      "epoch": 0.54,
      "learning_rate": 3.7709120513064196e-05,
      "loss": 0.9352,
      "step": 56
    },
    {
      "epoch": 0.55,
      "learning_rate": 3.7610487885174e-05,
      "loss": 0.9668,
      "step": 57
    },
    {
      "epoch": 0.56,
      "learning_rate": 3.7509911129704165e-05,
      "loss": 0.9478,
      "step": 58
    },
    {
      "epoch": 0.57,
      "learning_rate": 3.74074013499271e-05,
      "loss": 0.941,
      "step": 59
    },
    {
      "epoch": 0.58,
      "learning_rate": 3.730296986251337e-05,
      "loss": 0.9212,
      "step": 60
    },
    {
      "epoch": 0.59,
      "learning_rate": 3.7196628196282415e-05,
      "loss": 0.8714,
      "step": 61
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.7088388090929776e-05,
      "loss": 0.948,
      "step": 62
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.69782614957311e-05,
      "loss": 0.9212,
      "step": 63
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.686626056822298e-05,
      "loss": 0.8565,
      "step": 64
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.675239767286083e-05,
      "loss": 0.9639,
      "step": 65
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.6636685379653875e-05,
      "loss": 0.877,
      "step": 66
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.651913646277747e-05,
      "loss": 0.8497,
      "step": 67
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.639976389916292e-05,
      "loss": 0.8182,
      "step": 68
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.627858086706483e-05,
      "loss": 0.853,
      "step": 69
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.61556007446063e-05,
      "loss": 0.7592,
      "step": 70
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.603083710830205e-05,
      "loss": 0.9022,
      "step": 71
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.5904303731559584e-05,
      "loss": 0.8494,
      "step": 72
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.5776014583158714e-05,
      "loss": 0.7637,
      "step": 73
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.564598382570943e-05,
      "loss": 0.8176,
      "step": 74
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.55142258140884e-05,
      "loss": 0.8738,
      "step": 75
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.538075509385427e-05,
      "loss": 0.7815,
      "step": 76
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.524558639964187e-05,
      "loss": 0.8885,
      "step": 77
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.5108734653535587e-05,
      "loss": 0.8237,
      "step": 78
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.497021496342203e-05,
      "loss": 0.7413,
      "step": 79
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.4830042621322145e-05,
      "loss": 0.7746,
      "step": 80
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.468823310170309e-05,
      "loss": 0.8366,
      "step": 81
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.4544802059769864e-05,
      "loss": 0.76,
      "step": 82
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.439976532973708e-05,
      "loss": 0.8352,
      "step": 83
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.425313892308089e-05,
      "loss": 0.7205,
      "step": 84
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.41049390267714e-05,
      "loss": 0.7648,
      "step": 85
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.395518200148571e-05,
      "loss": 0.8244,
      "step": 86
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.380388437980173e-05,
      "loss": 0.7936,
      "step": 87
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.365106286437309e-05,
      "loss": 0.7051,
      "step": 88
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.3496734326085154e-05,
      "loss": 0.7281,
      "step": 89
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.3340915802192655e-05,
      "loss": 0.8165,
      "step": 90
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.318362449443876e-05,
      "loss": 0.7686,
      "step": 91
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.3024877767156094e-05,
      "loss": 0.7015,
      "step": 92
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.2864693145349795e-05,
      "loss": 0.707,
      "step": 93
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.2703088312762825e-05,
      "loss": 0.6862,
      "step": 94
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.254008110992373e-05,
      "loss": 0.646,
      "step": 95
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.237568953217717e-05,
      "loss": 0.5646,
      "step": 96
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.220993172769723e-05,
      "loss": 0.6573,
      "step": 97
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.204282599548399e-05,
      "loss": 0.631,
      "step": 98
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.187439078334338e-05,
      "loss": 0.5593,
      "step": 99
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.170464468585059e-05,
      "loss": 0.6312,
      "step": 100
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.153360644229735e-05,
      "loss": 0.6508,
      "step": 101
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.136129493462312e-05,
      "loss": 0.6596,
      "step": 102
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.1187729185330665e-05,
      "loss": 0.6439,
      "step": 103
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.101292835538602e-05,
      "loss": 0.6206,
      "step": 104
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.083691174210318e-05,
      "loss": 0.486,
      "step": 105
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.065969877701378e-05,
      "loss": 0.5089,
      "step": 106
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.04813090237219e-05,
      "loss": 0.4347,
      "step": 107
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.030176217574437e-05,
      "loss": 0.4901,
      "step": 108
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.0121078054336633e-05,
      "loss": 0.4452,
      "step": 109
    },
    {
      "epoch": 1.06,
      "learning_rate": 2.9939276606304592e-05,
      "loss": 0.4698,
      "step": 110
    },
    {
      "epoch": 1.07,
      "learning_rate": 2.975637790180255e-05,
      "loss": 0.4264,
      "step": 111
    },
    {
      "epoch": 1.08,
      "learning_rate": 2.9572402132117533e-05,
      "loss": 0.4965,
      "step": 112
    },
    {
      "epoch": 1.09,
      "learning_rate": 2.9387369607440277e-05,
      "loss": 0.4725,
      "step": 113
    },
    {
      "epoch": 1.1,
      "learning_rate": 2.9201300754623046e-05,
      "loss": 0.4696,
      "step": 114
    },
    {
      "epoch": 1.11,
      "learning_rate": 2.9014216114924595e-05,
      "loss": 0.4647,
      "step": 115
    },
    {
      "epoch": 1.12,
      "learning_rate": 2.8826136341742504e-05,
      "loss": 0.4576,
      "step": 116
    },
    {
      "epoch": 1.13,
      "learning_rate": 2.863708219833311e-05,
      "loss": 0.4064,
      "step": 117
    },
    {
      "epoch": 1.14,
      "learning_rate": 2.8447074555519335e-05,
      "loss": 0.5083,
      "step": 118
    },
    {
      "epoch": 1.15,
      "learning_rate": 2.825613438938663e-05,
      "loss": 0.4381,
      "step": 119
    },
    {
      "epoch": 1.16,
      "learning_rate": 2.8064282778967296e-05,
      "loss": 0.4829,
      "step": 120
    },
    {
      "epoch": 1.17,
      "learning_rate": 2.7871540903913465e-05,
      "loss": 0.4532,
      "step": 121
    },
    {
      "epoch": 1.17,
      "learning_rate": 2.7677930042158893e-05,
      "loss": 0.4142,
      "step": 122
    },
    {
      "epoch": 1.18,
      "learning_rate": 2.7483471567570045e-05,
      "loss": 0.4921,
      "step": 123
    },
    {
      "epoch": 1.19,
      "learning_rate": 2.7288186947586426e-05,
      "loss": 0.4045,
      "step": 124
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.7092097740850712e-05,
      "loss": 0.4199,
      "step": 125
    },
    {
      "epoch": 1.21,
      "learning_rate": 2.6895225594828743e-05,
      "loss": 0.4419,
      "step": 126
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.6697592243419723e-05,
      "loss": 0.4105,
      "step": 127
    },
    {
      "epoch": 1.23,
      "learning_rate": 2.6499219504556907e-05,
      "loss": 0.3799,
      "step": 128
    },
    {
      "epoch": 1.24,
      "learning_rate": 2.630012927779896e-05,
      "loss": 0.4399,
      "step": 129
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.6100343541912353e-05,
      "loss": 0.3672,
      "step": 130
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.5899884352444994e-05,
      "loss": 0.4002,
      "step": 131
    },
    {
      "epoch": 1.27,
      "learning_rate": 2.5698773839291373e-05,
      "loss": 0.4816,
      "step": 132
    },
    {
      "epoch": 1.28,
      "learning_rate": 2.549703420424951e-05,
      "loss": 0.4849,
      "step": 133
    },
    {
      "epoch": 1.29,
      "learning_rate": 2.5294687718569994e-05,
      "loss": 0.3788,
      "step": 134
    },
    {
      "epoch": 1.3,
      "learning_rate": 2.5091756720497266e-05,
      "loss": 0.3748,
      "step": 135
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.4888263612803637e-05,
      "loss": 0.3789,
      "step": 136
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.468423086031605e-05,
      "loss": 0.4272,
      "step": 137
    },
    {
      "epoch": 1.33,
      "learning_rate": 2.44796809874361e-05,
      "loss": 0.4927,
      "step": 138
    },
    {
      "epoch": 1.34,
      "learning_rate": 2.4274636575653398e-05,
      "loss": 0.4966,
      "step": 139
    },
    {
      "epoch": 1.35,
      "learning_rate": 2.4069120261052682e-05,
      "loss": 0.2791,
      "step": 140
    },
    {
      "epoch": 1.36,
      "learning_rate": 2.3863154731814867e-05,
      "loss": 0.4203,
      "step": 141
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.365676272571239e-05,
      "loss": 0.389,
      "step": 142
    },
    {
      "epoch": 1.38,
      "learning_rate": 2.344996702759901e-05,
      "loss": 0.4018,
      "step": 143
    },
    {
      "epoch": 1.39,
      "learning_rate": 2.3242790466894494e-05,
      "loss": 0.4103,
      "step": 144
    },
    {
      "epoch": 1.4,
      "learning_rate": 2.303525591506431e-05,
      "loss": 0.3833,
      "step": 145
    },
    {
      "epoch": 1.41,
      "learning_rate": 2.2827386283094707e-05,
      "loss": 0.3272,
      "step": 146
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.2619204518963452e-05,
      "loss": 0.3897,
      "step": 147
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.2410733605106462e-05,
      "loss": 0.3893,
      "step": 148
    },
    {
      "epoch": 1.43,
      "learning_rate": 2.2201996555880633e-05,
      "loss": 0.3954,
      "step": 149
    },
    {
      "epoch": 1.44,
      "learning_rate": 2.199301641502313e-05,
      "loss": 0.4332,
      "step": 150
    },
    {
      "epoch": 1.45,
      "learning_rate": 2.178381625310748e-05,
      "loss": 0.4815,
      "step": 151
    },
    {
      "epoch": 1.46,
      "learning_rate": 2.157441916499663e-05,
      "loss": 0.4959,
      "step": 152
    },
    {
      "epoch": 1.47,
      "learning_rate": 2.1364848267293424e-05,
      "loss": 0.2849,
      "step": 153
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.115512669578857e-05,
      "loss": 0.4259,
      "step": 154
    },
    {
      "epoch": 1.49,
      "learning_rate": 2.0945277602906574e-05,
      "loss": 0.4646,
      "step": 155
    },
    {
      "epoch": 1.5,
      "learning_rate": 2.0735324155149795e-05,
      "loss": 0.3296,
      "step": 156
    },
    {
      "epoch": 1.51,
      "learning_rate": 2.0525289530540945e-05,
      "loss": 0.3316,
      "step": 157
    },
    {
      "epoch": 1.52,
      "learning_rate": 2.031519691606433e-05,
      "loss": 0.2673,
      "step": 158
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.0105069505106126e-05,
      "loss": 0.3335,
      "step": 159
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.9894930494893884e-05,
      "loss": 0.4136,
      "step": 160
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.9684803083935676e-05,
      "loss": 0.3702,
      "step": 161
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.9474710469459062e-05,
      "loss": 0.322,
      "step": 162
    },
    {
      "epoch": 1.57,
      "learning_rate": 1.926467584485021e-05,
      "loss": 0.2832,
      "step": 163
    },
    {
      "epoch": 1.58,
      "learning_rate": 1.905472239709343e-05,
      "loss": 0.3917,
      "step": 164
    },
    {
      "epoch": 1.59,
      "learning_rate": 1.8844873304211435e-05,
      "loss": 0.3641,
      "step": 165
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.8635151732706586e-05,
      "loss": 0.3516,
      "step": 166
    },
    {
      "epoch": 1.61,
      "learning_rate": 1.8425580835003376e-05,
      "loss": 0.2568,
      "step": 167
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.8216183746892528e-05,
      "loss": 0.4459,
      "step": 168
    },
    {
      "epoch": 1.63,
      "learning_rate": 1.8006983584976877e-05,
      "loss": 0.4058,
      "step": 169
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.7798003444119374e-05,
      "loss": 0.4351,
      "step": 170
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.758926639489354e-05,
      "loss": 0.4268,
      "step": 171
    },
    {
      "epoch": 1.66,
      "learning_rate": 1.7380795481036558e-05,
      "loss": 0.2704,
      "step": 172
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.7172613716905306e-05,
      "loss": 0.3372,
      "step": 173
    },
    {
      "epoch": 1.68,
      "learning_rate": 1.69647440849357e-05,
      "loss": 0.3816,
      "step": 174
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.6757209533105512e-05,
      "loss": 0.3816,
      "step": 175
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.6550032972400996e-05,
      "loss": 0.3937,
      "step": 176
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.634323727428762e-05,
      "loss": 0.3147,
      "step": 177
    },
    {
      "epoch": 1.71,
      "learning_rate": 1.613684526818514e-05,
      "loss": 0.3302,
      "step": 178
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.5930879738947328e-05,
      "loss": 0.4236,
      "step": 179
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.572536342434661e-05,
      "loss": 0.2248,
      "step": 180
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.552031901256391e-05,
      "loss": 0.3379,
      "step": 181
    },
    {
      "epoch": 1.75,
      "learning_rate": 1.531576913968396e-05,
      "loss": 0.3467,
      "step": 182
    },
    {
      "epoch": 1.76,
      "learning_rate": 1.5111736387196377e-05,
      "loss": 0.3314,
      "step": 183
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.4908243279502741e-05,
      "loss": 0.3386,
      "step": 184
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.4705312281430013e-05,
      "loss": 0.3905,
      "step": 185
    },
    {
      "epoch": 1.79,
      "learning_rate": 1.4502965795750487e-05,
      "loss": 0.4172,
      "step": 186
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.430122616070863e-05,
      "loss": 0.3216,
      "step": 187
    },
    {
      "epoch": 1.81,
      "learning_rate": 1.4100115647555006e-05,
      "loss": 0.2909,
      "step": 188
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.3899656458087647e-05,
      "loss": 0.272,
      "step": 189
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.3699870722201043e-05,
      "loss": 0.3334,
      "step": 190
    },
    {
      "epoch": 1.84,
      "learning_rate": 1.3500780495443098e-05,
      "loss": 0.3541,
      "step": 191
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.3302407756580278e-05,
      "loss": 0.3599,
      "step": 192
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.3104774405171259e-05,
      "loss": 0.2508,
      "step": 193
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.2907902259149287e-05,
      "loss": 0.3862,
      "step": 194
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.2711813052413575e-05,
      "loss": 0.3328,
      "step": 195
    },
    {
      "epoch": 1.89,
      "learning_rate": 1.2516528432429955e-05,
      "loss": 0.2619,
      "step": 196
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.232206995784111e-05,
      "loss": 0.3766,
      "step": 197
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.2128459096086544e-05,
      "loss": 0.3733,
      "step": 198
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.1935717221032707e-05,
      "loss": 0.2327,
      "step": 199
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.1743865610613377e-05,
      "loss": 0.3572,
      "step": 200
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.1552925444480674e-05,
      "loss": 0.1715,
      "step": 201
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.1362917801666895e-05,
      "loss": 0.2646,
      "step": 202
    },
    {
      "epoch": 1.95,
      "learning_rate": 1.11738636582575e-05,
      "loss": 0.2298,
      "step": 203
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.0985783885075407e-05,
      "loss": 0.282,
      "step": 204
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.0798699245376959e-05,
      "loss": 0.3746,
      "step": 205
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.0612630392559728e-05,
      "loss": 0.318,
      "step": 206
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.0427597867882474e-05,
      "loss": 0.2953,
      "step": 207
    },
    {
      "epoch": 2.0,
      "learning_rate": 1.0243622098197456e-05,
      "loss": 0.2542,
      "step": 208
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.0060723393695411e-05,
      "loss": 0.2674,
      "step": 209
    },
    {
      "epoch": 2.02,
      "learning_rate": 9.878921945663368e-06,
      "loss": 0.2679,
      "step": 210
    },
    {
      "epoch": 2.03,
      "learning_rate": 9.698237824255634e-06,
      "loss": 0.2404,
      "step": 211
    },
    {
      "epoch": 2.04,
      "learning_rate": 9.518690976278108e-06,
      "loss": 0.227,
      "step": 212
    },
    {
      "epoch": 2.05,
      "learning_rate": 9.340301222986232e-06,
      "loss": 0.1782,
      "step": 213
    },
    {
      "epoch": 2.06,
      "learning_rate": 9.163088257896825e-06,
      "loss": 0.2436,
      "step": 214
    },
    {
      "epoch": 2.07,
      "learning_rate": 8.987071644613985e-06,
      "loss": 0.253,
      "step": 215
    },
    {
      "epoch": 2.08,
      "learning_rate": 8.812270814669338e-06,
      "loss": 0.2591,
      "step": 216
    },
    {
      "epoch": 2.09,
      "learning_rate": 8.638705065376887e-06,
      "loss": 0.2712,
      "step": 217
    },
    {
      "epoch": 2.1,
      "learning_rate": 8.466393557702659e-06,
      "loss": 0.2411,
      "step": 218
    },
    {
      "epoch": 2.11,
      "learning_rate": 8.295355314149413e-06,
      "loss": 0.2139,
      "step": 219
    },
    {
      "epoch": 2.12,
      "learning_rate": 8.125609216656627e-06,
      "loss": 0.2237,
      "step": 220
    },
    {
      "epoch": 2.13,
      "learning_rate": 7.957174004516015e-06,
      "loss": 0.1817,
      "step": 221
    },
    {
      "epoch": 2.14,
      "learning_rate": 7.790068272302776e-06,
      "loss": 0.1816,
      "step": 222
    },
    {
      "epoch": 2.15,
      "learning_rate": 7.624310467822833e-06,
      "loss": 0.2413,
      "step": 223
    },
    {
      "epoch": 2.16,
      "learning_rate": 7.459918890076272e-06,
      "loss": 0.2405,
      "step": 224
    },
    {
      "epoch": 2.17,
      "learning_rate": 7.296911687237187e-06,
      "loss": 0.2459,
      "step": 225
    },
    {
      "epoch": 2.18,
      "learning_rate": 7.1353068546502144e-06,
      "loss": 0.2203,
      "step": 226
    },
    {
      "epoch": 2.19,
      "learning_rate": 6.975122232843916e-06,
      "loss": 0.1892,
      "step": 227
    },
    {
      "epoch": 2.2,
      "learning_rate": 6.816375505561248e-06,
      "loss": 0.191,
      "step": 228
    },
    {
      "epoch": 2.21,
      "learning_rate": 6.659084197807348e-06,
      "loss": 0.2545,
      "step": 229
    },
    {
      "epoch": 2.21,
      "learning_rate": 6.503265673914849e-06,
      "loss": 0.2312,
      "step": 230
    },
    {
      "epoch": 2.22,
      "learning_rate": 6.348937135626922e-06,
      "loss": 0.1733,
      "step": 231
    },
    {
      "epoch": 2.23,
      "learning_rate": 6.196115620198271e-06,
      "loss": 0.176,
      "step": 232
    },
    {
      "epoch": 2.24,
      "learning_rate": 6.044817998514296e-06,
      "loss": 0.2399,
      "step": 233
    },
    {
      "epoch": 2.25,
      "learning_rate": 5.895060973228606e-06,
      "loss": 0.2473,
      "step": 234
    },
    {
      "epoch": 2.26,
      "learning_rate": 5.7468610769191195e-06,
      "loss": 0.248,
      "step": 235
    },
    {
      "epoch": 2.27,
      "learning_rate": 5.600234670262925e-06,
      "loss": 0.2179,
      "step": 236
    },
    {
      "epoch": 2.28,
      "learning_rate": 5.455197940230137e-06,
      "loss": 0.277,
      "step": 237
    },
    {
      "epoch": 2.29,
      "learning_rate": 5.311766898296915e-06,
      "loss": 0.221,
      "step": 238
    },
    {
      "epoch": 2.3,
      "learning_rate": 5.169957378677859e-06,
      "loss": 0.2477,
      "step": 239
    },
    {
      "epoch": 2.31,
      "learning_rate": 5.029785036577976e-06,
      "loss": 0.2348,
      "step": 240
    },
    {
      "epoch": 2.32,
      "learning_rate": 4.891265346464416e-06,
      "loss": 0.1883,
      "step": 241
    },
    {
      "epoch": 2.33,
      "learning_rate": 4.7544136003581365e-06,
      "loss": 0.2551,
      "step": 242
    },
    {
      "epoch": 2.34,
      "learning_rate": 4.619244906145734e-06,
      "loss": 0.2097,
      "step": 243
    },
    {
      "epoch": 2.35,
      "learning_rate": 4.4857741859116024e-06,
      "loss": 0.2101,
      "step": 244
    },
    {
      "epoch": 2.36,
      "learning_rate": 4.354016174290572e-06,
      "loss": 0.2218,
      "step": 245
    },
    {
      "epoch": 2.37,
      "learning_rate": 4.223985416841292e-06,
      "loss": 0.3078,
      "step": 246
    },
    {
      "epoch": 2.38,
      "learning_rate": 4.095696268440426e-06,
      "loss": 0.2115,
      "step": 247
    },
    {
      "epoch": 2.39,
      "learning_rate": 3.969162891697962e-06,
      "loss": 0.1939,
      "step": 248
    },
    {
      "epoch": 2.4,
      "learning_rate": 3.844399255393705e-06,
      "loss": 0.1181,
      "step": 249
    },
    {
      "epoch": 2.41,
      "learning_rate": 3.7214191329351735e-06,
      "loss": 0.2248,
      "step": 250
    },
    {
      "epoch": 2.42,
      "learning_rate": 3.6002361008370802e-06,
      "loss": 0.1661,
      "step": 251
    },
    {
      "epoch": 2.43,
      "learning_rate": 3.4808635372225276e-06,
      "loss": 0.1613,
      "step": 252
    },
    {
      "epoch": 2.44,
      "learning_rate": 3.3633146203461275e-06,
      "loss": 0.226,
      "step": 253
    },
    {
      "epoch": 2.45,
      "learning_rate": 3.2476023271391698e-06,
      "loss": 0.1599,
      "step": 254
    },
    {
      "epoch": 2.46,
      "learning_rate": 3.1337394317770208e-06,
      "loss": 0.1809,
      "step": 255
    },
    {
      "epoch": 2.47,
      "learning_rate": 3.021738504268905e-06,
      "loss": 0.1874,
      "step": 256
    },
    {
      "epoch": 2.47,
      "learning_rate": 2.911611909070229e-06,
      "loss": 0.1993,
      "step": 257
    },
    {
      "epoch": 2.48,
      "learning_rate": 2.8033718037175915e-06,
      "loss": 0.2099,
      "step": 258
    },
    {
      "epoch": 2.49,
      "learning_rate": 2.6970301374866337e-06,
      "loss": 0.1388,
      "step": 259
    },
    {
      "epoch": 2.5,
      "learning_rate": 2.5925986500729083e-06,
      "loss": 0.2015,
      "step": 260
    },
    {
      "epoch": 2.51,
      "learning_rate": 2.490088870295839e-06,
      "loss": 0.233,
      "step": 261
    },
    {
      "epoch": 2.52,
      "learning_rate": 2.3895121148260027e-06,
      "loss": 0.2557,
      "step": 262
    },
    {
      "epoch": 2.53,
      "learning_rate": 2.2908794869358044e-06,
      "loss": 0.1154,
      "step": 263
    },
    {
      "epoch": 2.54,
      "learning_rate": 2.1942018752737227e-06,
      "loss": 0.1874,
      "step": 264
    },
    {
      "epoch": 2.55,
      "learning_rate": 2.099489952662248e-06,
      "loss": 0.2139,
      "step": 265
    },
    {
      "epoch": 2.56,
      "learning_rate": 2.0067541749196453e-06,
      "loss": 0.2447,
      "step": 266
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.916004779705669e-06,
      "loss": 0.2365,
      "step": 267
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.8272517853913775e-06,
      "loss": 0.2033,
      "step": 268
    },
    {
      "epoch": 2.59,
      "learning_rate": 1.740504989953129e-06,
      "loss": 0.2648,
      "step": 269
    },
    {
      "epoch": 2.6,
      "learning_rate": 1.6557739698909436e-06,
      "loss": 0.1375,
      "step": 270
    },
    {
      "epoch": 2.61,
      "learning_rate": 1.573068079171265e-06,
      "loss": 0.1661,
      "step": 271
    },
    {
      "epoch": 2.62,
      "learning_rate": 1.4923964481943599e-06,
      "loss": 0.2003,
      "step": 272
    },
    {
      "epoch": 2.63,
      "learning_rate": 1.4137679827863293e-06,
      "loss": 0.2477,
      "step": 273
    },
    {
      "epoch": 2.64,
      "learning_rate": 1.3371913632159506e-06,
      "loss": 0.2486,
      "step": 274
    },
    {
      "epoch": 2.65,
      "learning_rate": 1.2626750432364077e-06,
      "loss": 0.1322,
      "step": 275
    },
    {
      "epoch": 2.66,
      "learning_rate": 1.1902272491520362e-06,
      "loss": 0.2583,
      "step": 276
    },
    {
      "epoch": 2.67,
      "learning_rate": 1.119855978910165e-06,
      "loss": 0.2542,
      "step": 277
    },
    {
      "epoch": 2.68,
      "learning_rate": 1.0515690012181823e-06,
      "loss": 0.2442,
      "step": 278
    },
    {
      "epoch": 2.69,
      "learning_rate": 9.853738546858893e-07,
      "loss": 0.2323,
      "step": 279
    },
    {
      "epoch": 2.7,
      "learning_rate": 9.212778469932848e-07,
      "loss": 0.2138,
      "step": 280
    },
    {
      "epoch": 2.71,
      "learning_rate": 8.592880540838111e-07,
      "loss": 0.183,
      "step": 281
    },
    {
      "epoch": 2.72,
      "learning_rate": 7.994113193832076e-07,
      "loss": 0.2025,
      "step": 282
    },
    {
      "epoch": 2.73,
      "learning_rate": 7.416542530440174e-07,
      "loss": 0.215,
      "step": 283
    },
    {
      "epoch": 2.73,
      "learning_rate": 6.860232312158554e-07,
      "loss": 0.2263,
      "step": 284
    },
    {
      "epoch": 2.74,
      "learning_rate": 6.325243953415117e-07,
      "loss": 0.2348,
      "step": 285
    },
    {
      "epoch": 2.75,
      "learning_rate": 5.811636514789598e-07,
      "loss": 0.1654,
      "step": 286
    },
    {
      "epoch": 2.76,
      "learning_rate": 5.31946669649337e-07,
      "loss": 0.1634,
      "step": 287
    },
    {
      "epoch": 2.77,
      "learning_rate": 4.848788832110151e-07,
      "loss": 0.2984,
      "step": 288
    },
    {
      "epoch": 2.78,
      "learning_rate": 4.399654882597726e-07,
      "loss": 0.216,
      "step": 289
    },
    {
      "epoch": 2.79,
      "learning_rate": 3.972114430551632e-07,
      "loss": 0.1855,
      "step": 290
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.5662146747315054e-07,
      "loss": 0.138,
      "step": 291
    },
    {
      "epoch": 2.81,
      "learning_rate": 3.1820004248503957e-07,
      "loss": 0.2327,
      "step": 292
    },
    {
      "epoch": 2.82,
      "learning_rate": 2.8195140966281285e-07,
      "loss": 0.19,
      "step": 293
    },
    {
      "epoch": 2.83,
      "learning_rate": 2.478795707108672e-07,
      "loss": 0.2633,
      "step": 294
    },
    {
      "epoch": 2.84,
      "learning_rate": 2.1598828702424467e-07,
      "loss": 0.252,
      "step": 295
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.862810792733849e-07,
      "loss": 0.1745,
      "step": 296
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.5876122701546481e-07,
      "loss": 0.2287,
      "step": 297
    },
    {
      "epoch": 2.87,
      "learning_rate": 1.3343176833234161e-07,
      "loss": 0.2116,
      "step": 298
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.1029549949516549e-07,
      "loss": 0.227,
      "step": 299
    },
    {
      "epoch": 2.89,
      "learning_rate": 8.935497465567989e-08,
      "loss": 0.2794,
      "step": 300
    },
    {
      "epoch": 2.9,
      "learning_rate": 7.06125055642537e-08,
      "loss": 0.2103,
      "step": 301
    },
    {
      "epoch": 2.91,
      "learning_rate": 5.4070161314676574e-08,
      "loss": 0.2188,
      "step": 302
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.972976811573048e-08,
      "loss": 0.2068,
      "step": 303
    },
    {
      "epoch": 2.93,
      "learning_rate": 2.7592909089593224e-08,
      "loss": 0.2937,
      "step": 304
    },
    {
      "epoch": 2.94,
      "learning_rate": 1.766092409706266e-08,
      "loss": 0.3126,
      "step": 305
    },
    {
      "epoch": 2.95,
      "learning_rate": 9.934909589646157e-09,
      "loss": 0.1625,
      "step": 306
    },
    {
      "epoch": 2.96,
      "learning_rate": 4.415718488510745e-09,
      "loss": 0.2465,
      "step": 307
    },
    {
      "epoch": 2.97,
      "learning_rate": 1.1039600903250958e-09,
      "loss": 0.2177,
      "step": 308
    },
    {
      "epoch": 2.98,
      "learning_rate": 0.0,
      "loss": 0.2132,
      "step": 309
    },
    {
      "epoch": 2.98,
      "step": 309,
      "total_flos": 1.3696627367060921e+21,
      "train_loss": 0.5500956740242378,
      "train_runtime": 5438.4326,
      "train_samples_per_second": 5.498,
      "train_steps_per_second": 0.057
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 309,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 200,
  "total_flos": 1.3696627367060921e+21,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}