{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.995319812792512,
  "global_step": 420,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 0.0,
      "loss": 1.4861,
      "step": 1
    },
    {
      "epoch": 0.01,
      "learning_rate": 0.0,
      "loss": 1.4901,
      "step": 2
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.5384615384615387e-06,
      "loss": 1.4646,
      "step": 3
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.0769230769230774e-06,
      "loss": 1.44,
      "step": 4
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.615384615384616e-06,
      "loss": 1.4355,
      "step": 5
    },
    {
      "epoch": 0.04,
      "learning_rate": 6.153846153846155e-06,
      "loss": 1.1764,
      "step": 6
    },
    {
      "epoch": 0.05,
      "learning_rate": 7.692307692307694e-06,
      "loss": 1.0584,
      "step": 7
    },
    {
      "epoch": 0.06,
      "learning_rate": 9.230769230769232e-06,
      "loss": 1.0187,
      "step": 8
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.076923076923077e-05,
      "loss": 1.0039,
      "step": 9
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.230769230769231e-05,
      "loss": 0.9911,
      "step": 10
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.3846153846153847e-05,
      "loss": 0.972,
      "step": 11
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.5384615384615387e-05,
      "loss": 0.9573,
      "step": 12
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.6923076923076924e-05,
      "loss": 0.9277,
      "step": 13
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.8461538461538465e-05,
      "loss": 0.8686,
      "step": 14
    },
    {
      "epoch": 0.11,
      "learning_rate": 2e-05,
      "loss": 0.9066,
      "step": 15
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9999702094326033e-05,
      "loss": 0.9442,
      "step": 16
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9998808395053687e-05,
      "loss": 0.8849,
      "step": 17
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.999731895543058e-05,
      "loss": 0.8767,
      "step": 18
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9995233864199213e-05,
      "loss": 0.9332,
      "step": 19
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9992553245591694e-05,
      "loss": 0.9058,
      "step": 20
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9989277259322314e-05,
      "loss": 0.9563,
      "step": 21
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.998540610057806e-05,
      "loss": 0.9188,
      "step": 22
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9980940000006956e-05,
      "loss": 0.9445,
      "step": 23
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.997587922370434e-05,
      "loss": 0.8958,
      "step": 24
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.997022407319702e-05,
      "loss": 0.8969,
      "step": 25
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9963974885425267e-05,
      "loss": 0.9201,
      "step": 26
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9957132032722787e-05,
      "loss": 0.8686,
      "step": 27
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9949695922794508e-05,
      "loss": 0.8845,
      "step": 28
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.99416669986923e-05,
      "loss": 0.9125,
      "step": 29
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9933045738788564e-05,
      "loss": 0.9097,
      "step": 30
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.992383265674776e-05,
      "loss": 0.886,
      "step": 31
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.991402830149576e-05,
      "loss": 0.9274,
      "step": 32
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9903633257187186e-05,
      "loss": 0.9153,
      "step": 33
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9892648143170565e-05,
      "loss": 0.9458,
      "step": 34
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9881073613951464e-05,
      "loss": 0.9064,
      "step": 35
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.986891035915346e-05,
      "loss": 0.8758,
      "step": 36
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9856159103477085e-05,
      "loss": 0.8764,
      "step": 37
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.984282060665662e-05,
      "loss": 0.9475,
      "step": 38
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9828895663414838e-05,
      "loss": 0.883,
      "step": 39
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9814385103415662e-05,
      "loss": 0.8835,
      "step": 40
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9799289791214725e-05,
      "loss": 0.8706,
      "step": 41
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9783610626207855e-05,
      "loss": 0.923,
      "step": 42
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9767348542577496e-05,
      "loss": 0.8666,
      "step": 43
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9750504509237046e-05,
      "loss": 0.882,
      "step": 44
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9733079529773123e-05,
      "loss": 0.8919,
      "step": 45
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9715074642385785e-05,
      "loss": 0.911,
      "step": 46
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9696490919826647e-05,
      "loss": 0.9278,
      "step": 47
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.967732946933499e-05,
      "loss": 0.8796,
      "step": 48
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.965759143257178e-05,
      "loss": 0.9192,
      "step": 49
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9637277985551643e-05,
      "loss": 0.8925,
      "step": 50
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9616390338572805e-05,
      "loss": 0.9026,
      "step": 51
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9594929736144978e-05,
      "loss": 0.8203,
      "step": 52
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.95728974569152e-05,
      "loss": 0.8867,
      "step": 53
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9550294813591685e-05,
      "loss": 0.8783,
      "step": 54
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9527123152865562e-05,
      "loss": 0.897,
      "step": 55
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.950338385533067e-05,
      "loss": 0.8365,
      "step": 56
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9479078335401297e-05,
      "loss": 0.8951,
      "step": 57
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9454208041227905e-05,
      "loss": 0.8633,
      "step": 58
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9428774454610845e-05,
      "loss": 0.9022,
      "step": 59
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.940277909091206e-05,
      "loss": 0.885,
      "step": 60
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.937622349896483e-05,
      "loss": 0.8765,
      "step": 61
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.9349109260981455e-05,
      "loss": 0.8465,
      "step": 62
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.9321437992458996e-05,
      "loss": 0.8642,
      "step": 63
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.929321134208304e-05,
      "loss": 0.8872,
      "step": 64
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.9264430991629447e-05,
      "loss": 0.9043,
      "step": 65
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9235098655864156e-05,
      "loss": 0.9398,
      "step": 66
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.920521608244102e-05,
      "loss": 0.9099,
      "step": 67
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.9174785051797668e-05,
      "loss": 0.8736,
      "step": 68
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9143807377049443e-05,
      "loss": 0.7984,
      "step": 69
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.911228490388136e-05,
      "loss": 0.8401,
      "step": 70
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.9080219510438137e-05,
      "loss": 0.8782,
      "step": 71
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.9047613107212314e-05,
      "loss": 0.8569,
      "step": 72
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.9014467636930387e-05,
      "loss": 0.8467,
      "step": 73
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.8980785074437095e-05,
      "loss": 0.8492,
      "step": 74
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.8946567426577724e-05,
      "loss": 0.8786,
      "step": 75
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.8911816732078577e-05,
      "loss": 0.8782,
      "step": 76
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.8876535061425454e-05,
      "loss": 0.8979,
      "step": 77
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.884072451674034e-05,
      "loss": 0.906,
      "step": 78
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.880438723165612e-05,
      "loss": 0.9005,
      "step": 79
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8767525371189473e-05,
      "loss": 0.828,
      "step": 80
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8730141131611882e-05,
      "loss": 0.8725,
      "step": 81
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.869223674031876e-05,
      "loss": 0.8478,
      "step": 82
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.865381445569676e-05,
      "loss": 0.8932,
      "step": 83
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.861487656698919e-05,
      "loss": 0.8847,
      "step": 84
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8575425394159653e-05,
      "loss": 0.9109,
      "step": 85
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.8535463287753797e-05,
      "loss": 0.8571,
      "step": 86
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.849499262875927e-05,
      "loss": 0.8681,
      "step": 87
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.845401582846385e-05,
      "loss": 0.8969,
      "step": 88
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.8412535328311813e-05,
      "loss": 0.8889,
      "step": 89
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8370553599758424e-05,
      "loss": 0.8971,
      "step": 90
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8328073144122708e-05,
      "loss": 0.8818,
      "step": 91
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.8285096492438424e-05,
      "loss": 0.8723,
      "step": 92
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.8241626205303245e-05,
      "loss": 0.8822,
      "step": 93
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.8197664872726206e-05,
      "loss": 0.852,
      "step": 94
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.8153215113973398e-05,
      "loss": 0.8946,
      "step": 95
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.810827957741188e-05,
      "loss": 0.8812,
      "step": 96
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.8062860940351916e-05,
      "loss": 0.8572,
      "step": 97
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.8016961908887444e-05,
      "loss": 0.8703,
      "step": 98
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.7970585217734843e-05,
      "loss": 0.8565,
      "step": 99
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.792373363007e-05,
      "loss": 0.8724,
      "step": 100
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7876409937363677e-05,
      "loss": 0.8421,
      "step": 101
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7828616959215185e-05,
      "loss": 0.8504,
      "step": 102
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7780357543184396e-05,
      "loss": 0.8842,
      "step": 103
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7731634564622087e-05,
      "loss": 0.8467,
      "step": 104
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.768245092649861e-05,
      "loss": 0.893,
      "step": 105
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.763280955923093e-05,
      "loss": 0.8401,
      "step": 106
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.7582713420508052e-05,
      "loss": 0.8824,
      "step": 107
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.7532165495114765e-05,
      "loss": 0.8969,
      "step": 108
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.748116879475383e-05,
      "loss": 0.8517,
      "step": 109
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.7429726357866516e-05,
      "loss": 0.9263,
      "step": 110
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.7377841249451596e-05,
      "loss": 0.8942,
      "step": 111
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.7325516560882706e-05,
      "loss": 0.8849,
      "step": 112
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.727275540972417e-05,
      "loss": 0.8786,
      "step": 113
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.7219560939545246e-05,
      "loss": 0.8361,
      "step": 114
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.7165936319732833e-05,
      "loss": 0.8518,
      "step": 115
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.711188474530263e-05,
      "loss": 0.8686,
      "step": 116
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.7057409436708783e-05,
      "loss": 0.8457,
      "step": 117
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.700251363965199e-05,
      "loss": 0.8331,
      "step": 118
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6947200624886145e-05,
      "loss": 0.8336,
      "step": 119
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6891473688023425e-05,
      "loss": 0.8896,
      "step": 120
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6835336149337976e-05,
      "loss": 0.8698,
      "step": 121
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.677879135356805e-05,
      "loss": 0.8493,
      "step": 122
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6721842669716752e-05,
      "loss": 0.8637,
      "step": 123
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.666449349085129e-05,
      "loss": 0.8614,
      "step": 124
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6606747233900816e-05,
      "loss": 0.8878,
      "step": 125
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6548607339452853e-05,
      "loss": 0.8685,
      "step": 126
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6490077271548287e-05,
      "loss": 0.8429,
      "step": 127
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.6431160517474986e-05,
      "loss": 0.8828,
      "step": 128
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.637186058756001e-05,
      "loss": 0.8589,
      "step": 129
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6312181014960483e-05,
      "loss": 0.864,
      "step": 130
    },
    {
      "epoch": 0.93,
      "learning_rate": 1.6252125355453058e-05,
      "loss": 0.8906,
      "step": 131
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.619169718722209e-05,
      "loss": 0.854,
      "step": 132
    },
    {
      "epoch": 0.95,
      "learning_rate": 1.6130900110646404e-05,
      "loss": 0.9064,
      "step": 133
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.6069737748084823e-05,
      "loss": 0.9017,
      "step": 134
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.600821374366031e-05,
      "loss": 0.8955,
      "step": 135
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.594633176304287e-05,
      "loss": 0.8569,
      "step": 136
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.5884095493231123e-05,
      "loss": 0.8699,
      "step": 137
    },
    {
      "epoch": 0.98,
      "learning_rate": 1.582150864233266e-05,
      "loss": 0.8945,
      "step": 138
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.5758574939343073e-05,
      "loss": 0.8861,
      "step": 139
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.569529813392381e-05,
      "loss": 0.8806,
      "step": 140
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5631681996178735e-05,
      "loss": 0.7215,
      "step": 141
    },
    {
      "epoch": 1.01,
      "learning_rate": 1.5567730316429536e-05,
      "loss": 0.6521,
      "step": 142
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.5503446904989856e-05,
      "loss": 0.6706,
      "step": 143
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.54388355919383e-05,
      "loss": 0.6824,
      "step": 144
    },
    {
      "epoch": 1.03,
      "learning_rate": 1.537390022689022e-05,
      "loss": 0.6543,
      "step": 145
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.530864467876836e-05,
      "loss": 0.6559,
      "step": 146
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.5243072835572319e-05,
      "loss": 0.6764,
      "step": 147
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.5177188604146929e-05,
      "loss": 0.6662,
      "step": 148
    },
    {
      "epoch": 1.06,
      "learning_rate": 1.5110995909949465e-05,
      "loss": 0.6668,
      "step": 149
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.504449869681576e-05,
      "loss": 0.6746,
      "step": 150
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.4977700926725231e-05,
      "loss": 0.6726,
      "step": 151
    },
    {
      "epoch": 1.08,
      "learning_rate": 1.4910606579564827e-05,
      "loss": 0.6261,
      "step": 152
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.4843219652891889e-05,
      "loss": 0.6561,
      "step": 153
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.4775544161695975e-05,
      "loss": 0.6725,
      "step": 154
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4707584138159652e-05,
      "loss": 0.6421,
      "step": 155
    },
    {
      "epoch": 1.11,
      "learning_rate": 1.4639343631418239e-05,
      "loss": 0.6568,
      "step": 156
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.457082670731857e-05,
      "loss": 0.6381,
      "step": 157
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.4502037448176734e-05,
      "loss": 0.6663,
      "step": 158
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.4432979952534853e-05,
      "loss": 0.6344,
      "step": 159
    },
    {
      "epoch": 1.14,
      "learning_rate": 1.4363658334916883e-05,
      "loss": 0.6778,
      "step": 160
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.4294076725583463e-05,
      "loss": 0.6412,
      "step": 161
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4224239270285847e-05,
      "loss": 0.6905,
      "step": 162
    },
    {
      "epoch": 1.16,
      "learning_rate": 1.4154150130018867e-05,
      "loss": 0.6732,
      "step": 163
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.4083813480773036e-05,
      "loss": 0.6823,
      "step": 164
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.4013233513285734e-05,
      "loss": 0.6621,
      "step": 165
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.394241443279152e-05,
      "loss": 0.6802,
      "step": 166
    },
    {
      "epoch": 1.19,
      "learning_rate": 1.3871360458771575e-05,
      "loss": 0.6327,
      "step": 167
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.38000758247023e-05,
      "loss": 0.6688,
      "step": 168
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.3728564777803089e-05,
      "loss": 0.6781,
      "step": 169
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.3656831578783263e-05,
      "loss": 0.6387,
      "step": 170
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.3584880501588225e-05,
      "loss": 0.6211,
      "step": 171
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.35127158331448e-05,
      "loss": 0.6674,
      "step": 172
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.3440341873105834e-05,
      "loss": 0.664,
      "step": 173
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.3367762933593989e-05,
      "loss": 0.6374,
      "step": 174
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.3294983338944842e-05,
      "loss": 0.7106,
      "step": 175
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.3222007425449234e-05,
      "loss": 0.6743,
      "step": 176
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.314883954109491e-05,
      "loss": 0.6266,
      "step": 177
    },
    {
      "epoch": 1.27,
      "learning_rate": 1.3075484045307443e-05,
      "loss": 0.6409,
      "step": 178
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.3001945308690514e-05,
      "loss": 0.696,
      "step": 179
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.2928227712765504e-05,
      "loss": 0.6427,
      "step": 180
    },
    {
      "epoch": 1.29,
      "learning_rate": 1.2854335649710436e-05,
      "loss": 0.687,
      "step": 181
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.2780273522098276e-05,
      "loss": 0.624,
      "step": 182
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.2706045742634637e-05,
      "loss": 0.6444,
      "step": 183
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.2631656733894842e-05,
      "loss": 0.64,
      "step": 184
    },
    {
      "epoch": 1.32,
      "learning_rate": 1.2557110928060456e-05,
      "loss": 0.6345,
      "step": 185
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.2482412766655183e-05,
      "loss": 0.6863,
      "step": 186
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.2407566700280247e-05,
      "loss": 0.6546,
      "step": 187
    },
    {
      "epoch": 1.34,
      "learning_rate": 1.2332577188349217e-05,
      "loss": 0.6767,
      "step": 188
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.2257448698822314e-05,
      "loss": 0.6265,
      "step": 189
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.2182185707940196e-05,
      "loss": 0.6039,
      "step": 190
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.2106792699957264e-05,
      "loss": 0.6537,
      "step": 191
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.2031274166874498e-05,
      "loss": 0.671,
      "step": 192
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.1955634608171792e-05,
      "loss": 0.6542,
      "step": 193
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.187987853053989e-05,
      "loss": 0.6243,
      "step": 194
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.1804010447611862e-05,
      "loss": 0.6399,
      "step": 195
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.1728034879694185e-05,
      "loss": 0.6114,
      "step": 196
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.1651956353497418e-05,
      "loss": 0.6876,
      "step": 197
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.1575779401866475e-05,
      "loss": 0.6567,
      "step": 198
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.1499508563510587e-05,
      "loss": 0.6602,
      "step": 199
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.1423148382732854e-05,
      "loss": 0.6618,
      "step": 200
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.1346703409159495e-05,
      "loss": 0.6235,
      "step": 201
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.1270178197468788e-05,
      "loss": 0.6743,
      "step": 202
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.1193577307119687e-05,
      "loss": 0.676,
      "step": 203
    },
    {
      "epoch": 1.45,
      "learning_rate": 1.1116905302080163e-05,
      "loss": 0.6091,
      "step": 204
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.1040166750555288e-05,
      "loss": 0.6412,
      "step": 205
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.0963366224715035e-05,
      "loss": 0.6593,
      "step": 206
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.0886508300421892e-05,
      "loss": 0.6369,
      "step": 207
    },
    {
      "epoch": 1.48,
      "learning_rate": 1.080959755695821e-05,
      "loss": 0.7025,
      "step": 208
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.0732638576753355e-05,
      "loss": 0.6387,
      "step": 209
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.0655635945110705e-05,
      "loss": 0.6821,
      "step": 210
    },
    {
      "epoch": 1.5,
      "learning_rate": 1.0578594249934433e-05,
      "loss": 0.673,
      "step": 211
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.0501518081456164e-05,
      "loss": 0.6481,
      "step": 212
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.0424412031961485e-05,
      "loss": 0.6704,
      "step": 213
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.0347280695516319e-05,
      "loss": 0.6656,
      "step": 214
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.0270128667693225e-05,
      "loss": 0.644,
      "step": 215
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.0192960545297568e-05,
      "loss": 0.6596,
      "step": 216
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.011578092609365e-05,
      "loss": 0.6377,
      "step": 217
    },
    {
      "epoch": 1.55,
      "learning_rate": 1.0038594408530768e-05,
      "loss": 0.6317,
      "step": 218
    },
    {
      "epoch": 1.56,
      "learning_rate": 9.96140559146923e-06,
      "loss": 0.6491,
      "step": 219
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.884219073906353e-06,
      "loss": 0.6474,
      "step": 220
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.807039454702436e-06,
      "loss": 0.6897,
      "step": 221
    },
    {
      "epoch": 1.58,
      "learning_rate": 9.729871332306775e-06,
      "loss": 0.6109,
      "step": 222
    },
    {
      "epoch": 1.59,
      "learning_rate": 9.652719304483683e-06,
      "loss": 0.6783,
      "step": 223
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.57558796803852e-06,
      "loss": 0.6471,
      "step": 224
    },
    {
      "epoch": 1.6,
      "learning_rate": 9.498481918543836e-06,
      "loss": 0.6259,
      "step": 225
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.42140575006557e-06,
      "loss": 0.6444,
      "step": 226
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.344364054889298e-06,
      "loss": 0.6624,
      "step": 227
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.267361423246645e-06,
      "loss": 0.6863,
      "step": 228
    },
    {
      "epoch": 1.63,
      "learning_rate": 9.190402443041792e-06,
      "loss": 0.643,
      "step": 229
    },
    {
      "epoch": 1.64,
      "learning_rate": 9.11349169957811e-06,
      "loss": 0.6617,
      "step": 230
    },
    {
      "epoch": 1.65,
      "learning_rate": 9.036633775284968e-06,
      "loss": 0.6689,
      "step": 231
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.959833249444715e-06,
      "loss": 0.6269,
      "step": 232
    },
    {
      "epoch": 1.66,
      "learning_rate": 8.883094697919839e-06,
      "loss": 0.6601,
      "step": 233
    },
    {
      "epoch": 1.67,
      "learning_rate": 8.806422692880318e-06,
      "loss": 0.6672,
      "step": 234
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.729821802531213e-06,
      "loss": 0.6126,
      "step": 235
    },
    {
      "epoch": 1.68,
      "learning_rate": 8.653296590840509e-06,
      "loss": 0.6506,
      "step": 236
    },
    {
      "epoch": 1.69,
      "learning_rate": 8.576851617267151e-06,
      "loss": 0.6503,
      "step": 237
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.500491436489413e-06,
      "loss": 0.6969,
      "step": 238
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.424220598133526e-06,
      "loss": 0.6609,
      "step": 239
    },
    {
      "epoch": 1.71,
      "learning_rate": 8.348043646502588e-06,
      "loss": 0.648,
      "step": 240
    },
    {
      "epoch": 1.72,
      "learning_rate": 8.271965120305815e-06,
      "loss": 0.7073,
      "step": 241
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.19598955238814e-06,
      "loss": 0.6661,
      "step": 242
    },
    {
      "epoch": 1.73,
      "learning_rate": 8.120121469460114e-06,
      "loss": 0.727,
      "step": 243
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.04436539182821e-06,
      "loss": 0.6866,
      "step": 244
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.968725833125505e-06,
      "loss": 0.6556,
      "step": 245
    },
    {
      "epoch": 1.75,
      "learning_rate": 7.89320730004274e-06,
      "loss": 0.6132,
      "step": 246
    },
    {
      "epoch": 1.76,
      "learning_rate": 7.81781429205981e-06,
      "loss": 0.6601,
      "step": 247
    },
    {
      "epoch": 1.77,
      "learning_rate": 7.74255130117769e-06,
      "loss": 0.6627,
      "step": 248
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.667422811650786e-06,
      "loss": 0.6766,
      "step": 249
    },
    {
      "epoch": 1.78,
      "learning_rate": 7.592433299719757e-06,
      "loss": 0.6407,
      "step": 250
    },
    {
      "epoch": 1.79,
      "learning_rate": 7.51758723334482e-06,
      "loss": 0.6199,
      "step": 251
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.442889071939548e-06,
      "loss": 0.665,
      "step": 252
    },
    {
      "epoch": 1.8,
      "learning_rate": 7.368343266105162e-06,
      "loss": 0.671,
      "step": 253
    },
    {
      "epoch": 1.81,
      "learning_rate": 7.293954257365368e-06,
      "loss": 0.6747,
      "step": 254
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.2197264779017275e-06,
      "loss": 0.6633,
      "step": 255
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.145664350289566e-06,
      "loss": 0.6527,
      "step": 256
    },
    {
      "epoch": 1.83,
      "learning_rate": 7.071772287234497e-06,
      "loss": 0.6978,
      "step": 257
    },
    {
      "epoch": 1.84,
      "learning_rate": 6.998054691309489e-06,
      "loss": 0.6754,
      "step": 258
    },
    {
      "epoch": 1.85,
      "learning_rate": 6.924515954692563e-06,
      "loss": 0.66,
      "step": 259
    },
    {
      "epoch": 1.85,
      "learning_rate": 6.851160458905093e-06,
      "loss": 0.6229,
      "step": 260
    },
    {
      "epoch": 1.86,
      "learning_rate": 6.777992574550767e-06,
      "loss": 0.6619,
      "step": 261
    },
    {
      "epoch": 1.87,
      "learning_rate": 6.705016661055162e-06,
      "loss": 0.6291,
      "step": 262
    },
    {
      "epoch": 1.88,
      "learning_rate": 6.632237066406014e-06,
      "loss": 0.6353,
      "step": 263
    },
    {
      "epoch": 1.88,
      "learning_rate": 6.559658126894169e-06,
      "loss": 0.6533,
      "step": 264
    },
    {
      "epoch": 1.89,
      "learning_rate": 6.487284166855203e-06,
      "loss": 0.6381,
      "step": 265
    },
    {
      "epoch": 1.9,
      "learning_rate": 6.4151194984117774e-06,
      "loss": 0.6156,
      "step": 266
    },
    {
      "epoch": 1.9,
      "learning_rate": 6.343168421216741e-06,
      "loss": 0.6582,
      "step": 267
    },
    {
      "epoch": 1.91,
      "learning_rate": 6.2714352221969155e-06,
      "loss": 0.6862,
      "step": 268
    },
    {
      "epoch": 1.92,
      "learning_rate": 6.199924175297701e-06,
      "loss": 0.6487,
      "step": 269
    },
    {
      "epoch": 1.93,
      "learning_rate": 6.128639541228427e-06,
      "loss": 0.6534,
      "step": 270
    },
    {
      "epoch": 1.93,
      "learning_rate": 6.057585567208484e-06,
      "loss": 0.6827,
      "step": 271
    },
    {
      "epoch": 1.94,
      "learning_rate": 5.986766486714268e-06,
      "loss": 0.6595,
      "step": 272
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.916186519226966e-06,
      "loss": 0.6754,
      "step": 273
    },
    {
      "epoch": 1.95,
      "learning_rate": 5.845849869981137e-06,
      "loss": 0.6635,
      "step": 274
    },
    {
      "epoch": 1.96,
      "learning_rate": 5.775760729714155e-06,
      "loss": 0.6604,
      "step": 275
    },
    {
      "epoch": 1.97,
      "learning_rate": 5.705923274416536e-06,
      "loss": 0.6956,
      "step": 276
    },
    {
      "epoch": 1.98,
      "learning_rate": 5.636341665083121e-06,
      "loss": 0.6262,
      "step": 277
    },
    {
      "epoch": 1.98,
      "learning_rate": 5.5670200474651505e-06,
      "loss": 0.6186,
      "step": 278
    },
    {
      "epoch": 1.99,
      "learning_rate": 5.497962551823266e-06,
      "loss": 0.6558,
      "step": 279
    },
    {
      "epoch": 2.0,
      "learning_rate": 5.429173292681433e-06,
      "loss": 0.7025,
      "step": 280
    },
    {
      "epoch": 2.0,
      "learning_rate": 5.3606563685817646e-06,
      "loss": 0.5535,
      "step": 281
    },
    {
      "epoch": 2.01,
      "learning_rate": 5.29241586184035e-06,
      "loss": 0.5372,
      "step": 282
    },
    {
      "epoch": 2.02,
      "learning_rate": 5.224455838304028e-06,
      "loss": 0.5183,
      "step": 283
    },
    {
      "epoch": 2.03,
      "learning_rate": 5.1567803471081164e-06,
      "loss": 0.5192,
      "step": 284
    },
    {
      "epoch": 2.03,
      "learning_rate": 5.089393420435176e-06,
      "loss": 0.5072,
      "step": 285
    },
    {
      "epoch": 2.04,
      "learning_rate": 5.022299073274769e-06,
      "loss": 0.5038,
      "step": 286
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.9555013031842445e-06,
      "loss": 0.5133,
      "step": 287
    },
    {
      "epoch": 2.05,
      "learning_rate": 4.889004090050536e-06,
      "loss": 0.483,
      "step": 288
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.822811395853073e-06,
      "loss": 0.478,
      "step": 289
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.756927164427685e-06,
      "loss": 0.491,
      "step": 290
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.691355321231645e-06,
      "loss": 0.5153,
      "step": 291
    },
    {
      "epoch": 2.08,
      "learning_rate": 4.62609977310978e-06,
      "loss": 0.5066,
      "step": 292
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.561164408061703e-06,
      "loss": 0.495,
      "step": 293
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.496553095010147e-06,
      "loss": 0.5069,
      "step": 294
    },
    {
      "epoch": 2.1,
      "learning_rate": 4.432269683570469e-06,
      "loss": 0.4724,
      "step": 295
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.368318003821266e-06,
      "loss": 0.4922,
      "step": 296
    },
    {
      "epoch": 2.12,
      "learning_rate": 4.304701866076194e-06,
      "loss": 0.495,
      "step": 297
    },
    {
      "epoch": 2.13,
      "learning_rate": 4.241425060656927e-06,
      "loss": 0.5082,
      "step": 298
    },
    {
      "epoch": 2.13,
      "learning_rate": 4.178491357667342e-06,
      "loss": 0.4689,
      "step": 299
    },
    {
      "epoch": 2.14,
      "learning_rate": 4.11590450676888e-06,
      "loss": 0.4802,
      "step": 300
    },
    {
      "epoch": 2.15,
      "learning_rate": 4.053668236957135e-06,
      "loss": 0.4662,
      "step": 301
    },
    {
      "epoch": 2.15,
      "learning_rate": 3.991786256339692e-06,
      "loss": 0.529,
      "step": 302
    },
    {
      "epoch": 2.16,
      "learning_rate": 3.930262251915181e-06,
      "loss": 0.5224,
      "step": 303
    },
    {
      "epoch": 2.17,
      "learning_rate": 3.869099889353597e-06,
      "loss": 0.5176,
      "step": 304
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.8083028127779143e-06,
      "loss": 0.5094,
      "step": 305
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.7478746445469415e-06,
      "loss": 0.4926,
      "step": 306
    },
    {
      "epoch": 2.19,
      "learning_rate": 3.6878189850395186e-06,
      "loss": 0.4941,
      "step": 307
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.628139412439993e-06,
      "loss": 0.5487,
      "step": 308
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.5688394825250193e-06,
      "loss": 0.5081,
      "step": 309
    },
    {
      "epoch": 2.21,
      "learning_rate": 3.5099227284517145e-06,
      "loss": 0.4889,
      "step": 310
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.4513926605471504e-06,
      "loss": 0.4938,
      "step": 311
    },
    {
      "epoch": 2.23,
      "learning_rate": 3.3932527660991877e-06,
      "loss": 0.4837,
      "step": 312
    },
    {
      "epoch": 2.23,
      "learning_rate": 3.335506509148716e-06,
      "loss": 0.4979,
      "step": 313
    },
    {
      "epoch": 2.24,
      "learning_rate": 3.2781573302832493e-06,
      "loss": 0.4936,
      "step": 314
    },
    {
      "epoch": 2.25,
      "learning_rate": 3.221208646431949e-06,
      "loss": 0.4766,
      "step": 315
    },
    {
      "epoch": 2.25,
      "learning_rate": 3.1646638506620265e-06,
      "loss": 0.5223,
      "step": 316
    },
    {
      "epoch": 2.26,
      "learning_rate": 3.108526311976574e-06,
      "loss": 0.498,
      "step": 317
    },
    {
      "epoch": 2.27,
      "learning_rate": 3.0527993751138575e-06,
      "loss": 0.4948,
      "step": 318
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.997486360348011e-06,
      "loss": 0.4607,
      "step": 319
    },
    {
      "epoch": 2.28,
      "learning_rate": 2.942590563291219e-06,
      "loss": 0.5286,
      "step": 320
    },
    {
      "epoch": 2.29,
      "learning_rate": 2.888115254697371e-06,
      "loss": 0.5225,
      "step": 321
    },
    {
      "epoch": 2.3,
      "learning_rate": 2.8340636802671716e-06,
      "loss": 0.4547,
      "step": 322
    },
    {
      "epoch": 2.3,
      "learning_rate": 2.780439060454756e-06,
      "loss": 0.4879,
      "step": 323
    },
    {
      "epoch": 2.31,
      "learning_rate": 2.727244590275834e-06,
      "loss": 0.5063,
      "step": 324
    },
    {
      "epoch": 2.32,
      "learning_rate": 2.674483439117296e-06,
      "loss": 0.5119,
      "step": 325
    },
    {
      "epoch": 2.32,
      "learning_rate": 2.622158750548407e-06,
      "loss": 0.5264,
      "step": 326
    },
    {
      "epoch": 2.33,
      "learning_rate": 2.5702736421334853e-06,
      "loss": 0.5035,
      "step": 327
    },
    {
      "epoch": 2.34,
      "learning_rate": 2.518831205246174e-06,
      "loss": 0.5364,
      "step": 328
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.4678345048852326e-06,
      "loss": 0.4828,
      "step": 329
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.4172865794919477e-06,
      "loss": 0.4919,
      "step": 330
    },
    {
      "epoch": 2.36,
      "learning_rate": 2.3671904407690704e-06,
      "loss": 0.5037,
      "step": 331
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.317549073501396e-06,
      "loss": 0.5387,
      "step": 332
    },
    {
      "epoch": 2.37,
      "learning_rate": 2.268365435377915e-06,
      "loss": 0.4866,
      "step": 333
    },
    {
      "epoch": 2.38,
      "learning_rate": 2.2196424568156073e-06,
      "loss": 0.5168,
      "step": 334
    },
    {
      "epoch": 2.39,
      "learning_rate": 2.171383040784819e-06,
      "loss": 0.5497,
      "step": 335
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.123590062636328e-06,
      "loss": 0.4872,
      "step": 336
    },
    {
      "epoch": 2.4,
      "learning_rate": 2.076266369930002e-06,
      "loss": 0.4733,
      "step": 337
    },
    {
      "epoch": 2.41,
      "learning_rate": 2.02941478226516e-06,
      "loss": 0.4585,
      "step": 338
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.983038091112558e-06,
      "loss": 0.5143,
      "step": 339
    },
    {
      "epoch": 2.42,
      "learning_rate": 1.9371390596480865e-06,
      "loss": 0.5329,
      "step": 340
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.8917204225881236e-06,
      "loss": 0.4934,
      "step": 341
    },
    {
      "epoch": 2.44,
      "learning_rate": 1.8467848860266047e-06,
      "loss": 0.5128,
      "step": 342
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.8023351272737955e-06,
      "loss": 0.5159,
      "step": 343
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.7583737946967606e-06,
      "loss": 0.5194,
      "step": 344
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.7149035075615795e-06,
      "loss": 0.5457,
      "step": 345
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.6719268558772927e-06,
      "loss": 0.4861,
      "step": 346
    },
    {
      "epoch": 2.47,
      "learning_rate": 1.6294464002415789e-06,
      "loss": 0.4779,
      "step": 347
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.587464671688187e-06,
      "loss": 0.4923,
      "step": 348
    },
    {
      "epoch": 2.49,
      "learning_rate": 1.54598417153615e-06,
      "loss": 0.4961,
      "step": 349
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.5050073712407354e-06,
      "loss": 0.5059,
      "step": 350
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.464536712246205e-06,
      "loss": 0.504,
      "step": 351
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.4245746058403464e-06,
      "loss": 0.489,
      "step": 352
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.385123433010812e-06,
      "loss": 0.4703,
      "step": 353
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.3461855443032456e-06,
      "loss": 0.5223,
      "step": 354
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.3077632596812407e-06,
      "loss": 0.4904,
      "step": 355
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.2698588683881185e-06,
      "loss": 0.4623,
      "step": 356
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.2324746288105272e-06,
      "loss": 0.5149,
      "step": 357
    },
    {
      "epoch": 2.55,
      "learning_rate": 1.1956127683438822e-06,
      "loss": 0.4923,
      "step": 358
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.1592754832596632e-06,
      "loss": 0.5164,
      "step": 359
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.1234649385745488e-06,
      "loss": 0.5093,
      "step": 360
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.0881832679214276e-06,
      "loss": 0.4929,
      "step": 361
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.0534325734222773e-06,
      "loss": 0.5419,
      "step": 362
    },
    {
      "epoch": 2.59,
      "learning_rate": 1.0192149255629114e-06,
      "loss": 0.5164,
      "step": 363
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.855323630696146e-07,
      "loss": 0.4707,
      "step": 364
    },
    {
      "epoch": 2.6,
      "learning_rate": 9.523868927876889e-07,
      "loss": 0.4744,
      "step": 365
    },
    {
      "epoch": 2.61,
      "learning_rate": 9.197804895618623e-07,
      "loss": 0.4753,
      "step": 366
    },
    {
      "epoch": 2.62,
      "learning_rate": 8.87715096118642e-07,
      "loss": 0.4882,
      "step": 367
    },
    {
      "epoch": 2.62,
      "learning_rate": 8.561926229505601e-07,
      "loss": 0.4818,
      "step": 368
    },
    {
      "epoch": 2.63,
      "learning_rate": 8.252149482023363e-07,
      "loss": 0.4743,
      "step": 369
    },
    {
      "epoch": 2.64,
      "learning_rate": 7.947839175589845e-07,
      "loss": 0.4877,
      "step": 370
    },
    {
      "epoch": 2.65,
      "learning_rate": 7.649013441358466e-07,
      "loss": 0.5093,
      "step": 371
    },
    {
      "epoch": 2.65,
      "learning_rate": 7.355690083705547e-07,
      "loss": 0.4711,
      "step": 372
    },
    {
      "epoch": 2.66,
      "learning_rate": 7.067886579169625e-07,
      "loss": 0.477,
      "step": 373
    },
    {
      "epoch": 2.67,
      "learning_rate": 6.78562007541006e-07,
      "loss": 0.4992,
      "step": 374
    },
    {
      "epoch": 2.67,
      "learning_rate": 6.508907390185504e-07,
      "loss": 0.4993,
      "step": 375
    },
    {
      "epoch": 2.68,
      "learning_rate": 6.237765010351715e-07,
      "loss": 0.466,
      "step": 376
    },
    {
      "epoch": 2.69,
      "learning_rate": 5.972209090879389e-07,
      "loss": 0.4727,
      "step": 377
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.71225545389158e-07,
      "loss": 0.4927,
      "step": 378
    },
    {
      "epoch": 2.7,
      "learning_rate": 5.457919587720961e-07,
      "loss": 0.5099,
      "step": 379
    },
    {
      "epoch": 2.71,
      "learning_rate": 5.209216645987036e-07,
      "loss": 0.5217,
      "step": 380
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.966161446693329e-07,
      "loss": 0.5118,
      "step": 381
    },
    {
      "epoch": 2.72,
      "learning_rate": 4.728768471344425e-07,
      "loss": 0.5118,
      "step": 382
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.4970518640831687e-07,
      "loss": 0.5028,
      "step": 383
    },
    {
      "epoch": 2.74,
      "learning_rate": 4.271025430847986e-07,
      "loss": 0.4986,
      "step": 384
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.0507026385502747e-07,
      "loss": 0.4744,
      "step": 385
    },
    {
      "epoch": 2.75,
      "learning_rate": 3.836096614271989e-07,
      "loss": 0.4928,
      "step": 386
    },
    {
      "epoch": 2.76,
      "learning_rate": 3.6272201444836006e-07,
      "loss": 0.467,
      "step": 387
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.424085674282229e-07,
      "loss": 0.4756,
      "step": 388
    },
    {
      "epoch": 2.77,
      "learning_rate": 3.226705306650113e-07,
      "loss": 0.5302,
      "step": 389
    },
    {
      "epoch": 2.78,
      "learning_rate": 3.0350908017335423e-07,
      "loss": 0.4687,
      "step": 390
    },
    {
      "epoch": 2.79,
      "learning_rate": 2.8492535761421635e-07,
      "loss": 0.4699,
      "step": 391
    },
    {
      "epoch": 2.8,
      "learning_rate": 2.6692047022687684e-07,
      "loss": 0.472,
      "step": 392
    },
    {
      "epoch": 2.8,
      "learning_rate": 2.494954907629565e-07,
      "loss": 0.5102,
      "step": 393
    },
    {
      "epoch": 2.81,
      "learning_rate": 2.3265145742250694e-07,
      "loss": 0.4985,
      "step": 394
    },
    {
      "epoch": 2.82,
      "learning_rate": 2.1638937379214852e-07,
      "loss": 0.4999,
      "step": 395
    },
    {
      "epoch": 2.82,
      "learning_rate": 2.0071020878527857e-07,
      "loss": 0.4821,
      "step": 396
    },
    {
      "epoch": 2.83,
      "learning_rate": 1.8561489658433963e-07,
      "loss": 0.5049,
      "step": 397
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.711043365851639e-07,
      "loss": 0.4672,
      "step": 398
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.5717939334338184e-07,
      "loss": 0.5159,
      "step": 399
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.4384089652291544e-07,
      "loss": 0.4481,
      "step": 400
    },
    {
      "epoch": 2.86,
      "learning_rate": 1.310896408465401e-07,
      "loss": 0.4825,
      "step": 401
    },
    {
      "epoch": 2.87,
      "learning_rate": 1.1892638604853901e-07,
      "loss": 0.4974,
      "step": 402
    },
    {
      "epoch": 2.87,
      "learning_rate": 1.0735185682943628e-07,
      "loss": 0.5136,
      "step": 403
    },
    {
      "epoch": 2.88,
      "learning_rate": 9.636674281281788e-08,
      "loss": 0.4991,
      "step": 404
    },
    {
      "epoch": 2.89,
      "learning_rate": 8.597169850424136e-08,
      "loss": 0.4636,
      "step": 405
    },
    {
      "epoch": 2.9,
      "learning_rate": 7.616734325224473e-08,
      "loss": 0.5009,
      "step": 406
    },
    {
      "epoch": 2.9,
      "learning_rate": 6.69542612114371e-08,
      "loss": 0.4814,
      "step": 407
    },
    {
      "epoch": 2.91,
      "learning_rate": 5.833300130770436e-08,
      "loss": 0.4765,
      "step": 408
    },
    {
      "epoch": 2.92,
      "learning_rate": 5.030407720549413e-08,
      "loss": 0.4909,
      "step": 409
    },
    {
      "epoch": 2.92,
      "learning_rate": 4.286796727721476e-08,
      "loss": 0.4783,
      "step": 410
    },
    {
      "epoch": 2.93,
      "learning_rate": 3.602511457473479e-08,
      "loss": 0.493,
      "step": 411
    },
    {
      "epoch": 2.94,
      "learning_rate": 2.9775926802984022e-08,
      "loss": 0.4648,
      "step": 412
    },
    {
      "epoch": 2.95,
      "learning_rate": 2.4120776295659675e-08,
      "loss": 0.4917,
      "step": 413
    },
    {
      "epoch": 2.95,
      "learning_rate": 1.905999999304853e-08,
      "loss": 0.4844,
      "step": 414
    },
    {
      "epoch": 2.96,
      "learning_rate": 1.4593899421943003e-08,
      "loss": 0.4925,
      "step": 415
    },
    {
      "epoch": 2.97,
      "learning_rate": 1.0722740677685529e-08,
      "loss": 0.4803,
      "step": 416
    },
    {
      "epoch": 2.97,
      "learning_rate": 7.4467544083067776e-09,
      "loss": 0.4579,
      "step": 417
    },
    {
      "epoch": 2.98,
      "learning_rate": 4.766135800785554e-09,
      "loss": 0.4739,
      "step": 418
    },
    {
      "epoch": 2.99,
      "learning_rate": 2.68104456942031e-09,
      "loss": 0.5126,
      "step": 419
    },
    {
      "epoch": 3.0,
      "learning_rate": 1.1916049463134293e-09,
      "loss": 0.4761,
      "step": 420
    },
    {
      "epoch": 3.0,
      "step": 420,
      "total_flos": 1.7711353317818368e+17,
      "train_loss": 0.6881538674944923,
      "train_runtime": 11850.5144,
      "train_samples_per_second": 18.172,
      "train_steps_per_second": 0.035
    }
  ],
  "max_steps": 420,
  "num_train_epochs": 3,
  "total_flos": 1.7711353317818368e+17,
  "trial_name": null,
  "trial_params": null
}