diff --git "a/checkpoint-9250/trainer_state.json" "b/checkpoint-9250/trainer_state.json" new file mode 100644--- /dev/null +++ "b/checkpoint-9250/trainer_state.json" @@ -0,0 +1,64783 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 7.388411588411588, + "eval_steps": 500, + "global_step": 9250, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0007992007992007992, + "grad_norm": 2.515625, + "learning_rate": 0.0002, + "loss": 3.2116, + "step": 1 + }, + { + "epoch": 0.0015984015984015984, + "grad_norm": 3.046875, + "learning_rate": 0.0002, + "loss": 2.9686, + "step": 2 + }, + { + "epoch": 0.0023976023976023976, + "grad_norm": 2.703125, + "learning_rate": 0.0002, + "loss": 2.734, + "step": 3 + }, + { + "epoch": 0.003196803196803197, + "grad_norm": 4.5625, + "learning_rate": 0.0002, + "loss": 2.4344, + "step": 4 + }, + { + "epoch": 0.003996003996003996, + "grad_norm": 3.5, + "learning_rate": 0.0002, + "loss": 2.1558, + "step": 5 + }, + { + "epoch": 0.004795204795204795, + "grad_norm": 4.5625, + "learning_rate": 0.0002, + "loss": 2.0167, + "step": 6 + }, + { + "epoch": 0.005594405594405594, + "grad_norm": 0.75390625, + "learning_rate": 0.0002, + "loss": 1.9392, + "step": 7 + }, + { + "epoch": 0.006393606393606394, + "grad_norm": 5.90625, + "learning_rate": 0.0002, + "loss": 1.9313, + "step": 8 + }, + { + "epoch": 0.007192807192807193, + "grad_norm": 2.96875, + "learning_rate": 0.0002, + "loss": 1.9121, + "step": 9 + }, + { + "epoch": 0.007992007992007992, + "grad_norm": 8.125, + "learning_rate": 0.0002, + "loss": 1.9119, + "step": 10 + }, + { + "epoch": 0.008791208791208791, + "grad_norm": 10.0625, + "learning_rate": 0.0002, + "loss": 1.9204, + "step": 11 + }, + { + "epoch": 0.00959040959040959, + "grad_norm": 4.65625, + "learning_rate": 0.0002, + "loss": 1.8705, + "step": 12 + }, + { + "epoch": 0.01038961038961039, + "grad_norm": 7.5625, + "learning_rate": 0.0002, + "loss": 1.8554, + "step": 13 + }, + { + "epoch": 0.011188811188811189, + "grad_norm": 10.875, + "learning_rate": 0.0002, + "loss": 1.851, + "step": 14 + }, + { + "epoch": 0.011988011988011988, + "grad_norm": 8.5625, + "learning_rate": 0.0002, + "loss": 1.8158, + "step": 15 + }, + { + "epoch": 0.012787212787212787, + "grad_norm": 1.28125, + "learning_rate": 0.0002, + "loss": 1.7605, + "step": 16 + }, + { + "epoch": 0.013586413586413586, + "grad_norm": 10.25, + "learning_rate": 0.0002, + "loss": 1.7539, + "step": 17 + }, + { + "epoch": 0.014385614385614386, + "grad_norm": 12.3125, + "learning_rate": 0.0002, + "loss": 1.7519, + "step": 18 + }, + { + "epoch": 0.015184815184815185, + "grad_norm": 10.625, + "learning_rate": 0.0002, + "loss": 1.7278, + "step": 19 + }, + { + "epoch": 0.015984015984015984, + "grad_norm": 6.15625, + "learning_rate": 0.0002, + "loss": 1.6722, + "step": 20 + }, + { + "epoch": 0.016783216783216783, + "grad_norm": 1.03125, + "learning_rate": 0.0002, + "loss": 1.6398, + "step": 21 + }, + { + "epoch": 0.017582417582417582, + "grad_norm": 1.2265625, + "learning_rate": 0.0002, + "loss": 1.6283, + "step": 22 + }, + { + "epoch": 0.01838161838161838, + "grad_norm": 0.8515625, + "learning_rate": 0.0002, + "loss": 1.6145, + "step": 23 + }, + { + "epoch": 0.01918081918081918, + "grad_norm": 1.0625, + "learning_rate": 0.0002, + "loss": 1.59, + "step": 24 + }, + { + "epoch": 0.01998001998001998, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 1.566, + "step": 25 + }, + { + "epoch": 
0.02077922077922078, + "grad_norm": 0.71484375, + "learning_rate": 0.0002, + "loss": 1.566, + "step": 26 + }, + { + "epoch": 0.02157842157842158, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 1.5317, + "step": 27 + }, + { + "epoch": 0.022377622377622378, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 1.5039, + "step": 28 + }, + { + "epoch": 0.023176823176823177, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 1.4993, + "step": 29 + }, + { + "epoch": 0.023976023976023976, + "grad_norm": 0.71484375, + "learning_rate": 0.0002, + "loss": 1.4725, + "step": 30 + }, + { + "epoch": 0.024775224775224775, + "grad_norm": 0.859375, + "learning_rate": 0.0002, + "loss": 1.4626, + "step": 31 + }, + { + "epoch": 0.025574425574425574, + "grad_norm": 0.96484375, + "learning_rate": 0.0002, + "loss": 1.4509, + "step": 32 + }, + { + "epoch": 0.026373626373626374, + "grad_norm": 1.1640625, + "learning_rate": 0.0002, + "loss": 1.4543, + "step": 33 + }, + { + "epoch": 0.027172827172827173, + "grad_norm": 0.85546875, + "learning_rate": 0.0002, + "loss": 1.4315, + "step": 34 + }, + { + "epoch": 0.027972027972027972, + "grad_norm": 1.0859375, + "learning_rate": 0.0002, + "loss": 1.4186, + "step": 35 + }, + { + "epoch": 0.02877122877122877, + "grad_norm": 0.75, + "learning_rate": 0.0002, + "loss": 1.404, + "step": 36 + }, + { + "epoch": 0.02957042957042957, + "grad_norm": 0.73828125, + "learning_rate": 0.0002, + "loss": 1.4039, + "step": 37 + }, + { + "epoch": 0.03036963036963037, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 1.3757, + "step": 38 + }, + { + "epoch": 0.03116883116883117, + "grad_norm": 0.69140625, + "learning_rate": 0.0002, + "loss": 1.365, + "step": 39 + }, + { + "epoch": 0.03196803196803197, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 1.3511, + "step": 40 + }, + { + "epoch": 0.03276723276723277, + "grad_norm": 0.734375, + "learning_rate": 0.0002, + "loss": 1.3416, + "step": 41 + }, + { + "epoch": 0.033566433566433566, + "grad_norm": 0.765625, + "learning_rate": 0.0002, + "loss": 1.3243, + "step": 42 + }, + { + "epoch": 0.03436563436563437, + "grad_norm": 0.890625, + "learning_rate": 0.0002, + "loss": 1.3228, + "step": 43 + }, + { + "epoch": 0.035164835164835165, + "grad_norm": 1.0625, + "learning_rate": 0.0002, + "loss": 1.3174, + "step": 44 + }, + { + "epoch": 0.03596403596403597, + "grad_norm": 1.03125, + "learning_rate": 0.0002, + "loss": 1.2993, + "step": 45 + }, + { + "epoch": 0.03676323676323676, + "grad_norm": 1.0234375, + "learning_rate": 0.0002, + "loss": 1.2973, + "step": 46 + }, + { + "epoch": 0.037562437562437566, + "grad_norm": 0.90625, + "learning_rate": 0.0002, + "loss": 1.2756, + "step": 47 + }, + { + "epoch": 0.03836163836163836, + "grad_norm": 0.84375, + "learning_rate": 0.0002, + "loss": 1.2762, + "step": 48 + }, + { + "epoch": 0.039160839160839164, + "grad_norm": 0.828125, + "learning_rate": 0.0002, + "loss": 1.2659, + "step": 49 + }, + { + "epoch": 0.03996003996003996, + "grad_norm": 0.796875, + "learning_rate": 0.0002, + "loss": 1.2436, + "step": 50 + }, + { + "epoch": 0.04075924075924076, + "grad_norm": 0.80078125, + "learning_rate": 0.0002, + "loss": 1.2321, + "step": 51 + }, + { + "epoch": 0.04155844155844156, + "grad_norm": 0.828125, + "learning_rate": 0.0002, + "loss": 1.2447, + "step": 52 + }, + { + "epoch": 0.04235764235764236, + "grad_norm": 0.85546875, + "learning_rate": 0.0002, + "loss": 1.2188, + "step": 53 + }, + { + "epoch": 0.04315684315684316, + "grad_norm": 1.015625, + 
"learning_rate": 0.0002, + "loss": 1.2265, + "step": 54 + }, + { + "epoch": 0.04395604395604396, + "grad_norm": 1.25, + "learning_rate": 0.0002, + "loss": 1.2319, + "step": 55 + }, + { + "epoch": 0.044755244755244755, + "grad_norm": 0.84375, + "learning_rate": 0.0002, + "loss": 1.2058, + "step": 56 + }, + { + "epoch": 0.04555444555444556, + "grad_norm": 1.125, + "learning_rate": 0.0002, + "loss": 1.2101, + "step": 57 + }, + { + "epoch": 0.046353646353646354, + "grad_norm": 0.82421875, + "learning_rate": 0.0002, + "loss": 1.1909, + "step": 58 + }, + { + "epoch": 0.047152847152847156, + "grad_norm": 0.95703125, + "learning_rate": 0.0002, + "loss": 1.2001, + "step": 59 + }, + { + "epoch": 0.04795204795204795, + "grad_norm": 0.9609375, + "learning_rate": 0.0002, + "loss": 1.1865, + "step": 60 + }, + { + "epoch": 0.048751248751248755, + "grad_norm": 1.09375, + "learning_rate": 0.0002, + "loss": 1.1945, + "step": 61 + }, + { + "epoch": 0.04955044955044955, + "grad_norm": 1.03125, + "learning_rate": 0.0002, + "loss": 1.1705, + "step": 62 + }, + { + "epoch": 0.05034965034965035, + "grad_norm": 1.484375, + "learning_rate": 0.0002, + "loss": 1.1735, + "step": 63 + }, + { + "epoch": 0.05114885114885115, + "grad_norm": 0.8359375, + "learning_rate": 0.0002, + "loss": 1.1707, + "step": 64 + }, + { + "epoch": 0.05194805194805195, + "grad_norm": 1.1484375, + "learning_rate": 0.0002, + "loss": 1.184, + "step": 65 + }, + { + "epoch": 0.05274725274725275, + "grad_norm": 0.7421875, + "learning_rate": 0.0002, + "loss": 1.1475, + "step": 66 + }, + { + "epoch": 0.05354645354645355, + "grad_norm": 0.953125, + "learning_rate": 0.0002, + "loss": 1.1537, + "step": 67 + }, + { + "epoch": 0.054345654345654346, + "grad_norm": 0.921875, + "learning_rate": 0.0002, + "loss": 1.151, + "step": 68 + }, + { + "epoch": 0.05514485514485515, + "grad_norm": 1.03125, + "learning_rate": 0.0002, + "loss": 1.1522, + "step": 69 + }, + { + "epoch": 0.055944055944055944, + "grad_norm": 0.98828125, + "learning_rate": 0.0002, + "loss": 1.1533, + "step": 70 + }, + { + "epoch": 0.05674325674325675, + "grad_norm": 0.86328125, + "learning_rate": 0.0002, + "loss": 1.1393, + "step": 71 + }, + { + "epoch": 0.05754245754245754, + "grad_norm": 0.9140625, + "learning_rate": 0.0002, + "loss": 1.1469, + "step": 72 + }, + { + "epoch": 0.058341658341658345, + "grad_norm": 0.85546875, + "learning_rate": 0.0002, + "loss": 1.1306, + "step": 73 + }, + { + "epoch": 0.05914085914085914, + "grad_norm": 0.953125, + "learning_rate": 0.0002, + "loss": 1.1305, + "step": 74 + }, + { + "epoch": 0.059940059940059943, + "grad_norm": 1.1171875, + "learning_rate": 0.0002, + "loss": 1.1233, + "step": 75 + }, + { + "epoch": 0.06073926073926074, + "grad_norm": 0.80078125, + "learning_rate": 0.0002, + "loss": 1.1207, + "step": 76 + }, + { + "epoch": 0.06153846153846154, + "grad_norm": 1.15625, + "learning_rate": 0.0002, + "loss": 1.1107, + "step": 77 + }, + { + "epoch": 0.06233766233766234, + "grad_norm": 0.6640625, + "learning_rate": 0.0002, + "loss": 1.1134, + "step": 78 + }, + { + "epoch": 0.06313686313686313, + "grad_norm": 0.7890625, + "learning_rate": 0.0002, + "loss": 1.1057, + "step": 79 + }, + { + "epoch": 0.06393606393606394, + "grad_norm": 0.71875, + "learning_rate": 0.0002, + "loss": 1.1146, + "step": 80 + }, + { + "epoch": 0.06473526473526474, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 1.1159, + "step": 81 + }, + { + "epoch": 0.06553446553446554, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 1.0952, + "step": 82 + }, + { 
+ "epoch": 0.06633366633366633, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 1.0873, + "step": 83 + }, + { + "epoch": 0.06713286713286713, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 1.0875, + "step": 84 + }, + { + "epoch": 0.06793206793206794, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 1.0839, + "step": 85 + }, + { + "epoch": 0.06873126873126874, + "grad_norm": 0.6640625, + "learning_rate": 0.0002, + "loss": 1.081, + "step": 86 + }, + { + "epoch": 0.06953046953046953, + "grad_norm": 0.765625, + "learning_rate": 0.0002, + "loss": 1.0859, + "step": 87 + }, + { + "epoch": 0.07032967032967033, + "grad_norm": 1.1015625, + "learning_rate": 0.0002, + "loss": 1.0902, + "step": 88 + }, + { + "epoch": 0.07112887112887113, + "grad_norm": 1.6328125, + "learning_rate": 0.0002, + "loss": 1.0967, + "step": 89 + }, + { + "epoch": 0.07192807192807193, + "grad_norm": 0.89453125, + "learning_rate": 0.0002, + "loss": 1.0817, + "step": 90 + }, + { + "epoch": 0.07272727272727272, + "grad_norm": 1.828125, + "learning_rate": 0.0002, + "loss": 1.088, + "step": 91 + }, + { + "epoch": 0.07352647352647353, + "grad_norm": 0.9609375, + "learning_rate": 0.0002, + "loss": 1.0768, + "step": 92 + }, + { + "epoch": 0.07432567432567433, + "grad_norm": 2.046875, + "learning_rate": 0.0002, + "loss": 1.0927, + "step": 93 + }, + { + "epoch": 0.07512487512487513, + "grad_norm": 1.375, + "learning_rate": 0.0002, + "loss": 1.0891, + "step": 94 + }, + { + "epoch": 0.07592407592407592, + "grad_norm": 1.703125, + "learning_rate": 0.0002, + "loss": 1.0823, + "step": 95 + }, + { + "epoch": 0.07672327672327672, + "grad_norm": 1.28125, + "learning_rate": 0.0002, + "loss": 1.0898, + "step": 96 + }, + { + "epoch": 0.07752247752247753, + "grad_norm": 1.1953125, + "learning_rate": 0.0002, + "loss": 1.0714, + "step": 97 + }, + { + "epoch": 0.07832167832167833, + "grad_norm": 1.1484375, + "learning_rate": 0.0002, + "loss": 1.0757, + "step": 98 + }, + { + "epoch": 0.07912087912087912, + "grad_norm": 1.1875, + "learning_rate": 0.0002, + "loss": 1.0757, + "step": 99 + }, + { + "epoch": 0.07992007992007992, + "grad_norm": 1.15625, + "learning_rate": 0.0002, + "loss": 1.0604, + "step": 100 + }, + { + "epoch": 0.08071928071928072, + "grad_norm": 1.03125, + "learning_rate": 0.0002, + "loss": 1.0717, + "step": 101 + }, + { + "epoch": 0.08151848151848153, + "grad_norm": 1.0546875, + "learning_rate": 0.0002, + "loss": 1.0629, + "step": 102 + }, + { + "epoch": 0.08231768231768231, + "grad_norm": 0.94140625, + "learning_rate": 0.0002, + "loss": 1.0499, + "step": 103 + }, + { + "epoch": 0.08311688311688312, + "grad_norm": 0.9765625, + "learning_rate": 0.0002, + "loss": 1.0726, + "step": 104 + }, + { + "epoch": 0.08391608391608392, + "grad_norm": 1.0078125, + "learning_rate": 0.0002, + "loss": 1.0493, + "step": 105 + }, + { + "epoch": 0.08471528471528472, + "grad_norm": 0.890625, + "learning_rate": 0.0002, + "loss": 1.0698, + "step": 106 + }, + { + "epoch": 0.08551448551448551, + "grad_norm": 0.78125, + "learning_rate": 0.0002, + "loss": 1.0598, + "step": 107 + }, + { + "epoch": 0.08631368631368631, + "grad_norm": 0.70703125, + "learning_rate": 0.0002, + "loss": 1.06, + "step": 108 + }, + { + "epoch": 0.08711288711288712, + "grad_norm": 0.625, + "learning_rate": 0.0002, + "loss": 1.046, + "step": 109 + }, + { + "epoch": 0.08791208791208792, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 1.066, + "step": 110 + }, + { + "epoch": 0.08871128871128871, + "grad_norm": 0.59765625, + 
"learning_rate": 0.0002, + "loss": 1.0511, + "step": 111 + }, + { + "epoch": 0.08951048951048951, + "grad_norm": 0.703125, + "learning_rate": 0.0002, + "loss": 1.0463, + "step": 112 + }, + { + "epoch": 0.09030969030969031, + "grad_norm": 0.8359375, + "learning_rate": 0.0002, + "loss": 1.0591, + "step": 113 + }, + { + "epoch": 0.09110889110889112, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 1.0349, + "step": 114 + }, + { + "epoch": 0.0919080919080919, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 1.0605, + "step": 115 + }, + { + "epoch": 0.09270729270729271, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 1.0461, + "step": 116 + }, + { + "epoch": 0.09350649350649351, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 1.0437, + "step": 117 + }, + { + "epoch": 0.09430569430569431, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 1.0402, + "step": 118 + }, + { + "epoch": 0.0951048951048951, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 1.0477, + "step": 119 + }, + { + "epoch": 0.0959040959040959, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 1.0404, + "step": 120 + }, + { + "epoch": 0.0967032967032967, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 1.0353, + "step": 121 + }, + { + "epoch": 0.09750249750249751, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 1.0339, + "step": 122 + }, + { + "epoch": 0.0983016983016983, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 1.0462, + "step": 123 + }, + { + "epoch": 0.0991008991008991, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 1.0377, + "step": 124 + }, + { + "epoch": 0.0999000999000999, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 1.0252, + "step": 125 + }, + { + "epoch": 0.1006993006993007, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 1.0261, + "step": 126 + }, + { + "epoch": 0.1014985014985015, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 1.0389, + "step": 127 + }, + { + "epoch": 0.1022977022977023, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 1.027, + "step": 128 + }, + { + "epoch": 0.1030969030969031, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 1.0256, + "step": 129 + }, + { + "epoch": 0.1038961038961039, + "grad_norm": 0.88671875, + "learning_rate": 0.0002, + "loss": 1.0353, + "step": 130 + }, + { + "epoch": 0.10469530469530469, + "grad_norm": 1.1484375, + "learning_rate": 0.0002, + "loss": 1.0371, + "step": 131 + }, + { + "epoch": 0.1054945054945055, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 1.0318, + "step": 132 + }, + { + "epoch": 0.1062937062937063, + "grad_norm": 0.7734375, + "learning_rate": 0.0002, + "loss": 1.0355, + "step": 133 + }, + { + "epoch": 0.1070929070929071, + "grad_norm": 0.64453125, + "learning_rate": 0.0002, + "loss": 1.0378, + "step": 134 + }, + { + "epoch": 0.10789210789210789, + "grad_norm": 0.734375, + "learning_rate": 0.0002, + "loss": 1.0231, + "step": 135 + }, + { + "epoch": 0.10869130869130869, + "grad_norm": 0.80078125, + "learning_rate": 0.0002, + "loss": 1.0222, + "step": 136 + }, + { + "epoch": 0.1094905094905095, + "grad_norm": 1.1640625, + "learning_rate": 0.0002, + "loss": 1.0841, + "step": 137 + }, + { + "epoch": 0.1102897102897103, + "grad_norm": 1.640625, + "learning_rate": 0.0002, + "loss": 1.0375, + "step": 138 + }, + { + "epoch": 0.11108891108891109, + "grad_norm": 0.90234375, + "learning_rate": 0.0002, + 
"loss": 1.0293, + "step": 139 + }, + { + "epoch": 0.11188811188811189, + "grad_norm": 1.4140625, + "learning_rate": 0.0002, + "loss": 1.0401, + "step": 140 + }, + { + "epoch": 0.11268731268731269, + "grad_norm": 1.203125, + "learning_rate": 0.0002, + "loss": 1.0388, + "step": 141 + }, + { + "epoch": 0.1134865134865135, + "grad_norm": 1.5234375, + "learning_rate": 0.0002, + "loss": 1.0298, + "step": 142 + }, + { + "epoch": 0.11428571428571428, + "grad_norm": 0.859375, + "learning_rate": 0.0002, + "loss": 1.0251, + "step": 143 + }, + { + "epoch": 0.11508491508491508, + "grad_norm": 1.4921875, + "learning_rate": 0.0002, + "loss": 1.0393, + "step": 144 + }, + { + "epoch": 0.11588411588411589, + "grad_norm": 0.859375, + "learning_rate": 0.0002, + "loss": 1.017, + "step": 145 + }, + { + "epoch": 0.11668331668331669, + "grad_norm": 1.421875, + "learning_rate": 0.0002, + "loss": 1.0405, + "step": 146 + }, + { + "epoch": 0.11748251748251748, + "grad_norm": 0.8671875, + "learning_rate": 0.0002, + "loss": 1.0274, + "step": 147 + }, + { + "epoch": 0.11828171828171828, + "grad_norm": 1.4921875, + "learning_rate": 0.0002, + "loss": 1.0285, + "step": 148 + }, + { + "epoch": 0.11908091908091908, + "grad_norm": 1.015625, + "learning_rate": 0.0002, + "loss": 1.0318, + "step": 149 + }, + { + "epoch": 0.11988011988011989, + "grad_norm": 1.640625, + "learning_rate": 0.0002, + "loss": 1.0365, + "step": 150 + }, + { + "epoch": 0.12067932067932068, + "grad_norm": 1.171875, + "learning_rate": 0.0002, + "loss": 1.0278, + "step": 151 + }, + { + "epoch": 0.12147852147852148, + "grad_norm": 1.2578125, + "learning_rate": 0.0002, + "loss": 1.0288, + "step": 152 + }, + { + "epoch": 0.12227772227772228, + "grad_norm": 1.09375, + "learning_rate": 0.0002, + "loss": 1.0322, + "step": 153 + }, + { + "epoch": 0.12307692307692308, + "grad_norm": 1.1640625, + "learning_rate": 0.0002, + "loss": 1.019, + "step": 154 + }, + { + "epoch": 0.12387612387612387, + "grad_norm": 1.1328125, + "learning_rate": 0.0002, + "loss": 1.0248, + "step": 155 + }, + { + "epoch": 0.12467532467532468, + "grad_norm": 0.96484375, + "learning_rate": 0.0002, + "loss": 1.0256, + "step": 156 + }, + { + "epoch": 0.12547452547452548, + "grad_norm": 0.84375, + "learning_rate": 0.0002, + "loss": 1.0221, + "step": 157 + }, + { + "epoch": 0.12627372627372627, + "grad_norm": 0.875, + "learning_rate": 0.0002, + "loss": 1.0258, + "step": 158 + }, + { + "epoch": 0.12707292707292708, + "grad_norm": 0.75390625, + "learning_rate": 0.0002, + "loss": 1.0205, + "step": 159 + }, + { + "epoch": 0.12787212787212787, + "grad_norm": 0.75, + "learning_rate": 0.0002, + "loss": 1.0135, + "step": 160 + }, + { + "epoch": 0.12867132867132866, + "grad_norm": 0.67578125, + "learning_rate": 0.0002, + "loss": 1.0225, + "step": 161 + }, + { + "epoch": 0.12947052947052948, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 1.0249, + "step": 162 + }, + { + "epoch": 0.13026973026973027, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 1.0221, + "step": 163 + }, + { + "epoch": 0.13106893106893108, + "grad_norm": 0.69921875, + "learning_rate": 0.0002, + "loss": 1.0263, + "step": 164 + }, + { + "epoch": 0.13186813186813187, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 1.0179, + "step": 165 + }, + { + "epoch": 0.13266733266733266, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 1.0019, + "step": 166 + }, + { + "epoch": 0.13346653346653348, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 1.0215, + "step": 167 + }, + { + "epoch": 
0.13426573426573427, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 1.0191, + "step": 168 + }, + { + "epoch": 0.13506493506493505, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 1.0083, + "step": 169 + }, + { + "epoch": 0.13586413586413587, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 1.0227, + "step": 170 + }, + { + "epoch": 0.13666333666333666, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 1.0147, + "step": 171 + }, + { + "epoch": 0.13746253746253748, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 1.021, + "step": 172 + }, + { + "epoch": 0.13826173826173827, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 1.0158, + "step": 173 + }, + { + "epoch": 0.13906093906093905, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 1.01, + "step": 174 + }, + { + "epoch": 0.13986013986013987, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 1.0202, + "step": 175 + }, + { + "epoch": 0.14065934065934066, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 1.0105, + "step": 176 + }, + { + "epoch": 0.14145854145854145, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 1.0117, + "step": 177 + }, + { + "epoch": 0.14225774225774226, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 1.0192, + "step": 178 + }, + { + "epoch": 0.14305694305694305, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 1.0107, + "step": 179 + }, + { + "epoch": 0.14385614385614387, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 1.0049, + "step": 180 + }, + { + "epoch": 0.14465534465534466, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 1.0037, + "step": 181 + }, + { + "epoch": 0.14545454545454545, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9951, + "step": 182 + }, + { + "epoch": 0.14625374625374626, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 1.0104, + "step": 183 + }, + { + "epoch": 0.14705294705294705, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 1.0101, + "step": 184 + }, + { + "epoch": 0.14785214785214784, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 1.0091, + "step": 185 + }, + { + "epoch": 0.14865134865134866, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 1.0074, + "step": 186 + }, + { + "epoch": 0.14945054945054945, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 1.0123, + "step": 187 + }, + { + "epoch": 0.15024975024975026, + "grad_norm": 0.73046875, + "learning_rate": 0.0002, + "loss": 1.0396, + "step": 188 + }, + { + "epoch": 0.15104895104895105, + "grad_norm": 0.734375, + "learning_rate": 0.0002, + "loss": 1.0196, + "step": 189 + }, + { + "epoch": 0.15184815184815184, + "grad_norm": 0.796875, + "learning_rate": 0.0002, + "loss": 1.0079, + "step": 190 + }, + { + "epoch": 0.15264735264735266, + "grad_norm": 3.484375, + "learning_rate": 0.0002, + "loss": 1.0459, + "step": 191 + }, + { + "epoch": 0.15344655344655345, + "grad_norm": 0.83984375, + "learning_rate": 0.0002, + "loss": 1.0136, + "step": 192 + }, + { + "epoch": 0.15424575424575424, + "grad_norm": 2.296875, + "learning_rate": 0.0002, + "loss": 1.0378, + "step": 193 + }, + { + "epoch": 0.15504495504495505, + "grad_norm": 1.1015625, + "learning_rate": 0.0002, + "loss": 1.0416, + "step": 194 + }, + { + "epoch": 0.15584415584415584, + "grad_norm": 1.9140625, + "learning_rate": 0.0002, + "loss": 1.0514, + "step": 195 + }, + { + "epoch": 
0.15664335664335666, + "grad_norm": 1.0390625, + "learning_rate": 0.0002, + "loss": 1.0412, + "step": 196 + }, + { + "epoch": 0.15744255744255745, + "grad_norm": 3.171875, + "learning_rate": 0.0002, + "loss": 1.0968, + "step": 197 + }, + { + "epoch": 0.15824175824175823, + "grad_norm": 2.546875, + "learning_rate": 0.0002, + "loss": 1.0774, + "step": 198 + }, + { + "epoch": 0.15904095904095905, + "grad_norm": 1.7890625, + "learning_rate": 0.0002, + "loss": 1.0554, + "step": 199 + }, + { + "epoch": 0.15984015984015984, + "grad_norm": 1.46875, + "learning_rate": 0.0002, + "loss": 1.064, + "step": 200 + }, + { + "epoch": 0.16063936063936063, + "grad_norm": 1.3203125, + "learning_rate": 0.0002, + "loss": 1.0419, + "step": 201 + }, + { + "epoch": 0.16143856143856145, + "grad_norm": 1.1953125, + "learning_rate": 0.0002, + "loss": 1.0409, + "step": 202 + }, + { + "epoch": 0.16223776223776223, + "grad_norm": 1.0625, + "learning_rate": 0.0002, + "loss": 1.0371, + "step": 203 + }, + { + "epoch": 0.16303696303696305, + "grad_norm": 0.96875, + "learning_rate": 0.0002, + "loss": 1.0238, + "step": 204 + }, + { + "epoch": 0.16383616383616384, + "grad_norm": 0.97265625, + "learning_rate": 0.0002, + "loss": 1.0403, + "step": 205 + }, + { + "epoch": 0.16463536463536463, + "grad_norm": 1.0703125, + "learning_rate": 0.0002, + "loss": 1.0403, + "step": 206 + }, + { + "epoch": 0.16543456543456544, + "grad_norm": 0.95703125, + "learning_rate": 0.0002, + "loss": 1.0254, + "step": 207 + }, + { + "epoch": 0.16623376623376623, + "grad_norm": 0.8984375, + "learning_rate": 0.0002, + "loss": 1.033, + "step": 208 + }, + { + "epoch": 0.16703296703296702, + "grad_norm": 0.79296875, + "learning_rate": 0.0002, + "loss": 1.0272, + "step": 209 + }, + { + "epoch": 0.16783216783216784, + "grad_norm": 0.69921875, + "learning_rate": 0.0002, + "loss": 1.0248, + "step": 210 + }, + { + "epoch": 0.16863136863136863, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 1.0287, + "step": 211 + }, + { + "epoch": 0.16943056943056944, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 1.0129, + "step": 212 + }, + { + "epoch": 0.17022977022977023, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 1.0276, + "step": 213 + }, + { + "epoch": 0.17102897102897102, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 1.0164, + "step": 214 + }, + { + "epoch": 0.17182817182817184, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 1.0174, + "step": 215 + }, + { + "epoch": 0.17262737262737263, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 1.0293, + "step": 216 + }, + { + "epoch": 0.17342657342657342, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 1.017, + "step": 217 + }, + { + "epoch": 0.17422577422577423, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 1.0159, + "step": 218 + }, + { + "epoch": 0.17502497502497502, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 1.0051, + "step": 219 + }, + { + "epoch": 0.17582417582417584, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 1.0038, + "step": 220 + }, + { + "epoch": 0.17662337662337663, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 1.0147, + "step": 221 + }, + { + "epoch": 0.17742257742257742, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 1.0116, + "step": 222 + }, + { + "epoch": 0.17822177822177823, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 1.0103, + "step": 223 + }, + { + "epoch": 0.17902097902097902, + 
"grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 1.0087, + "step": 224 + }, + { + "epoch": 0.1798201798201798, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 1.0171, + "step": 225 + }, + { + "epoch": 0.18061938061938063, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9997, + "step": 226 + }, + { + "epoch": 0.18141858141858141, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 1.0111, + "step": 227 + }, + { + "epoch": 0.18221778221778223, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 1.0054, + "step": 228 + }, + { + "epoch": 0.18301698301698302, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9988, + "step": 229 + }, + { + "epoch": 0.1838161838161838, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 1.0008, + "step": 230 + }, + { + "epoch": 0.18461538461538463, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 1.0037, + "step": 231 + }, + { + "epoch": 0.18541458541458541, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 1.0063, + "step": 232 + }, + { + "epoch": 0.1862137862137862, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9908, + "step": 233 + }, + { + "epoch": 0.18701298701298702, + "grad_norm": 0.79296875, + "learning_rate": 0.0002, + "loss": 1.0117, + "step": 234 + }, + { + "epoch": 0.1878121878121878, + "grad_norm": 1.1796875, + "learning_rate": 0.0002, + "loss": 1.0002, + "step": 235 + }, + { + "epoch": 0.18861138861138863, + "grad_norm": 1.0, + "learning_rate": 0.0002, + "loss": 0.9998, + "step": 236 + }, + { + "epoch": 0.18941058941058941, + "grad_norm": 0.765625, + "learning_rate": 0.0002, + "loss": 1.0021, + "step": 237 + }, + { + "epoch": 0.1902097902097902, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9999, + "step": 238 + }, + { + "epoch": 0.19100899100899102, + "grad_norm": 0.625, + "learning_rate": 0.0002, + "loss": 0.9985, + "step": 239 + }, + { + "epoch": 0.1918081918081918, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 1.0006, + "step": 240 + }, + { + "epoch": 0.1926073926073926, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9983, + "step": 241 + }, + { + "epoch": 0.1934065934065934, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 1.0014, + "step": 242 + }, + { + "epoch": 0.1942057942057942, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 1.001, + "step": 243 + }, + { + "epoch": 0.19500499500499502, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9903, + "step": 244 + }, + { + "epoch": 0.1958041958041958, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.995, + "step": 245 + }, + { + "epoch": 0.1966033966033966, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9974, + "step": 246 + }, + { + "epoch": 0.1974025974025974, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9987, + "step": 247 + }, + { + "epoch": 0.1982017982017982, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 1.0066, + "step": 248 + }, + { + "epoch": 0.199000999000999, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.99, + "step": 249 + }, + { + "epoch": 0.1998001998001998, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9969, + "step": 250 + }, + { + "epoch": 0.2005994005994006, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.9983, + "step": 251 + }, + { + "epoch": 0.2013986013986014, + "grad_norm": 0.453125, + 
"learning_rate": 0.0002, + "loss": 1.0044, + "step": 252 + }, + { + "epoch": 0.2021978021978022, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9928, + "step": 253 + }, + { + "epoch": 0.202997002997003, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9865, + "step": 254 + }, + { + "epoch": 0.2037962037962038, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9919, + "step": 255 + }, + { + "epoch": 0.2045954045954046, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.998, + "step": 256 + }, + { + "epoch": 0.20539460539460538, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9927, + "step": 257 + }, + { + "epoch": 0.2061938061938062, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9888, + "step": 258 + }, + { + "epoch": 0.206993006993007, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9876, + "step": 259 + }, + { + "epoch": 0.2077922077922078, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9935, + "step": 260 + }, + { + "epoch": 0.2085914085914086, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9843, + "step": 261 + }, + { + "epoch": 0.20939060939060938, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 0.9917, + "step": 262 + }, + { + "epoch": 0.2101898101898102, + "grad_norm": 0.93359375, + "learning_rate": 0.0002, + "loss": 0.9974, + "step": 263 + }, + { + "epoch": 0.210989010989011, + "grad_norm": 1.9140625, + "learning_rate": 0.0002, + "loss": 1.008, + "step": 264 + }, + { + "epoch": 0.21178821178821178, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9963, + "step": 265 + }, + { + "epoch": 0.2125874125874126, + "grad_norm": 1.21875, + "learning_rate": 0.0002, + "loss": 1.0049, + "step": 266 + }, + { + "epoch": 0.21338661338661338, + "grad_norm": 1.6875, + "learning_rate": 0.0002, + "loss": 0.9916, + "step": 267 + }, + { + "epoch": 0.2141858141858142, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 0.9931, + "step": 268 + }, + { + "epoch": 0.214985014985015, + "grad_norm": 2.0625, + "learning_rate": 0.0002, + "loss": 1.0043, + "step": 269 + }, + { + "epoch": 0.21578421578421578, + "grad_norm": 0.99609375, + "learning_rate": 0.0002, + "loss": 0.9899, + "step": 270 + }, + { + "epoch": 0.2165834165834166, + "grad_norm": 5.3125, + "learning_rate": 0.0002, + "loss": 1.0724, + "step": 271 + }, + { + "epoch": 0.21738261738261738, + "grad_norm": 5.0625, + "learning_rate": 0.0002, + "loss": 1.0816, + "step": 272 + }, + { + "epoch": 0.21818181818181817, + "grad_norm": 1.734375, + "learning_rate": 0.0002, + "loss": 1.0166, + "step": 273 + }, + { + "epoch": 0.218981018981019, + "grad_norm": 3.703125, + "learning_rate": 0.0002, + "loss": 1.0355, + "step": 274 + }, + { + "epoch": 0.21978021978021978, + "grad_norm": 3.1875, + "learning_rate": 0.0002, + "loss": 1.0681, + "step": 275 + }, + { + "epoch": 0.2205794205794206, + "grad_norm": 2.15625, + "learning_rate": 0.0002, + "loss": 1.0401, + "step": 276 + }, + { + "epoch": 0.22137862137862138, + "grad_norm": 1.6796875, + "learning_rate": 0.0002, + "loss": 1.0299, + "step": 277 + }, + { + "epoch": 0.22217782217782217, + "grad_norm": 1.984375, + "learning_rate": 0.0002, + "loss": 1.0256, + "step": 278 + }, + { + "epoch": 0.222977022977023, + "grad_norm": 1.125, + "learning_rate": 0.0002, + "loss": 1.023, + "step": 279 + }, + { + "epoch": 0.22377622377622378, + "grad_norm": 1.734375, + "learning_rate": 0.0002, + "loss": 1.0184, + "step": 280 + }, + { + 
"epoch": 0.22457542457542456, + "grad_norm": 1.15625, + "learning_rate": 0.0002, + "loss": 1.0154, + "step": 281 + }, + { + "epoch": 0.22537462537462538, + "grad_norm": 1.5390625, + "learning_rate": 0.0002, + "loss": 1.0054, + "step": 282 + }, + { + "epoch": 0.22617382617382617, + "grad_norm": 1.125, + "learning_rate": 0.0002, + "loss": 1.0123, + "step": 283 + }, + { + "epoch": 0.226973026973027, + "grad_norm": 1.8203125, + "learning_rate": 0.0002, + "loss": 1.0089, + "step": 284 + }, + { + "epoch": 0.22777222777222778, + "grad_norm": 1.546875, + "learning_rate": 0.0002, + "loss": 1.0159, + "step": 285 + }, + { + "epoch": 0.22857142857142856, + "grad_norm": 1.5078125, + "learning_rate": 0.0002, + "loss": 1.0028, + "step": 286 + }, + { + "epoch": 0.22937062937062938, + "grad_norm": 1.3515625, + "learning_rate": 0.0002, + "loss": 1.0018, + "step": 287 + }, + { + "epoch": 0.23016983016983017, + "grad_norm": 1.3984375, + "learning_rate": 0.0002, + "loss": 1.0481, + "step": 288 + }, + { + "epoch": 0.23096903096903096, + "grad_norm": 1.171875, + "learning_rate": 0.0002, + "loss": 1.0038, + "step": 289 + }, + { + "epoch": 0.23176823176823177, + "grad_norm": 1.375, + "learning_rate": 0.0002, + "loss": 1.0024, + "step": 290 + }, + { + "epoch": 0.23256743256743256, + "grad_norm": 1.0546875, + "learning_rate": 0.0002, + "loss": 0.9914, + "step": 291 + }, + { + "epoch": 0.23336663336663338, + "grad_norm": 1.4296875, + "learning_rate": 0.0002, + "loss": 0.9959, + "step": 292 + }, + { + "epoch": 0.23416583416583417, + "grad_norm": 1.2734375, + "learning_rate": 0.0002, + "loss": 1.0094, + "step": 293 + }, + { + "epoch": 0.23496503496503496, + "grad_norm": 1.125, + "learning_rate": 0.0002, + "loss": 0.9992, + "step": 294 + }, + { + "epoch": 0.23576423576423577, + "grad_norm": 0.984375, + "learning_rate": 0.0002, + "loss": 0.9999, + "step": 295 + }, + { + "epoch": 0.23656343656343656, + "grad_norm": 0.96484375, + "learning_rate": 0.0002, + "loss": 0.9901, + "step": 296 + }, + { + "epoch": 0.23736263736263735, + "grad_norm": 0.8046875, + "learning_rate": 0.0002, + "loss": 0.9891, + "step": 297 + }, + { + "epoch": 0.23816183816183817, + "grad_norm": 0.796875, + "learning_rate": 0.0002, + "loss": 1.003, + "step": 298 + }, + { + "epoch": 0.23896103896103896, + "grad_norm": 0.625, + "learning_rate": 0.0002, + "loss": 1.0007, + "step": 299 + }, + { + "epoch": 0.23976023976023977, + "grad_norm": 0.69140625, + "learning_rate": 0.0002, + "loss": 0.9954, + "step": 300 + }, + { + "epoch": 0.24055944055944056, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.9901, + "step": 301 + }, + { + "epoch": 0.24135864135864135, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 0.9953, + "step": 302 + }, + { + "epoch": 0.24215784215784217, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9948, + "step": 303 + }, + { + "epoch": 0.24295704295704296, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9864, + "step": 304 + }, + { + "epoch": 0.24375624375624375, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9957, + "step": 305 + }, + { + "epoch": 0.24455544455544456, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9828, + "step": 306 + }, + { + "epoch": 0.24535464535464535, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9981, + "step": 307 + }, + { + "epoch": 0.24615384615384617, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9915, + "step": 308 + }, + { + "epoch": 0.24695304695304696, + 
"grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9876, + "step": 309 + }, + { + "epoch": 0.24775224775224775, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.994, + "step": 310 + }, + { + "epoch": 0.24855144855144856, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9861, + "step": 311 + }, + { + "epoch": 0.24935064935064935, + "grad_norm": 1.03125, + "learning_rate": 0.0002, + "loss": 0.9949, + "step": 312 + }, + { + "epoch": 0.25014985014985014, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.987, + "step": 313 + }, + { + "epoch": 0.25094905094905096, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9863, + "step": 314 + }, + { + "epoch": 0.2517482517482518, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9894, + "step": 315 + }, + { + "epoch": 0.25254745254745253, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9915, + "step": 316 + }, + { + "epoch": 0.25334665334665335, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9934, + "step": 317 + }, + { + "epoch": 0.25414585414585417, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.996, + "step": 318 + }, + { + "epoch": 0.2549450549450549, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9822, + "step": 319 + }, + { + "epoch": 0.25574425574425574, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 1.0072, + "step": 320 + }, + { + "epoch": 0.25654345654345656, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9891, + "step": 321 + }, + { + "epoch": 0.2573426573426573, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9901, + "step": 322 + }, + { + "epoch": 0.25814185814185814, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9991, + "step": 323 + }, + { + "epoch": 0.25894105894105895, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9899, + "step": 324 + }, + { + "epoch": 0.2597402597402597, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9921, + "step": 325 + }, + { + "epoch": 0.26053946053946053, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9796, + "step": 326 + }, + { + "epoch": 0.26133866133866135, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9879, + "step": 327 + }, + { + "epoch": 0.26213786213786217, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9948, + "step": 328 + }, + { + "epoch": 0.2629370629370629, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9917, + "step": 329 + }, + { + "epoch": 0.26373626373626374, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9928, + "step": 330 + }, + { + "epoch": 0.26453546453546456, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.9894, + "step": 331 + }, + { + "epoch": 0.2653346653346653, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9864, + "step": 332 + }, + { + "epoch": 0.26613386613386614, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9796, + "step": 333 + }, + { + "epoch": 0.26693306693306695, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9782, + "step": 334 + }, + { + "epoch": 0.2677322677322677, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9848, + "step": 335 + }, + { + "epoch": 0.26853146853146853, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9848, + "step": 336 + }, + { + "epoch": 0.26933066933066935, + 
"grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9832, + "step": 337 + }, + { + "epoch": 0.2701298701298701, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9817, + "step": 338 + }, + { + "epoch": 0.2709290709290709, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.9831, + "step": 339 + }, + { + "epoch": 0.27172827172827174, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9785, + "step": 340 + }, + { + "epoch": 0.2725274725274725, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.989, + "step": 341 + }, + { + "epoch": 0.2733266733266733, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9831, + "step": 342 + }, + { + "epoch": 0.27412587412587414, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9801, + "step": 343 + }, + { + "epoch": 0.27492507492507495, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9868, + "step": 344 + }, + { + "epoch": 0.2757242757242757, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.989, + "step": 345 + }, + { + "epoch": 0.27652347652347653, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9765, + "step": 346 + }, + { + "epoch": 0.27732267732267735, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9844, + "step": 347 + }, + { + "epoch": 0.2781218781218781, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9885, + "step": 348 + }, + { + "epoch": 0.2789210789210789, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9754, + "step": 349 + }, + { + "epoch": 0.27972027972027974, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9797, + "step": 350 + }, + { + "epoch": 0.2805194805194805, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9778, + "step": 351 + }, + { + "epoch": 0.2813186813186813, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9793, + "step": 352 + }, + { + "epoch": 0.28211788211788213, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9795, + "step": 353 + }, + { + "epoch": 0.2829170829170829, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.983, + "step": 354 + }, + { + "epoch": 0.2837162837162837, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.976, + "step": 355 + }, + { + "epoch": 0.28451548451548453, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9719, + "step": 356 + }, + { + "epoch": 0.2853146853146853, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9774, + "step": 357 + }, + { + "epoch": 0.2861138861138861, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9754, + "step": 358 + }, + { + "epoch": 0.2869130869130869, + "grad_norm": 0.90625, + "learning_rate": 0.0002, + "loss": 1.0023, + "step": 359 + }, + { + "epoch": 0.28771228771228774, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9793, + "step": 360 + }, + { + "epoch": 0.2885114885114885, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.982, + "step": 361 + }, + { + "epoch": 0.2893106893106893, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9803, + "step": 362 + }, + { + "epoch": 0.29010989010989013, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.977, + "step": 363 + }, + { + "epoch": 0.2909090909090909, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.985, + "step": 364 + }, + { + "epoch": 0.2917082917082917, + "grad_norm": 0.50390625, + 
"learning_rate": 0.0002, + "loss": 0.9818, + "step": 365 + }, + { + "epoch": 0.29250749250749253, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9947, + "step": 366 + }, + { + "epoch": 0.2933066933066933, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.981, + "step": 367 + }, + { + "epoch": 0.2941058941058941, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9836, + "step": 368 + }, + { + "epoch": 0.2949050949050949, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9741, + "step": 369 + }, + { + "epoch": 0.2957042957042957, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.976, + "step": 370 + }, + { + "epoch": 0.2965034965034965, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.9789, + "step": 371 + }, + { + "epoch": 0.2973026973026973, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9917, + "step": 372 + }, + { + "epoch": 0.2981018981018981, + "grad_norm": 0.671875, + "learning_rate": 0.0002, + "loss": 0.9778, + "step": 373 + }, + { + "epoch": 0.2989010989010989, + "grad_norm": 0.90625, + "learning_rate": 0.0002, + "loss": 0.9772, + "step": 374 + }, + { + "epoch": 0.2997002997002997, + "grad_norm": 1.96875, + "learning_rate": 0.0002, + "loss": 0.9921, + "step": 375 + }, + { + "epoch": 0.3004995004995005, + "grad_norm": 0.75, + "learning_rate": 0.0002, + "loss": 0.9764, + "step": 376 + }, + { + "epoch": 0.3012987012987013, + "grad_norm": 1.640625, + "learning_rate": 0.0002, + "loss": 0.9932, + "step": 377 + }, + { + "epoch": 0.3020979020979021, + "grad_norm": 1.0, + "learning_rate": 0.0002, + "loss": 0.9853, + "step": 378 + }, + { + "epoch": 0.3028971028971029, + "grad_norm": 2.046875, + "learning_rate": 0.0002, + "loss": 0.9954, + "step": 379 + }, + { + "epoch": 0.3036963036963037, + "grad_norm": 0.93359375, + "learning_rate": 0.0002, + "loss": 0.9834, + "step": 380 + }, + { + "epoch": 0.3044955044955045, + "grad_norm": 2.78125, + "learning_rate": 0.0002, + "loss": 1.025, + "step": 381 + }, + { + "epoch": 0.3052947052947053, + "grad_norm": 1.8046875, + "learning_rate": 0.0002, + "loss": 1.0111, + "step": 382 + }, + { + "epoch": 0.3060939060939061, + "grad_norm": 1.8359375, + "learning_rate": 0.0002, + "loss": 0.9947, + "step": 383 + }, + { + "epoch": 0.3068931068931069, + "grad_norm": 1.203125, + "learning_rate": 0.0002, + "loss": 0.9982, + "step": 384 + }, + { + "epoch": 0.3076923076923077, + "grad_norm": 1.53125, + "learning_rate": 0.0002, + "loss": 1.0028, + "step": 385 + }, + { + "epoch": 0.30849150849150847, + "grad_norm": 1.21875, + "learning_rate": 0.0002, + "loss": 0.9915, + "step": 386 + }, + { + "epoch": 0.3092907092907093, + "grad_norm": 1.34375, + "learning_rate": 0.0002, + "loss": 0.9934, + "step": 387 + }, + { + "epoch": 0.3100899100899101, + "grad_norm": 0.96875, + "learning_rate": 0.0002, + "loss": 0.9919, + "step": 388 + }, + { + "epoch": 0.31088911088911086, + "grad_norm": 1.1875, + "learning_rate": 0.0002, + "loss": 0.9982, + "step": 389 + }, + { + "epoch": 0.3116883116883117, + "grad_norm": 0.90234375, + "learning_rate": 0.0002, + "loss": 0.9804, + "step": 390 + }, + { + "epoch": 0.3124875124875125, + "grad_norm": 0.94140625, + "learning_rate": 0.0002, + "loss": 0.9811, + "step": 391 + }, + { + "epoch": 0.3132867132867133, + "grad_norm": 1.4765625, + "learning_rate": 0.0002, + "loss": 0.9871, + "step": 392 + }, + { + "epoch": 0.3140859140859141, + "grad_norm": 0.9453125, + "learning_rate": 0.0002, + "loss": 0.9841, + "step": 393 + }, + { + "epoch": 
0.3148851148851149, + "grad_norm": 1.140625, + "learning_rate": 0.0002, + "loss": 0.989, + "step": 394 + }, + { + "epoch": 0.3156843156843157, + "grad_norm": 0.9140625, + "learning_rate": 0.0002, + "loss": 0.9861, + "step": 395 + }, + { + "epoch": 0.31648351648351647, + "grad_norm": 0.94921875, + "learning_rate": 0.0002, + "loss": 0.9875, + "step": 396 + }, + { + "epoch": 0.3172827172827173, + "grad_norm": 0.89453125, + "learning_rate": 0.0002, + "loss": 0.9877, + "step": 397 + }, + { + "epoch": 0.3180819180819181, + "grad_norm": 0.8203125, + "learning_rate": 0.0002, + "loss": 0.9823, + "step": 398 + }, + { + "epoch": 0.31888111888111886, + "grad_norm": 0.67578125, + "learning_rate": 0.0002, + "loss": 0.9777, + "step": 399 + }, + { + "epoch": 0.3196803196803197, + "grad_norm": 0.6953125, + "learning_rate": 0.0002, + "loss": 0.9886, + "step": 400 + }, + { + "epoch": 0.3204795204795205, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.9856, + "step": 401 + }, + { + "epoch": 0.32127872127872126, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9786, + "step": 402 + }, + { + "epoch": 0.3220779220779221, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9778, + "step": 403 + }, + { + "epoch": 0.3228771228771229, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9796, + "step": 404 + }, + { + "epoch": 0.32367632367632365, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9739, + "step": 405 + }, + { + "epoch": 0.32447552447552447, + "grad_norm": 0.734375, + "learning_rate": 0.0002, + "loss": 0.9844, + "step": 406 + }, + { + "epoch": 0.3252747252747253, + "grad_norm": 0.87109375, + "learning_rate": 0.0002, + "loss": 0.9847, + "step": 407 + }, + { + "epoch": 0.3260739260739261, + "grad_norm": 0.74609375, + "learning_rate": 0.0002, + "loss": 0.9812, + "step": 408 + }, + { + "epoch": 0.32687312687312686, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9782, + "step": 409 + }, + { + "epoch": 0.3276723276723277, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9722, + "step": 410 + }, + { + "epoch": 0.3284715284715285, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.9673, + "step": 411 + }, + { + "epoch": 0.32927072927072926, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9775, + "step": 412 + }, + { + "epoch": 0.3300699300699301, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9737, + "step": 413 + }, + { + "epoch": 0.3308691308691309, + "grad_norm": 0.83203125, + "learning_rate": 0.0002, + "loss": 0.9722, + "step": 414 + }, + { + "epoch": 0.33166833166833165, + "grad_norm": 1.2265625, + "learning_rate": 0.0002, + "loss": 0.975, + "step": 415 + }, + { + "epoch": 0.33246753246753247, + "grad_norm": 0.9375, + "learning_rate": 0.0002, + "loss": 0.9811, + "step": 416 + }, + { + "epoch": 0.3332667332667333, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9751, + "step": 417 + }, + { + "epoch": 0.33406593406593404, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9806, + "step": 418 + }, + { + "epoch": 0.33486513486513486, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9752, + "step": 419 + }, + { + "epoch": 0.3356643356643357, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.966, + "step": 420 + }, + { + "epoch": 0.33646353646353644, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9753, + "step": 421 + }, + { + "epoch": 0.33726273726273726, + 
"grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9634, + "step": 422 + }, + { + "epoch": 0.33806193806193807, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9671, + "step": 423 + }, + { + "epoch": 0.3388611388611389, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9711, + "step": 424 + }, + { + "epoch": 0.33966033966033965, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 1.013, + "step": 425 + }, + { + "epoch": 0.34045954045954047, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9675, + "step": 426 + }, + { + "epoch": 0.3412587412587413, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9732, + "step": 427 + }, + { + "epoch": 0.34205794205794204, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9699, + "step": 428 + }, + { + "epoch": 0.34285714285714286, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9758, + "step": 429 + }, + { + "epoch": 0.3436563436563437, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9761, + "step": 430 + }, + { + "epoch": 0.34445554445554444, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9673, + "step": 431 + }, + { + "epoch": 0.34525474525474525, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9802, + "step": 432 + }, + { + "epoch": 0.34605394605394607, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9703, + "step": 433 + }, + { + "epoch": 0.34685314685314683, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9706, + "step": 434 + }, + { + "epoch": 0.34765234765234765, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.981, + "step": 435 + }, + { + "epoch": 0.34845154845154847, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9759, + "step": 436 + }, + { + "epoch": 0.3492507492507492, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9593, + "step": 437 + }, + { + "epoch": 0.35004995004995004, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9729, + "step": 438 + }, + { + "epoch": 0.35084915084915086, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9667, + "step": 439 + }, + { + "epoch": 0.3516483516483517, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9724, + "step": 440 + }, + { + "epoch": 0.35244755244755244, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9696, + "step": 441 + }, + { + "epoch": 0.35324675324675325, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.973, + "step": 442 + }, + { + "epoch": 0.35404595404595407, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9721, + "step": 443 + }, + { + "epoch": 0.35484515484515483, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9729, + "step": 444 + }, + { + "epoch": 0.35564435564435565, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9707, + "step": 445 + }, + { + "epoch": 0.35644355644355646, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9698, + "step": 446 + }, + { + "epoch": 0.3572427572427572, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.9627, + "step": 447 + }, + { + "epoch": 0.35804195804195804, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9746, + "step": 448 + }, + { + "epoch": 0.35884115884115886, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9672, + "step": 449 + }, + { + "epoch": 0.3596403596403596, + 
"grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9622, + "step": 450 + }, + { + "epoch": 0.36043956043956044, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9717, + "step": 451 + }, + { + "epoch": 0.36123876123876125, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9732, + "step": 452 + }, + { + "epoch": 0.362037962037962, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.988, + "step": 453 + }, + { + "epoch": 0.36283716283716283, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9639, + "step": 454 + }, + { + "epoch": 0.36363636363636365, + "grad_norm": 0.8125, + "learning_rate": 0.0002, + "loss": 0.9725, + "step": 455 + }, + { + "epoch": 0.36443556443556446, + "grad_norm": 1.484375, + "learning_rate": 0.0002, + "loss": 0.9809, + "step": 456 + }, + { + "epoch": 0.3652347652347652, + "grad_norm": 0.96875, + "learning_rate": 0.0002, + "loss": 0.9707, + "step": 457 + }, + { + "epoch": 0.36603396603396604, + "grad_norm": 0.94921875, + "learning_rate": 0.0002, + "loss": 0.9667, + "step": 458 + }, + { + "epoch": 0.36683316683316686, + "grad_norm": 0.97265625, + "learning_rate": 0.0002, + "loss": 0.9771, + "step": 459 + }, + { + "epoch": 0.3676323676323676, + "grad_norm": 1.140625, + "learning_rate": 0.0002, + "loss": 0.9768, + "step": 460 + }, + { + "epoch": 0.36843156843156843, + "grad_norm": 0.96875, + "learning_rate": 0.0002, + "loss": 0.973, + "step": 461 + }, + { + "epoch": 0.36923076923076925, + "grad_norm": 1.09375, + "learning_rate": 0.0002, + "loss": 0.9686, + "step": 462 + }, + { + "epoch": 0.37002997002997, + "grad_norm": 0.98828125, + "learning_rate": 0.0002, + "loss": 0.9802, + "step": 463 + }, + { + "epoch": 0.37082917082917083, + "grad_norm": 1.453125, + "learning_rate": 0.0002, + "loss": 0.9825, + "step": 464 + }, + { + "epoch": 0.37162837162837165, + "grad_norm": 0.78515625, + "learning_rate": 0.0002, + "loss": 0.9736, + "step": 465 + }, + { + "epoch": 0.3724275724275724, + "grad_norm": 1.203125, + "learning_rate": 0.0002, + "loss": 0.9777, + "step": 466 + }, + { + "epoch": 0.3732267732267732, + "grad_norm": 1.296875, + "learning_rate": 0.0002, + "loss": 1.0053, + "step": 467 + }, + { + "epoch": 0.37402597402597404, + "grad_norm": 0.74609375, + "learning_rate": 0.0002, + "loss": 0.9675, + "step": 468 + }, + { + "epoch": 0.3748251748251748, + "grad_norm": 1.9296875, + "learning_rate": 0.0002, + "loss": 0.9852, + "step": 469 + }, + { + "epoch": 0.3756243756243756, + "grad_norm": 0.9609375, + "learning_rate": 0.0002, + "loss": 0.9758, + "step": 470 + }, + { + "epoch": 0.37642357642357643, + "grad_norm": 4.25, + "learning_rate": 0.0002, + "loss": 1.0281, + "step": 471 + }, + { + "epoch": 0.37722277722277725, + "grad_norm": 3.8125, + "learning_rate": 0.0002, + "loss": 1.0331, + "step": 472 + }, + { + "epoch": 0.378021978021978, + "grad_norm": 1.9609375, + "learning_rate": 0.0002, + "loss": 0.9964, + "step": 473 + }, + { + "epoch": 0.37882117882117883, + "grad_norm": 1.7890625, + "learning_rate": 0.0002, + "loss": 0.9965, + "step": 474 + }, + { + "epoch": 0.37962037962037964, + "grad_norm": 1.8984375, + "learning_rate": 0.0002, + "loss": 0.9866, + "step": 475 + }, + { + "epoch": 0.3804195804195804, + "grad_norm": 1.3671875, + "learning_rate": 0.0002, + "loss": 0.9816, + "step": 476 + }, + { + "epoch": 0.3812187812187812, + "grad_norm": 1.484375, + "learning_rate": 0.0002, + "loss": 0.9933, + "step": 477 + }, + { + "epoch": 0.38201798201798204, + "grad_norm": 1.109375, + "learning_rate": 0.0002, + 
"loss": 0.9881, + "step": 478 + }, + { + "epoch": 0.3828171828171828, + "grad_norm": 1.6328125, + "learning_rate": 0.0002, + "loss": 0.9979, + "step": 479 + }, + { + "epoch": 0.3836163836163836, + "grad_norm": 0.91015625, + "learning_rate": 0.0002, + "loss": 0.9754, + "step": 480 + }, + { + "epoch": 0.38441558441558443, + "grad_norm": 1.2578125, + "learning_rate": 0.0002, + "loss": 0.9872, + "step": 481 + }, + { + "epoch": 0.3852147852147852, + "grad_norm": 1.53125, + "learning_rate": 0.0002, + "loss": 0.9851, + "step": 482 + }, + { + "epoch": 0.386013986013986, + "grad_norm": 1.0078125, + "learning_rate": 0.0002, + "loss": 0.9769, + "step": 483 + }, + { + "epoch": 0.3868131868131868, + "grad_norm": 1.09375, + "learning_rate": 0.0002, + "loss": 0.982, + "step": 484 + }, + { + "epoch": 0.3876123876123876, + "grad_norm": 1.03125, + "learning_rate": 0.0002, + "loss": 0.9722, + "step": 485 + }, + { + "epoch": 0.3884115884115884, + "grad_norm": 1.0234375, + "learning_rate": 0.0002, + "loss": 0.9848, + "step": 486 + }, + { + "epoch": 0.3892107892107892, + "grad_norm": 0.83984375, + "learning_rate": 0.0002, + "loss": 0.9681, + "step": 487 + }, + { + "epoch": 0.39000999000999004, + "grad_norm": 0.6953125, + "learning_rate": 0.0002, + "loss": 0.9778, + "step": 488 + }, + { + "epoch": 0.3908091908091908, + "grad_norm": 0.65625, + "learning_rate": 0.0002, + "loss": 0.9768, + "step": 489 + }, + { + "epoch": 0.3916083916083916, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 0.9671, + "step": 490 + }, + { + "epoch": 0.39240759240759243, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9813, + "step": 491 + }, + { + "epoch": 0.3932067932067932, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.9819, + "step": 492 + }, + { + "epoch": 0.394005994005994, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9845, + "step": 493 + }, + { + "epoch": 0.3948051948051948, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.9789, + "step": 494 + }, + { + "epoch": 0.3956043956043956, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9736, + "step": 495 + }, + { + "epoch": 0.3964035964035964, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9737, + "step": 496 + }, + { + "epoch": 0.3972027972027972, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9673, + "step": 497 + }, + { + "epoch": 0.398001998001998, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9686, + "step": 498 + }, + { + "epoch": 0.3988011988011988, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9774, + "step": 499 + }, + { + "epoch": 0.3996003996003996, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9708, + "step": 500 + }, + { + "epoch": 0.4003996003996004, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9645, + "step": 501 + }, + { + "epoch": 0.4011988011988012, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9748, + "step": 502 + }, + { + "epoch": 0.401998001998002, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 0.9789, + "step": 503 + }, + { + "epoch": 0.4027972027972028, + "grad_norm": 1.0859375, + "learning_rate": 0.0002, + "loss": 0.9741, + "step": 504 + }, + { + "epoch": 0.4035964035964036, + "grad_norm": 1.9921875, + "learning_rate": 0.0002, + "loss": 0.9733, + "step": 505 + }, + { + "epoch": 0.4043956043956044, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.971, + "step": 506 + }, + { + "epoch": 
0.4051948051948052, + "grad_norm": 2.453125, + "learning_rate": 0.0002, + "loss": 0.9747, + "step": 507 + }, + { + "epoch": 0.405994005994006, + "grad_norm": 1.3125, + "learning_rate": 0.0002, + "loss": 0.9571, + "step": 508 + }, + { + "epoch": 0.4067932067932068, + "grad_norm": 4.375, + "learning_rate": 0.0002, + "loss": 0.9966, + "step": 509 + }, + { + "epoch": 0.4075924075924076, + "grad_norm": 3.71875, + "learning_rate": 0.0002, + "loss": 0.9856, + "step": 510 + }, + { + "epoch": 0.4083916083916084, + "grad_norm": 1.0, + "learning_rate": 0.0002, + "loss": 0.9844, + "step": 511 + }, + { + "epoch": 0.4091908091908092, + "grad_norm": 4.1875, + "learning_rate": 0.0002, + "loss": 1.0065, + "step": 512 + }, + { + "epoch": 0.40999000999001, + "grad_norm": 3.828125, + "learning_rate": 0.0002, + "loss": 1.0032, + "step": 513 + }, + { + "epoch": 0.41078921078921077, + "grad_norm": 1.609375, + "learning_rate": 0.0002, + "loss": 0.9871, + "step": 514 + }, + { + "epoch": 0.4115884115884116, + "grad_norm": 2.390625, + "learning_rate": 0.0002, + "loss": 1.0058, + "step": 515 + }, + { + "epoch": 0.4123876123876124, + "grad_norm": 2.03125, + "learning_rate": 0.0002, + "loss": 1.003, + "step": 516 + }, + { + "epoch": 0.41318681318681316, + "grad_norm": 1.6328125, + "learning_rate": 0.0002, + "loss": 0.9935, + "step": 517 + }, + { + "epoch": 0.413986013986014, + "grad_norm": 1.5625, + "learning_rate": 0.0002, + "loss": 0.9933, + "step": 518 + }, + { + "epoch": 0.4147852147852148, + "grad_norm": 0.9921875, + "learning_rate": 0.0002, + "loss": 0.9821, + "step": 519 + }, + { + "epoch": 0.4155844155844156, + "grad_norm": 1.1484375, + "learning_rate": 0.0002, + "loss": 0.981, + "step": 520 + }, + { + "epoch": 0.4163836163836164, + "grad_norm": 1.3203125, + "learning_rate": 0.0002, + "loss": 0.9768, + "step": 521 + }, + { + "epoch": 0.4171828171828172, + "grad_norm": 0.8515625, + "learning_rate": 0.0002, + "loss": 0.9851, + "step": 522 + }, + { + "epoch": 0.417982017982018, + "grad_norm": 0.69140625, + "learning_rate": 0.0002, + "loss": 0.9805, + "step": 523 + }, + { + "epoch": 0.41878121878121877, + "grad_norm": 0.8515625, + "learning_rate": 0.0002, + "loss": 0.9721, + "step": 524 + }, + { + "epoch": 0.4195804195804196, + "grad_norm": 0.91796875, + "learning_rate": 0.0002, + "loss": 0.9758, + "step": 525 + }, + { + "epoch": 0.4203796203796204, + "grad_norm": 0.75, + "learning_rate": 0.0002, + "loss": 0.9759, + "step": 526 + }, + { + "epoch": 0.42117882117882116, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9673, + "step": 527 + }, + { + "epoch": 0.421978021978022, + "grad_norm": 0.734375, + "learning_rate": 0.0002, + "loss": 0.9705, + "step": 528 + }, + { + "epoch": 0.4227772227772228, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.9699, + "step": 529 + }, + { + "epoch": 0.42357642357642356, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.977, + "step": 530 + }, + { + "epoch": 0.42437562437562437, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.9698, + "step": 531 + }, + { + "epoch": 0.4251748251748252, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9645, + "step": 532 + }, + { + "epoch": 0.42597402597402595, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9701, + "step": 533 + }, + { + "epoch": 0.42677322677322677, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9823, + "step": 534 + }, + { + "epoch": 0.4275724275724276, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + 
"loss": 0.9652, + "step": 535 + }, + { + "epoch": 0.4283716283716284, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 1.0032, + "step": 536 + }, + { + "epoch": 0.42917082917082916, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9627, + "step": 537 + }, + { + "epoch": 0.42997002997003, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.9722, + "step": 538 + }, + { + "epoch": 0.4307692307692308, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9705, + "step": 539 + }, + { + "epoch": 0.43156843156843155, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9534, + "step": 540 + }, + { + "epoch": 0.43236763236763237, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.9605, + "step": 541 + }, + { + "epoch": 0.4331668331668332, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9728, + "step": 542 + }, + { + "epoch": 0.43396603396603395, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9666, + "step": 543 + }, + { + "epoch": 0.43476523476523476, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9846, + "step": 544 + }, + { + "epoch": 0.4355644355644356, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9661, + "step": 545 + }, + { + "epoch": 0.43636363636363634, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9567, + "step": 546 + }, + { + "epoch": 0.43716283716283716, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.965, + "step": 547 + }, + { + "epoch": 0.437962037962038, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9692, + "step": 548 + }, + { + "epoch": 0.43876123876123874, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9624, + "step": 549 + }, + { + "epoch": 0.43956043956043955, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.9629, + "step": 550 + }, + { + "epoch": 0.44035964035964037, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9585, + "step": 551 + }, + { + "epoch": 0.4411588411588412, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9641, + "step": 552 + }, + { + "epoch": 0.44195804195804195, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9625, + "step": 553 + }, + { + "epoch": 0.44275724275724276, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9725, + "step": 554 + }, + { + "epoch": 0.4435564435564436, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9572, + "step": 555 + }, + { + "epoch": 0.44435564435564434, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9528, + "step": 556 + }, + { + "epoch": 0.44515484515484516, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9578, + "step": 557 + }, + { + "epoch": 0.445954045954046, + "grad_norm": 0.76171875, + "learning_rate": 0.0002, + "loss": 0.9924, + "step": 558 + }, + { + "epoch": 0.44675324675324674, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9672, + "step": 559 + }, + { + "epoch": 0.44755244755244755, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.961, + "step": 560 + }, + { + "epoch": 0.44835164835164837, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9637, + "step": 561 + }, + { + "epoch": 0.44915084915084913, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.9612, + "step": 562 + }, + { + "epoch": 0.44995004995004995, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 
0.9778, + "step": 563 + }, + { + "epoch": 0.45074925074925076, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9764, + "step": 564 + }, + { + "epoch": 0.4515484515484515, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9674, + "step": 565 + }, + { + "epoch": 0.45234765234765234, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.975, + "step": 566 + }, + { + "epoch": 0.45314685314685316, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9706, + "step": 567 + }, + { + "epoch": 0.453946053946054, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9595, + "step": 568 + }, + { + "epoch": 0.45474525474525473, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9616, + "step": 569 + }, + { + "epoch": 0.45554445554445555, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9772, + "step": 570 + }, + { + "epoch": 0.45634365634365637, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.972, + "step": 571 + }, + { + "epoch": 0.45714285714285713, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9671, + "step": 572 + }, + { + "epoch": 0.45794205794205795, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9545, + "step": 573 + }, + { + "epoch": 0.45874125874125876, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9589, + "step": 574 + }, + { + "epoch": 0.4595404595404595, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9543, + "step": 575 + }, + { + "epoch": 0.46033966033966034, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9687, + "step": 576 + }, + { + "epoch": 0.46113886113886116, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.9599, + "step": 577 + }, + { + "epoch": 0.4619380619380619, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9639, + "step": 578 + }, + { + "epoch": 0.46273726273726273, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9622, + "step": 579 + }, + { + "epoch": 0.46353646353646355, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9585, + "step": 580 + }, + { + "epoch": 0.4643356643356643, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9655, + "step": 581 + }, + { + "epoch": 0.4651348651348651, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.9657, + "step": 582 + }, + { + "epoch": 0.46593406593406594, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9736, + "step": 583 + }, + { + "epoch": 0.46673326673326676, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9554, + "step": 584 + }, + { + "epoch": 0.4675324675324675, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.9578, + "step": 585 + }, + { + "epoch": 0.46833166833166834, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.9584, + "step": 586 + }, + { + "epoch": 0.46913086913086915, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.9568, + "step": 587 + }, + { + "epoch": 0.4699300699300699, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9654, + "step": 588 + }, + { + "epoch": 0.47072927072927073, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.9606, + "step": 589 + }, + { + "epoch": 0.47152847152847155, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9664, + "step": 590 + }, + { + "epoch": 0.4723276723276723, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.9556, + 
"step": 591 + }, + { + "epoch": 0.4731268731268731, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9619, + "step": 592 + }, + { + "epoch": 0.47392607392607394, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9612, + "step": 593 + }, + { + "epoch": 0.4747252747252747, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9556, + "step": 594 + }, + { + "epoch": 0.4755244755244755, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9562, + "step": 595 + }, + { + "epoch": 0.47632367632367634, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.9704, + "step": 596 + }, + { + "epoch": 0.4771228771228771, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9603, + "step": 597 + }, + { + "epoch": 0.4779220779220779, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.9578, + "step": 598 + }, + { + "epoch": 0.47872127872127873, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9675, + "step": 599 + }, + { + "epoch": 0.47952047952047955, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.9496, + "step": 600 + }, + { + "epoch": 0.4803196803196803, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9625, + "step": 601 + }, + { + "epoch": 0.4811188811188811, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.9678, + "step": 602 + }, + { + "epoch": 0.48191808191808194, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9612, + "step": 603 + }, + { + "epoch": 0.4827172827172827, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.954, + "step": 604 + }, + { + "epoch": 0.4835164835164835, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9645, + "step": 605 + }, + { + "epoch": 0.48431568431568434, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9512, + "step": 606 + }, + { + "epoch": 0.4851148851148851, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.95, + "step": 607 + }, + { + "epoch": 0.4859140859140859, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9608, + "step": 608 + }, + { + "epoch": 0.48671328671328673, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9713, + "step": 609 + }, + { + "epoch": 0.4875124875124875, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9625, + "step": 610 + }, + { + "epoch": 0.4883116883116883, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9605, + "step": 611 + }, + { + "epoch": 0.4891108891108891, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9594, + "step": 612 + }, + { + "epoch": 0.4899100899100899, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.953, + "step": 613 + }, + { + "epoch": 0.4907092907092907, + "grad_norm": 0.87109375, + "learning_rate": 0.0002, + "loss": 0.9565, + "step": 614 + }, + { + "epoch": 0.4915084915084915, + "grad_norm": 1.9765625, + "learning_rate": 0.0002, + "loss": 0.9683, + "step": 615 + }, + { + "epoch": 0.49230769230769234, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9607, + "step": 616 + }, + { + "epoch": 0.4931068931068931, + "grad_norm": 3.09375, + "learning_rate": 0.0002, + "loss": 0.9799, + "step": 617 + }, + { + "epoch": 0.4939060939060939, + "grad_norm": 1.71875, + "learning_rate": 0.0002, + "loss": 0.9576, + "step": 618 + }, + { + "epoch": 0.49470529470529473, + "grad_norm": 3.3125, + "learning_rate": 0.0002, + "loss": 0.9817, + "step": 619 + }, + { + "epoch": 
0.4955044955044955, + "grad_norm": 1.3671875, + "learning_rate": 0.0002, + "loss": 0.9602, + "step": 620 + }, + { + "epoch": 0.4963036963036963, + "grad_norm": 1.2734375, + "learning_rate": 0.0002, + "loss": 0.9746, + "step": 621 + }, + { + "epoch": 0.4971028971028971, + "grad_norm": 0.765625, + "learning_rate": 0.0002, + "loss": 0.9646, + "step": 622 + }, + { + "epoch": 0.4979020979020979, + "grad_norm": 1.3203125, + "learning_rate": 0.0002, + "loss": 0.977, + "step": 623 + }, + { + "epoch": 0.4987012987012987, + "grad_norm": 0.64453125, + "learning_rate": 0.0002, + "loss": 0.9694, + "step": 624 + }, + { + "epoch": 0.4995004995004995, + "grad_norm": 0.89453125, + "learning_rate": 0.0002, + "loss": 0.9741, + "step": 625 + }, + { + "epoch": 0.5002997002997003, + "grad_norm": 1.03125, + "learning_rate": 0.0002, + "loss": 0.9771, + "step": 626 + }, + { + "epoch": 0.5010989010989011, + "grad_norm": 1.234375, + "learning_rate": 0.0002, + "loss": 0.9678, + "step": 627 + }, + { + "epoch": 0.5018981018981019, + "grad_norm": 1.71875, + "learning_rate": 0.0002, + "loss": 0.985, + "step": 628 + }, + { + "epoch": 0.5026973026973027, + "grad_norm": 1.0078125, + "learning_rate": 0.0002, + "loss": 0.9604, + "step": 629 + }, + { + "epoch": 0.5034965034965035, + "grad_norm": 2.34375, + "learning_rate": 0.0002, + "loss": 0.9795, + "step": 630 + }, + { + "epoch": 0.5042957042957043, + "grad_norm": 1.484375, + "learning_rate": 0.0002, + "loss": 0.9803, + "step": 631 + }, + { + "epoch": 0.5050949050949051, + "grad_norm": 4.125, + "learning_rate": 0.0002, + "loss": 1.0163, + "step": 632 + }, + { + "epoch": 0.5058941058941059, + "grad_norm": 3.828125, + "learning_rate": 0.0002, + "loss": 1.0087, + "step": 633 + }, + { + "epoch": 0.5066933066933067, + "grad_norm": 1.734375, + "learning_rate": 0.0002, + "loss": 0.985, + "step": 634 + }, + { + "epoch": 0.5074925074925075, + "grad_norm": 1.7109375, + "learning_rate": 0.0002, + "loss": 0.9874, + "step": 635 + }, + { + "epoch": 0.5082917082917083, + "grad_norm": 2.015625, + "learning_rate": 0.0002, + "loss": 0.9766, + "step": 636 + }, + { + "epoch": 0.509090909090909, + "grad_norm": 1.28125, + "learning_rate": 0.0002, + "loss": 0.9683, + "step": 637 + }, + { + "epoch": 0.5098901098901099, + "grad_norm": 2.8125, + "learning_rate": 0.0002, + "loss": 0.9666, + "step": 638 + }, + { + "epoch": 0.5106893106893107, + "grad_norm": 2.3125, + "learning_rate": 0.0002, + "loss": 0.9873, + "step": 639 + }, + { + "epoch": 0.5114885114885115, + "grad_norm": 3.484375, + "learning_rate": 0.0002, + "loss": 0.9802, + "step": 640 + }, + { + "epoch": 0.5122877122877123, + "grad_norm": 3.109375, + "learning_rate": 0.0002, + "loss": 0.9881, + "step": 641 + }, + { + "epoch": 0.5130869130869131, + "grad_norm": 3.15625, + "learning_rate": 0.0002, + "loss": 0.9783, + "step": 642 + }, + { + "epoch": 0.5138861138861139, + "grad_norm": 3.03125, + "learning_rate": 0.0002, + "loss": 0.9822, + "step": 643 + }, + { + "epoch": 0.5146853146853146, + "grad_norm": 1.7890625, + "learning_rate": 0.0002, + "loss": 0.9745, + "step": 644 + }, + { + "epoch": 0.5154845154845155, + "grad_norm": 1.6796875, + "learning_rate": 0.0002, + "loss": 0.9732, + "step": 645 + }, + { + "epoch": 0.5162837162837163, + "grad_norm": 2.640625, + "learning_rate": 0.0002, + "loss": 0.9754, + "step": 646 + }, + { + "epoch": 0.5170829170829171, + "grad_norm": 2.359375, + "learning_rate": 0.0002, + "loss": 0.9666, + "step": 647 + }, + { + "epoch": 0.5178821178821179, + "grad_norm": 2.609375, + "learning_rate": 0.0002, + "loss": 
0.9754, + "step": 648 + }, + { + "epoch": 0.5186813186813187, + "grad_norm": 2.578125, + "learning_rate": 0.0002, + "loss": 0.9681, + "step": 649 + }, + { + "epoch": 0.5194805194805194, + "grad_norm": 1.3671875, + "learning_rate": 0.0002, + "loss": 0.9702, + "step": 650 + }, + { + "epoch": 0.5202797202797202, + "grad_norm": 1.2578125, + "learning_rate": 0.0002, + "loss": 0.9748, + "step": 651 + }, + { + "epoch": 0.5210789210789211, + "grad_norm": 2.515625, + "learning_rate": 0.0002, + "loss": 0.9633, + "step": 652 + }, + { + "epoch": 0.5218781218781219, + "grad_norm": 2.359375, + "learning_rate": 0.0002, + "loss": 0.9639, + "step": 653 + }, + { + "epoch": 0.5226773226773227, + "grad_norm": 1.65625, + "learning_rate": 0.0002, + "loss": 0.9638, + "step": 654 + }, + { + "epoch": 0.5234765234765235, + "grad_norm": 1.59375, + "learning_rate": 0.0002, + "loss": 0.9577, + "step": 655 + }, + { + "epoch": 0.5242757242757243, + "grad_norm": 1.75, + "learning_rate": 0.0002, + "loss": 0.9527, + "step": 656 + }, + { + "epoch": 0.525074925074925, + "grad_norm": 1.6015625, + "learning_rate": 0.0002, + "loss": 0.9651, + "step": 657 + }, + { + "epoch": 0.5258741258741259, + "grad_norm": 1.9140625, + "learning_rate": 0.0002, + "loss": 0.9684, + "step": 658 + }, + { + "epoch": 0.5266733266733267, + "grad_norm": 1.828125, + "learning_rate": 0.0002, + "loss": 0.9685, + "step": 659 + }, + { + "epoch": 0.5274725274725275, + "grad_norm": 1.296875, + "learning_rate": 0.0002, + "loss": 0.9737, + "step": 660 + }, + { + "epoch": 0.5282717282717283, + "grad_norm": 1.234375, + "learning_rate": 0.0002, + "loss": 0.9637, + "step": 661 + }, + { + "epoch": 0.5290709290709291, + "grad_norm": 1.71875, + "learning_rate": 0.0002, + "loss": 0.9757, + "step": 662 + }, + { + "epoch": 0.5298701298701298, + "grad_norm": 1.59375, + "learning_rate": 0.0002, + "loss": 0.9603, + "step": 663 + }, + { + "epoch": 0.5306693306693306, + "grad_norm": 1.4453125, + "learning_rate": 0.0002, + "loss": 0.9639, + "step": 664 + }, + { + "epoch": 0.5314685314685315, + "grad_norm": 1.40625, + "learning_rate": 0.0002, + "loss": 0.9722, + "step": 665 + }, + { + "epoch": 0.5322677322677323, + "grad_norm": 1.28125, + "learning_rate": 0.0002, + "loss": 0.9585, + "step": 666 + }, + { + "epoch": 0.5330669330669331, + "grad_norm": 1.140625, + "learning_rate": 0.0002, + "loss": 0.957, + "step": 667 + }, + { + "epoch": 0.5338661338661339, + "grad_norm": 1.5703125, + "learning_rate": 0.0002, + "loss": 0.957, + "step": 668 + }, + { + "epoch": 0.5346653346653346, + "grad_norm": 1.4921875, + "learning_rate": 0.0002, + "loss": 0.9568, + "step": 669 + }, + { + "epoch": 0.5354645354645354, + "grad_norm": 1.1015625, + "learning_rate": 0.0002, + "loss": 0.9592, + "step": 670 + }, + { + "epoch": 0.5362637362637362, + "grad_norm": 1.0078125, + "learning_rate": 0.0002, + "loss": 0.9539, + "step": 671 + }, + { + "epoch": 0.5370629370629371, + "grad_norm": 1.421875, + "learning_rate": 0.0002, + "loss": 0.9659, + "step": 672 + }, + { + "epoch": 0.5378621378621379, + "grad_norm": 1.3203125, + "learning_rate": 0.0002, + "loss": 0.9546, + "step": 673 + }, + { + "epoch": 0.5386613386613387, + "grad_norm": 1.1953125, + "learning_rate": 0.0002, + "loss": 0.9541, + "step": 674 + }, + { + "epoch": 0.5394605394605395, + "grad_norm": 1.1484375, + "learning_rate": 0.0002, + "loss": 0.9562, + "step": 675 + }, + { + "epoch": 0.5402597402597402, + "grad_norm": 1.15625, + "learning_rate": 0.0002, + "loss": 0.9588, + "step": 676 + }, + { + "epoch": 0.541058941058941, + "grad_norm": 
1.046875, + "learning_rate": 0.0002, + "loss": 0.9748, + "step": 677 + }, + { + "epoch": 0.5418581418581419, + "grad_norm": 1.25, + "learning_rate": 0.0002, + "loss": 0.9585, + "step": 678 + }, + { + "epoch": 0.5426573426573427, + "grad_norm": 1.1328125, + "learning_rate": 0.0002, + "loss": 0.9598, + "step": 679 + }, + { + "epoch": 0.5434565434565435, + "grad_norm": 1.1484375, + "learning_rate": 0.0002, + "loss": 0.9593, + "step": 680 + }, + { + "epoch": 0.5442557442557443, + "grad_norm": 1.0546875, + "learning_rate": 0.0002, + "loss": 0.9593, + "step": 681 + }, + { + "epoch": 0.545054945054945, + "grad_norm": 1.203125, + "learning_rate": 0.0002, + "loss": 0.9662, + "step": 682 + }, + { + "epoch": 0.5458541458541458, + "grad_norm": 0.9921875, + "learning_rate": 0.0002, + "loss": 0.9631, + "step": 683 + }, + { + "epoch": 0.5466533466533466, + "grad_norm": 1.3203125, + "learning_rate": 0.0002, + "loss": 0.9629, + "step": 684 + }, + { + "epoch": 0.5474525474525475, + "grad_norm": 1.0703125, + "learning_rate": 0.0002, + "loss": 0.9607, + "step": 685 + }, + { + "epoch": 0.5482517482517483, + "grad_norm": 1.3515625, + "learning_rate": 0.0002, + "loss": 0.9548, + "step": 686 + }, + { + "epoch": 0.5490509490509491, + "grad_norm": 1.0859375, + "learning_rate": 0.0002, + "loss": 0.9686, + "step": 687 + }, + { + "epoch": 0.5498501498501499, + "grad_norm": 1.3359375, + "learning_rate": 0.0002, + "loss": 0.9729, + "step": 688 + }, + { + "epoch": 0.5506493506493506, + "grad_norm": 1.046875, + "learning_rate": 0.0002, + "loss": 0.9578, + "step": 689 + }, + { + "epoch": 0.5514485514485514, + "grad_norm": 1.9609375, + "learning_rate": 0.0002, + "loss": 0.9595, + "step": 690 + }, + { + "epoch": 0.5522477522477522, + "grad_norm": 1.4375, + "learning_rate": 0.0002, + "loss": 0.9652, + "step": 691 + }, + { + "epoch": 0.5530469530469531, + "grad_norm": 2.015625, + "learning_rate": 0.0002, + "loss": 0.9667, + "step": 692 + }, + { + "epoch": 0.5538461538461539, + "grad_norm": 1.9296875, + "learning_rate": 0.0002, + "loss": 0.9716, + "step": 693 + }, + { + "epoch": 0.5546453546453547, + "grad_norm": 1.1875, + "learning_rate": 0.0002, + "loss": 0.9626, + "step": 694 + }, + { + "epoch": 0.5554445554445554, + "grad_norm": 1.1796875, + "learning_rate": 0.0002, + "loss": 0.9607, + "step": 695 + }, + { + "epoch": 0.5562437562437562, + "grad_norm": 1.015625, + "learning_rate": 0.0002, + "loss": 0.9571, + "step": 696 + }, + { + "epoch": 0.557042957042957, + "grad_norm": 1.046875, + "learning_rate": 0.0002, + "loss": 0.9725, + "step": 697 + }, + { + "epoch": 0.5578421578421578, + "grad_norm": 1.0546875, + "learning_rate": 0.0002, + "loss": 0.9573, + "step": 698 + }, + { + "epoch": 0.5586413586413587, + "grad_norm": 0.93359375, + "learning_rate": 0.0002, + "loss": 0.9757, + "step": 699 + }, + { + "epoch": 0.5594405594405595, + "grad_norm": 0.93359375, + "learning_rate": 0.0002, + "loss": 0.9547, + "step": 700 + }, + { + "epoch": 0.5602397602397602, + "grad_norm": 0.76953125, + "learning_rate": 0.0002, + "loss": 0.9639, + "step": 701 + }, + { + "epoch": 0.561038961038961, + "grad_norm": 0.79296875, + "learning_rate": 0.0002, + "loss": 0.9574, + "step": 702 + }, + { + "epoch": 0.5618381618381618, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.9612, + "step": 703 + }, + { + "epoch": 0.5626373626373626, + "grad_norm": 0.70703125, + "learning_rate": 0.0002, + "loss": 0.9603, + "step": 704 + }, + { + "epoch": 0.5634365634365635, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.9559, + "step": 705 
+ }, + { + "epoch": 0.5642357642357643, + "grad_norm": 0.68359375, + "learning_rate": 0.0002, + "loss": 0.9654, + "step": 706 + }, + { + "epoch": 0.5650349650349651, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9596, + "step": 707 + }, + { + "epoch": 0.5658341658341658, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9577, + "step": 708 + }, + { + "epoch": 0.5666333666333666, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9477, + "step": 709 + }, + { + "epoch": 0.5674325674325674, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9544, + "step": 710 + }, + { + "epoch": 0.5682317682317682, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9678, + "step": 711 + }, + { + "epoch": 0.5690309690309691, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9547, + "step": 712 + }, + { + "epoch": 0.5698301698301699, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.952, + "step": 713 + }, + { + "epoch": 0.5706293706293706, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9449, + "step": 714 + }, + { + "epoch": 0.5714285714285714, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9617, + "step": 715 + }, + { + "epoch": 0.5722277722277722, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9499, + "step": 716 + }, + { + "epoch": 0.573026973026973, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9448, + "step": 717 + }, + { + "epoch": 0.5738261738261738, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9566, + "step": 718 + }, + { + "epoch": 0.5746253746253747, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9474, + "step": 719 + }, + { + "epoch": 0.5754245754245755, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9512, + "step": 720 + }, + { + "epoch": 0.5762237762237762, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9597, + "step": 721 + }, + { + "epoch": 0.577022977022977, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9592, + "step": 722 + }, + { + "epoch": 0.5778221778221778, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9454, + "step": 723 + }, + { + "epoch": 0.5786213786213786, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9474, + "step": 724 + }, + { + "epoch": 0.5794205794205795, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9498, + "step": 725 + }, + { + "epoch": 0.5802197802197803, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9513, + "step": 726 + }, + { + "epoch": 0.581018981018981, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9618, + "step": 727 + }, + { + "epoch": 0.5818181818181818, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.948, + "step": 728 + }, + { + "epoch": 0.5826173826173826, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9427, + "step": 729 + }, + { + "epoch": 0.5834165834165834, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9506, + "step": 730 + }, + { + "epoch": 0.5842157842157842, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9627, + "step": 731 + }, + { + "epoch": 0.5850149850149851, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9563, + "step": 732 + }, + { + "epoch": 0.5858141858141858, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9508, + "step": 733 + }, + { + "epoch": 
0.5866133866133866, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9634, + "step": 734 + }, + { + "epoch": 0.5874125874125874, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9412, + "step": 735 + }, + { + "epoch": 0.5882117882117882, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9478, + "step": 736 + }, + { + "epoch": 0.589010989010989, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9461, + "step": 737 + }, + { + "epoch": 0.5898101898101898, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9528, + "step": 738 + }, + { + "epoch": 0.5906093906093907, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9563, + "step": 739 + }, + { + "epoch": 0.5914085914085914, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9505, + "step": 740 + }, + { + "epoch": 0.5922077922077922, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9631, + "step": 741 + }, + { + "epoch": 0.593006993006993, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9508, + "step": 742 + }, + { + "epoch": 0.5938061938061938, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9393, + "step": 743 + }, + { + "epoch": 0.5946053946053946, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9639, + "step": 744 + }, + { + "epoch": 0.5954045954045954, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9493, + "step": 745 + }, + { + "epoch": 0.5962037962037962, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9542, + "step": 746 + }, + { + "epoch": 0.597002997002997, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9497, + "step": 747 + }, + { + "epoch": 0.5978021978021978, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9544, + "step": 748 + }, + { + "epoch": 0.5986013986013986, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9514, + "step": 749 + }, + { + "epoch": 0.5994005994005994, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9518, + "step": 750 + }, + { + "epoch": 0.6001998001998002, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9461, + "step": 751 + }, + { + "epoch": 0.600999000999001, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9509, + "step": 752 + }, + { + "epoch": 0.6017982017982018, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9563, + "step": 753 + }, + { + "epoch": 0.6025974025974026, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9607, + "step": 754 + }, + { + "epoch": 0.6033966033966034, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9561, + "step": 755 + }, + { + "epoch": 0.6041958041958042, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9481, + "step": 756 + }, + { + "epoch": 0.604995004995005, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9566, + "step": 757 + }, + { + "epoch": 0.6057942057942058, + "grad_norm": 0.703125, + "learning_rate": 0.0002, + "loss": 0.9807, + "step": 758 + }, + { + "epoch": 0.6065934065934065, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9499, + "step": 759 + }, + { + "epoch": 0.6073926073926074, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9561, + "step": 760 + }, + { + "epoch": 0.6081918081918082, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9444, + "step": 761 + }, + { + "epoch": 0.608991008991009, + "grad_norm": 
0.439453125, + "learning_rate": 0.0002, + "loss": 0.9606, + "step": 762 + }, + { + "epoch": 0.6097902097902098, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9558, + "step": 763 + }, + { + "epoch": 0.6105894105894106, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9578, + "step": 764 + }, + { + "epoch": 0.6113886113886113, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9636, + "step": 765 + }, + { + "epoch": 0.6121878121878122, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9478, + "step": 766 + }, + { + "epoch": 0.612987012987013, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9505, + "step": 767 + }, + { + "epoch": 0.6137862137862138, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.949, + "step": 768 + }, + { + "epoch": 0.6145854145854146, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9581, + "step": 769 + }, + { + "epoch": 0.6153846153846154, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9566, + "step": 770 + }, + { + "epoch": 0.6161838161838162, + "grad_norm": 0.9375, + "learning_rate": 0.0002, + "loss": 0.9776, + "step": 771 + }, + { + "epoch": 0.6169830169830169, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9523, + "step": 772 + }, + { + "epoch": 0.6177822177822178, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9474, + "step": 773 + }, + { + "epoch": 0.6185814185814186, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9549, + "step": 774 + }, + { + "epoch": 0.6193806193806194, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9621, + "step": 775 + }, + { + "epoch": 0.6201798201798202, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9582, + "step": 776 + }, + { + "epoch": 0.620979020979021, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9553, + "step": 777 + }, + { + "epoch": 0.6217782217782217, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9599, + "step": 778 + }, + { + "epoch": 0.6225774225774225, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.9587, + "step": 779 + }, + { + "epoch": 0.6233766233766234, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.954, + "step": 780 + }, + { + "epoch": 0.6241758241758242, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9522, + "step": 781 + }, + { + "epoch": 0.624975024975025, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.9682, + "step": 782 + }, + { + "epoch": 0.6257742257742258, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.9605, + "step": 783 + }, + { + "epoch": 0.6265734265734266, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.965, + "step": 784 + }, + { + "epoch": 0.6273726273726273, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.9615, + "step": 785 + }, + { + "epoch": 0.6281718281718282, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9594, + "step": 786 + }, + { + "epoch": 0.628971028971029, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9503, + "step": 787 + }, + { + "epoch": 0.6297702297702298, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9591, + "step": 788 + }, + { + "epoch": 0.6305694305694306, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9525, + "step": 789 + }, + { + "epoch": 0.6313686313686314, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + 
"loss": 0.947, + "step": 790 + }, + { + "epoch": 0.6321678321678321, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9504, + "step": 791 + }, + { + "epoch": 0.6329670329670329, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9487, + "step": 792 + }, + { + "epoch": 0.6337662337662338, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9546, + "step": 793 + }, + { + "epoch": 0.6345654345654346, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.9521, + "step": 794 + }, + { + "epoch": 0.6353646353646354, + "grad_norm": 0.6953125, + "learning_rate": 0.0002, + "loss": 0.9547, + "step": 795 + }, + { + "epoch": 0.6361638361638362, + "grad_norm": 0.6875, + "learning_rate": 0.0002, + "loss": 0.9538, + "step": 796 + }, + { + "epoch": 0.6369630369630369, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9424, + "step": 797 + }, + { + "epoch": 0.6377622377622377, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9457, + "step": 798 + }, + { + "epoch": 0.6385614385614385, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9586, + "step": 799 + }, + { + "epoch": 0.6393606393606394, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9511, + "step": 800 + }, + { + "epoch": 0.6401598401598402, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9519, + "step": 801 + }, + { + "epoch": 0.640959040959041, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9498, + "step": 802 + }, + { + "epoch": 0.6417582417582418, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9484, + "step": 803 + }, + { + "epoch": 0.6425574425574425, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9541, + "step": 804 + }, + { + "epoch": 0.6433566433566433, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9462, + "step": 805 + }, + { + "epoch": 0.6441558441558441, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9506, + "step": 806 + }, + { + "epoch": 0.644955044955045, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9571, + "step": 807 + }, + { + "epoch": 0.6457542457542458, + "grad_norm": 0.7265625, + "learning_rate": 0.0002, + "loss": 0.9455, + "step": 808 + }, + { + "epoch": 0.6465534465534466, + "grad_norm": 0.96875, + "learning_rate": 0.0002, + "loss": 0.9469, + "step": 809 + }, + { + "epoch": 0.6473526473526473, + "grad_norm": 1.359375, + "learning_rate": 0.0002, + "loss": 0.9467, + "step": 810 + }, + { + "epoch": 0.6481518481518481, + "grad_norm": 0.7265625, + "learning_rate": 0.0002, + "loss": 0.953, + "step": 811 + }, + { + "epoch": 0.6489510489510489, + "grad_norm": 0.97265625, + "learning_rate": 0.0002, + "loss": 0.9639, + "step": 812 + }, + { + "epoch": 0.6497502497502498, + "grad_norm": 0.84375, + "learning_rate": 0.0002, + "loss": 0.9529, + "step": 813 + }, + { + "epoch": 0.6505494505494506, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9507, + "step": 814 + }, + { + "epoch": 0.6513486513486514, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9523, + "step": 815 + }, + { + "epoch": 0.6521478521478522, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.9541, + "step": 816 + }, + { + "epoch": 0.6529470529470529, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9484, + "step": 817 + }, + { + "epoch": 0.6537462537462537, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9383, + "step": 818 + }, + { + "epoch": 
0.6545454545454545, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9606, + "step": 819 + }, + { + "epoch": 0.6553446553446554, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9556, + "step": 820 + }, + { + "epoch": 0.6561438561438562, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9489, + "step": 821 + }, + { + "epoch": 0.656943056943057, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9447, + "step": 822 + }, + { + "epoch": 0.6577422577422577, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9572, + "step": 823 + }, + { + "epoch": 0.6585414585414585, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9479, + "step": 824 + }, + { + "epoch": 0.6593406593406593, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9465, + "step": 825 + }, + { + "epoch": 0.6601398601398601, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9495, + "step": 826 + }, + { + "epoch": 0.660939060939061, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9516, + "step": 827 + }, + { + "epoch": 0.6617382617382618, + "grad_norm": 1.03125, + "learning_rate": 0.0002, + "loss": 0.9553, + "step": 828 + }, + { + "epoch": 0.6625374625374625, + "grad_norm": 1.7265625, + "learning_rate": 0.0002, + "loss": 0.9652, + "step": 829 + }, + { + "epoch": 0.6633366633366633, + "grad_norm": 0.78515625, + "learning_rate": 0.0002, + "loss": 0.9591, + "step": 830 + }, + { + "epoch": 0.6641358641358641, + "grad_norm": 1.7265625, + "learning_rate": 0.0002, + "loss": 0.9634, + "step": 831 + }, + { + "epoch": 0.6649350649350649, + "grad_norm": 0.98828125, + "learning_rate": 0.0002, + "loss": 0.9629, + "step": 832 + }, + { + "epoch": 0.6657342657342658, + "grad_norm": 2.734375, + "learning_rate": 0.0002, + "loss": 0.996, + "step": 833 + }, + { + "epoch": 0.6665334665334666, + "grad_norm": 2.109375, + "learning_rate": 0.0002, + "loss": 0.9801, + "step": 834 + }, + { + "epoch": 0.6673326673326674, + "grad_norm": 2.015625, + "learning_rate": 0.0002, + "loss": 0.9717, + "step": 835 + }, + { + "epoch": 0.6681318681318681, + "grad_norm": 1.5546875, + "learning_rate": 0.0002, + "loss": 0.9562, + "step": 836 + }, + { + "epoch": 0.6689310689310689, + "grad_norm": 2.90625, + "learning_rate": 0.0002, + "loss": 0.9736, + "step": 837 + }, + { + "epoch": 0.6697302697302697, + "grad_norm": 2.453125, + "learning_rate": 0.0002, + "loss": 0.978, + "step": 838 + }, + { + "epoch": 0.6705294705294705, + "grad_norm": 1.953125, + "learning_rate": 0.0002, + "loss": 0.9626, + "step": 839 + }, + { + "epoch": 0.6713286713286714, + "grad_norm": 1.703125, + "learning_rate": 0.0002, + "loss": 0.9681, + "step": 840 + }, + { + "epoch": 0.6721278721278722, + "grad_norm": 1.7421875, + "learning_rate": 0.0002, + "loss": 0.9609, + "step": 841 + }, + { + "epoch": 0.6729270729270729, + "grad_norm": 1.5390625, + "learning_rate": 0.0002, + "loss": 0.9685, + "step": 842 + }, + { + "epoch": 0.6737262737262737, + "grad_norm": 1.8671875, + "learning_rate": 0.0002, + "loss": 0.963, + "step": 843 + }, + { + "epoch": 0.6745254745254745, + "grad_norm": 1.5078125, + "learning_rate": 0.0002, + "loss": 0.9596, + "step": 844 + }, + { + "epoch": 0.6753246753246753, + "grad_norm": 1.7421875, + "learning_rate": 0.0002, + "loss": 0.9441, + "step": 845 + }, + { + "epoch": 0.6761238761238761, + "grad_norm": 1.6328125, + "learning_rate": 0.0002, + "loss": 0.9487, + "step": 846 + }, + { + "epoch": 0.676923076923077, + "grad_norm": 1.6875, + 
"learning_rate": 0.0002, + "loss": 0.9604, + "step": 847 + }, + { + "epoch": 0.6777222777222778, + "grad_norm": 1.4921875, + "learning_rate": 0.0002, + "loss": 0.953, + "step": 848 + }, + { + "epoch": 0.6785214785214785, + "grad_norm": 1.515625, + "learning_rate": 0.0002, + "loss": 0.9443, + "step": 849 + }, + { + "epoch": 0.6793206793206793, + "grad_norm": 1.0703125, + "learning_rate": 0.0002, + "loss": 0.9379, + "step": 850 + }, + { + "epoch": 0.6801198801198801, + "grad_norm": 1.5234375, + "learning_rate": 0.0002, + "loss": 0.9687, + "step": 851 + }, + { + "epoch": 0.6809190809190809, + "grad_norm": 1.3984375, + "learning_rate": 0.0002, + "loss": 0.9617, + "step": 852 + }, + { + "epoch": 0.6817182817182817, + "grad_norm": 1.9453125, + "learning_rate": 0.0002, + "loss": 0.9533, + "step": 853 + }, + { + "epoch": 0.6825174825174826, + "grad_norm": 1.59375, + "learning_rate": 0.0002, + "loss": 0.9519, + "step": 854 + }, + { + "epoch": 0.6833166833166833, + "grad_norm": 1.109375, + "learning_rate": 0.0002, + "loss": 0.957, + "step": 855 + }, + { + "epoch": 0.6841158841158841, + "grad_norm": 1.0078125, + "learning_rate": 0.0002, + "loss": 0.9442, + "step": 856 + }, + { + "epoch": 0.6849150849150849, + "grad_norm": 1.609375, + "learning_rate": 0.0002, + "loss": 0.9594, + "step": 857 + }, + { + "epoch": 0.6857142857142857, + "grad_norm": 1.328125, + "learning_rate": 0.0002, + "loss": 0.947, + "step": 858 + }, + { + "epoch": 0.6865134865134865, + "grad_norm": 1.5390625, + "learning_rate": 0.0002, + "loss": 0.9541, + "step": 859 + }, + { + "epoch": 0.6873126873126874, + "grad_norm": 1.375, + "learning_rate": 0.0002, + "loss": 0.9641, + "step": 860 + }, + { + "epoch": 0.6881118881118881, + "grad_norm": 0.953125, + "learning_rate": 0.0002, + "loss": 0.9531, + "step": 861 + }, + { + "epoch": 0.6889110889110889, + "grad_norm": 0.90234375, + "learning_rate": 0.0002, + "loss": 0.9482, + "step": 862 + }, + { + "epoch": 0.6897102897102897, + "grad_norm": 1.2265625, + "learning_rate": 0.0002, + "loss": 0.9451, + "step": 863 + }, + { + "epoch": 0.6905094905094905, + "grad_norm": 0.78515625, + "learning_rate": 0.0002, + "loss": 0.9553, + "step": 864 + }, + { + "epoch": 0.6913086913086913, + "grad_norm": 1.28125, + "learning_rate": 0.0002, + "loss": 0.9599, + "step": 865 + }, + { + "epoch": 0.6921078921078921, + "grad_norm": 0.9375, + "learning_rate": 0.0002, + "loss": 0.9502, + "step": 866 + }, + { + "epoch": 0.692907092907093, + "grad_norm": 1.421875, + "learning_rate": 0.0002, + "loss": 0.9555, + "step": 867 + }, + { + "epoch": 0.6937062937062937, + "grad_norm": 1.2734375, + "learning_rate": 0.0002, + "loss": 0.958, + "step": 868 + }, + { + "epoch": 0.6945054945054945, + "grad_norm": 1.4296875, + "learning_rate": 0.0002, + "loss": 0.9568, + "step": 869 + }, + { + "epoch": 0.6953046953046953, + "grad_norm": 1.171875, + "learning_rate": 0.0002, + "loss": 0.9478, + "step": 870 + }, + { + "epoch": 0.6961038961038961, + "grad_norm": 1.3984375, + "learning_rate": 0.0002, + "loss": 0.9406, + "step": 871 + }, + { + "epoch": 0.6969030969030969, + "grad_norm": 1.28125, + "learning_rate": 0.0002, + "loss": 0.9509, + "step": 872 + }, + { + "epoch": 0.6977022977022977, + "grad_norm": 1.0390625, + "learning_rate": 0.0002, + "loss": 0.9464, + "step": 873 + }, + { + "epoch": 0.6985014985014985, + "grad_norm": 1.015625, + "learning_rate": 0.0002, + "loss": 0.949, + "step": 874 + }, + { + "epoch": 0.6993006993006993, + "grad_norm": 1.640625, + "learning_rate": 0.0002, + "loss": 0.9852, + "step": 875 + }, + { + "epoch": 
0.7000999000999001, + "grad_norm": 1.375, + "learning_rate": 0.0002, + "loss": 0.9498, + "step": 876 + }, + { + "epoch": 0.7008991008991009, + "grad_norm": 0.890625, + "learning_rate": 0.0002, + "loss": 0.9519, + "step": 877 + }, + { + "epoch": 0.7016983016983017, + "grad_norm": 0.8125, + "learning_rate": 0.0002, + "loss": 0.9516, + "step": 878 + }, + { + "epoch": 0.7024975024975025, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9485, + "step": 879 + }, + { + "epoch": 0.7032967032967034, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 0.9507, + "step": 880 + }, + { + "epoch": 0.7040959040959041, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9412, + "step": 881 + }, + { + "epoch": 0.7048951048951049, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.964, + "step": 882 + }, + { + "epoch": 0.7056943056943057, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.955, + "step": 883 + }, + { + "epoch": 0.7064935064935065, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9485, + "step": 884 + }, + { + "epoch": 0.7072927072927073, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9502, + "step": 885 + }, + { + "epoch": 0.7080919080919081, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9631, + "step": 886 + }, + { + "epoch": 0.7088911088911088, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9518, + "step": 887 + }, + { + "epoch": 0.7096903096903097, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9429, + "step": 888 + }, + { + "epoch": 0.7104895104895105, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9486, + "step": 889 + }, + { + "epoch": 0.7112887112887113, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9525, + "step": 890 + }, + { + "epoch": 0.7120879120879121, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9462, + "step": 891 + }, + { + "epoch": 0.7128871128871129, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9491, + "step": 892 + }, + { + "epoch": 0.7136863136863136, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9531, + "step": 893 + }, + { + "epoch": 0.7144855144855145, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.956, + "step": 894 + }, + { + "epoch": 0.7152847152847153, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9547, + "step": 895 + }, + { + "epoch": 0.7160839160839161, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9549, + "step": 896 + }, + { + "epoch": 0.7168831168831169, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9576, + "step": 897 + }, + { + "epoch": 0.7176823176823177, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.9402, + "step": 898 + }, + { + "epoch": 0.7184815184815185, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9507, + "step": 899 + }, + { + "epoch": 0.7192807192807192, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9507, + "step": 900 + }, + { + "epoch": 0.72007992007992, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9494, + "step": 901 + }, + { + "epoch": 0.7208791208791209, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9487, + "step": 902 + }, + { + "epoch": 0.7216783216783217, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9491, + "step": 903 + }, + { + "epoch": 0.7224775224775225, + "grad_norm": 
0.326171875, + "learning_rate": 0.0002, + "loss": 0.957, + "step": 904 + }, + { + "epoch": 0.7232767232767233, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9517, + "step": 905 + }, + { + "epoch": 0.724075924075924, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9421, + "step": 906 + }, + { + "epoch": 0.7248751248751248, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.947, + "step": 907 + }, + { + "epoch": 0.7256743256743257, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.9521, + "step": 908 + }, + { + "epoch": 0.7264735264735265, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9582, + "step": 909 + }, + { + "epoch": 0.7272727272727273, + "grad_norm": 0.78125, + "learning_rate": 0.0002, + "loss": 0.9558, + "step": 910 + }, + { + "epoch": 0.7280719280719281, + "grad_norm": 0.671875, + "learning_rate": 0.0002, + "loss": 0.9522, + "step": 911 + }, + { + "epoch": 0.7288711288711289, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.952, + "step": 912 + }, + { + "epoch": 0.7296703296703296, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9445, + "step": 913 + }, + { + "epoch": 0.7304695304695304, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9388, + "step": 914 + }, + { + "epoch": 0.7312687312687313, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9478, + "step": 915 + }, + { + "epoch": 0.7320679320679321, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9517, + "step": 916 + }, + { + "epoch": 0.7328671328671329, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9487, + "step": 917 + }, + { + "epoch": 0.7336663336663337, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9378, + "step": 918 + }, + { + "epoch": 0.7344655344655344, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.946, + "step": 919 + }, + { + "epoch": 0.7352647352647352, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9529, + "step": 920 + }, + { + "epoch": 0.736063936063936, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9515, + "step": 921 + }, + { + "epoch": 0.7368631368631369, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9501, + "step": 922 + }, + { + "epoch": 0.7376623376623377, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9452, + "step": 923 + }, + { + "epoch": 0.7384615384615385, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9516, + "step": 924 + }, + { + "epoch": 0.7392607392607392, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9302, + "step": 925 + }, + { + "epoch": 0.74005994005994, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9502, + "step": 926 + }, + { + "epoch": 0.7408591408591408, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9408, + "step": 927 + }, + { + "epoch": 0.7416583416583417, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9509, + "step": 928 + }, + { + "epoch": 0.7424575424575425, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9541, + "step": 929 + }, + { + "epoch": 0.7432567432567433, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9463, + "step": 930 + }, + { + "epoch": 0.7440559440559441, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9545, + "step": 931 + }, + { + "epoch": 0.7448551448551448, + "grad_norm": 0.875, + "learning_rate": 0.0002, + "loss": 
0.9542, + "step": 932 + }, + { + "epoch": 0.7456543456543456, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9428, + "step": 933 + }, + { + "epoch": 0.7464535464535464, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9433, + "step": 934 + }, + { + "epoch": 0.7472527472527473, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.94, + "step": 935 + }, + { + "epoch": 0.7480519480519481, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9431, + "step": 936 + }, + { + "epoch": 0.7488511488511489, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9539, + "step": 937 + }, + { + "epoch": 0.7496503496503496, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9458, + "step": 938 + }, + { + "epoch": 0.7504495504495504, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9423, + "step": 939 + }, + { + "epoch": 0.7512487512487512, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9488, + "step": 940 + }, + { + "epoch": 0.752047952047952, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9415, + "step": 941 + }, + { + "epoch": 0.7528471528471529, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9544, + "step": 942 + }, + { + "epoch": 0.7536463536463537, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9465, + "step": 943 + }, + { + "epoch": 0.7544455544455545, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9419, + "step": 944 + }, + { + "epoch": 0.7552447552447552, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9473, + "step": 945 + }, + { + "epoch": 0.756043956043956, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9431, + "step": 946 + }, + { + "epoch": 0.7568431568431568, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9423, + "step": 947 + }, + { + "epoch": 0.7576423576423577, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9454, + "step": 948 + }, + { + "epoch": 0.7584415584415585, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9414, + "step": 949 + }, + { + "epoch": 0.7592407592407593, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9487, + "step": 950 + }, + { + "epoch": 0.76003996003996, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9491, + "step": 951 + }, + { + "epoch": 0.7608391608391608, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9462, + "step": 952 + }, + { + "epoch": 0.7616383616383616, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9394, + "step": 953 + }, + { + "epoch": 0.7624375624375624, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9415, + "step": 954 + }, + { + "epoch": 0.7632367632367633, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9429, + "step": 955 + }, + { + "epoch": 0.7640359640359641, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9425, + "step": 956 + }, + { + "epoch": 0.7648351648351648, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9479, + "step": 957 + }, + { + "epoch": 0.7656343656343656, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9508, + "step": 958 + }, + { + "epoch": 0.7664335664335664, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.9442, + "step": 959 + }, + { + "epoch": 0.7672327672327672, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9442, + "step": 960 + }, + 
{ + "epoch": 0.768031968031968, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9427, + "step": 961 + }, + { + "epoch": 0.7688311688311689, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9517, + "step": 962 + }, + { + "epoch": 0.7696303696303697, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.936, + "step": 963 + }, + { + "epoch": 0.7704295704295704, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9431, + "step": 964 + }, + { + "epoch": 0.7712287712287712, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9467, + "step": 965 + }, + { + "epoch": 0.772027972027972, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9394, + "step": 966 + }, + { + "epoch": 0.7728271728271728, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9481, + "step": 967 + }, + { + "epoch": 0.7736263736263737, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9515, + "step": 968 + }, + { + "epoch": 0.7744255744255745, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9535, + "step": 969 + }, + { + "epoch": 0.7752247752247752, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9392, + "step": 970 + }, + { + "epoch": 0.776023976023976, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9418, + "step": 971 + }, + { + "epoch": 0.7768231768231768, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.936, + "step": 972 + }, + { + "epoch": 0.7776223776223776, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9343, + "step": 973 + }, + { + "epoch": 0.7784215784215784, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9445, + "step": 974 + }, + { + "epoch": 0.7792207792207793, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.931, + "step": 975 + }, + { + "epoch": 0.7800199800199801, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9308, + "step": 976 + }, + { + "epoch": 0.7808191808191808, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9491, + "step": 977 + }, + { + "epoch": 0.7816183816183816, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9509, + "step": 978 + }, + { + "epoch": 0.7824175824175824, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9433, + "step": 979 + }, + { + "epoch": 0.7832167832167832, + "grad_norm": 1.765625, + "learning_rate": 0.0002, + "loss": 0.9743, + "step": 980 + }, + { + "epoch": 0.784015984015984, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9467, + "step": 981 + }, + { + "epoch": 0.7848151848151849, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 0.9535, + "step": 982 + }, + { + "epoch": 0.7856143856143856, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9484, + "step": 983 + }, + { + "epoch": 0.7864135864135864, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9344, + "step": 984 + }, + { + "epoch": 0.7872127872127872, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9354, + "step": 985 + }, + { + "epoch": 0.788011988011988, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9421, + "step": 986 + }, + { + "epoch": 0.7888111888111888, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9457, + "step": 987 + }, + { + "epoch": 0.7896103896103897, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9405, + "step": 988 + }, + { + "epoch": 0.7904095904095904, + 
"grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9322, + "step": 989 + }, + { + "epoch": 0.7912087912087912, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9431, + "step": 990 + }, + { + "epoch": 0.792007992007992, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9385, + "step": 991 + }, + { + "epoch": 0.7928071928071928, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9457, + "step": 992 + }, + { + "epoch": 0.7936063936063936, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.948, + "step": 993 + }, + { + "epoch": 0.7944055944055944, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9517, + "step": 994 + }, + { + "epoch": 0.7952047952047953, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.937, + "step": 995 + }, + { + "epoch": 0.796003996003996, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9394, + "step": 996 + }, + { + "epoch": 0.7968031968031968, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9411, + "step": 997 + }, + { + "epoch": 0.7976023976023976, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9384, + "step": 998 + }, + { + "epoch": 0.7984015984015984, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9469, + "step": 999 + }, + { + "epoch": 0.7992007992007992, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9444, + "step": 1000 + }, + { + "epoch": 0.8, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9444, + "step": 1001 + }, + { + "epoch": 0.8007992007992007, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9358, + "step": 1002 + }, + { + "epoch": 0.8015984015984016, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9524, + "step": 1003 + }, + { + "epoch": 0.8023976023976024, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9425, + "step": 1004 + }, + { + "epoch": 0.8031968031968032, + "grad_norm": 0.625, + "learning_rate": 0.0002, + "loss": 0.9398, + "step": 1005 + }, + { + "epoch": 0.803996003996004, + "grad_norm": 0.859375, + "learning_rate": 0.0002, + "loss": 0.9393, + "step": 1006 + }, + { + "epoch": 0.8047952047952048, + "grad_norm": 1.0703125, + "learning_rate": 0.0002, + "loss": 0.9391, + "step": 1007 + }, + { + "epoch": 0.8055944055944056, + "grad_norm": 1.0234375, + "learning_rate": 0.0002, + "loss": 0.9534, + "step": 1008 + }, + { + "epoch": 0.8063936063936064, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.939, + "step": 1009 + }, + { + "epoch": 0.8071928071928072, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9419, + "step": 1010 + }, + { + "epoch": 0.807992007992008, + "grad_norm": 0.7734375, + "learning_rate": 0.0002, + "loss": 0.9404, + "step": 1011 + }, + { + "epoch": 0.8087912087912088, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.9411, + "step": 1012 + }, + { + "epoch": 0.8095904095904096, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.946, + "step": 1013 + }, + { + "epoch": 0.8103896103896104, + "grad_norm": 0.65625, + "learning_rate": 0.0002, + "loss": 0.9412, + "step": 1014 + }, + { + "epoch": 0.8111888111888111, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9439, + "step": 1015 + }, + { + "epoch": 0.811988011988012, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9497, + "step": 1016 + }, + { + "epoch": 0.8127872127872128, + "grad_norm": 0.54296875, + "learning_rate": 
0.0002, + "loss": 0.9415, + "step": 1017 + }, + { + "epoch": 0.8135864135864136, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9309, + "step": 1018 + }, + { + "epoch": 0.8143856143856144, + "grad_norm": 0.76953125, + "learning_rate": 0.0002, + "loss": 0.9479, + "step": 1019 + }, + { + "epoch": 0.8151848151848152, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9433, + "step": 1020 + }, + { + "epoch": 0.8159840159840159, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9438, + "step": 1021 + }, + { + "epoch": 0.8167832167832167, + "grad_norm": 0.7109375, + "learning_rate": 0.0002, + "loss": 0.9352, + "step": 1022 + }, + { + "epoch": 0.8175824175824176, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9438, + "step": 1023 + }, + { + "epoch": 0.8183816183816184, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.9396, + "step": 1024 + }, + { + "epoch": 0.8191808191808192, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9516, + "step": 1025 + }, + { + "epoch": 0.81998001998002, + "grad_norm": 0.70703125, + "learning_rate": 0.0002, + "loss": 0.9346, + "step": 1026 + }, + { + "epoch": 0.8207792207792208, + "grad_norm": 0.94921875, + "learning_rate": 0.0002, + "loss": 0.9367, + "step": 1027 + }, + { + "epoch": 0.8215784215784215, + "grad_norm": 1.171875, + "learning_rate": 0.0002, + "loss": 0.9428, + "step": 1028 + }, + { + "epoch": 0.8223776223776224, + "grad_norm": 1.171875, + "learning_rate": 0.0002, + "loss": 0.9515, + "step": 1029 + }, + { + "epoch": 0.8231768231768232, + "grad_norm": 1.421875, + "learning_rate": 0.0002, + "loss": 0.9436, + "step": 1030 + }, + { + "epoch": 0.823976023976024, + "grad_norm": 0.88671875, + "learning_rate": 0.0002, + "loss": 0.9404, + "step": 1031 + }, + { + "epoch": 0.8247752247752248, + "grad_norm": 1.28125, + "learning_rate": 0.0002, + "loss": 0.9489, + "step": 1032 + }, + { + "epoch": 0.8255744255744256, + "grad_norm": 1.0546875, + "learning_rate": 0.0002, + "loss": 0.9523, + "step": 1033 + }, + { + "epoch": 0.8263736263736263, + "grad_norm": 1.3671875, + "learning_rate": 0.0002, + "loss": 0.9348, + "step": 1034 + }, + { + "epoch": 0.8271728271728271, + "grad_norm": 0.828125, + "learning_rate": 0.0002, + "loss": 0.9511, + "step": 1035 + }, + { + "epoch": 0.827972027972028, + "grad_norm": 1.046875, + "learning_rate": 0.0002, + "loss": 0.9498, + "step": 1036 + }, + { + "epoch": 0.8287712287712288, + "grad_norm": 1.1484375, + "learning_rate": 0.0002, + "loss": 0.9548, + "step": 1037 + }, + { + "epoch": 0.8295704295704296, + "grad_norm": 1.1015625, + "learning_rate": 0.0002, + "loss": 0.9519, + "step": 1038 + }, + { + "epoch": 0.8303696303696304, + "grad_norm": 1.0625, + "learning_rate": 0.0002, + "loss": 0.9453, + "step": 1039 + }, + { + "epoch": 0.8311688311688312, + "grad_norm": 1.0703125, + "learning_rate": 0.0002, + "loss": 0.9471, + "step": 1040 + }, + { + "epoch": 0.8319680319680319, + "grad_norm": 1.015625, + "learning_rate": 0.0002, + "loss": 0.9433, + "step": 1041 + }, + { + "epoch": 0.8327672327672327, + "grad_norm": 1.0859375, + "learning_rate": 0.0002, + "loss": 0.9504, + "step": 1042 + }, + { + "epoch": 0.8335664335664336, + "grad_norm": 0.98046875, + "learning_rate": 0.0002, + "loss": 0.9379, + "step": 1043 + }, + { + "epoch": 0.8343656343656344, + "grad_norm": 1.0703125, + "learning_rate": 0.0002, + "loss": 0.9442, + "step": 1044 + }, + { + "epoch": 0.8351648351648352, + "grad_norm": 0.83203125, + "learning_rate": 0.0002, + "loss": 0.9548, + 
"step": 1045 + }, + { + "epoch": 0.835964035964036, + "grad_norm": 0.73046875, + "learning_rate": 0.0002, + "loss": 0.9465, + "step": 1046 + }, + { + "epoch": 0.8367632367632367, + "grad_norm": 0.6875, + "learning_rate": 0.0002, + "loss": 0.945, + "step": 1047 + }, + { + "epoch": 0.8375624375624375, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.939, + "step": 1048 + }, + { + "epoch": 0.8383616383616384, + "grad_norm": 0.65234375, + "learning_rate": 0.0002, + "loss": 0.9467, + "step": 1049 + }, + { + "epoch": 0.8391608391608392, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.9439, + "step": 1050 + }, + { + "epoch": 0.83996003996004, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9456, + "step": 1051 + }, + { + "epoch": 0.8407592407592408, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9359, + "step": 1052 + }, + { + "epoch": 0.8415584415584415, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9449, + "step": 1053 + }, + { + "epoch": 0.8423576423576423, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9441, + "step": 1054 + }, + { + "epoch": 0.8431568431568431, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9414, + "step": 1055 + }, + { + "epoch": 0.843956043956044, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.944, + "step": 1056 + }, + { + "epoch": 0.8447552447552448, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9407, + "step": 1057 + }, + { + "epoch": 0.8455544455544456, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9339, + "step": 1058 + }, + { + "epoch": 0.8463536463536464, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.949, + "step": 1059 + }, + { + "epoch": 0.8471528471528471, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9826, + "step": 1060 + }, + { + "epoch": 0.8479520479520479, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9419, + "step": 1061 + }, + { + "epoch": 0.8487512487512487, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9446, + "step": 1062 + }, + { + "epoch": 0.8495504495504496, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9304, + "step": 1063 + }, + { + "epoch": 0.8503496503496504, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 0.9671, + "step": 1064 + }, + { + "epoch": 0.8511488511488512, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9449, + "step": 1065 + }, + { + "epoch": 0.8519480519480519, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9448, + "step": 1066 + }, + { + "epoch": 0.8527472527472527, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9388, + "step": 1067 + }, + { + "epoch": 0.8535464535464535, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9416, + "step": 1068 + }, + { + "epoch": 0.8543456543456543, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9331, + "step": 1069 + }, + { + "epoch": 0.8551448551448552, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9439, + "step": 1070 + }, + { + "epoch": 0.855944055944056, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9393, + "step": 1071 + }, + { + "epoch": 0.8567432567432568, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.946, + "step": 1072 + }, + { + "epoch": 0.8575424575424575, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.9413, + "step": 1073 + }, + { + "epoch": 
0.8583416583416583, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 0.9354, + "step": 1074 + }, + { + "epoch": 0.8591408591408591, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9365, + "step": 1075 + }, + { + "epoch": 0.85994005994006, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.9671, + "step": 1076 + }, + { + "epoch": 0.8607392607392608, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9513, + "step": 1077 + }, + { + "epoch": 0.8615384615384616, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9385, + "step": 1078 + }, + { + "epoch": 0.8623376623376623, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9421, + "step": 1079 + }, + { + "epoch": 0.8631368631368631, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9312, + "step": 1080 + }, + { + "epoch": 0.8639360639360639, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9371, + "step": 1081 + }, + { + "epoch": 0.8647352647352647, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.9436, + "step": 1082 + }, + { + "epoch": 0.8655344655344656, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.9632, + "step": 1083 + }, + { + "epoch": 0.8663336663336664, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9661, + "step": 1084 + }, + { + "epoch": 0.8671328671328671, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9284, + "step": 1085 + }, + { + "epoch": 0.8679320679320679, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9526, + "step": 1086 + }, + { + "epoch": 0.8687312687312687, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.943, + "step": 1087 + }, + { + "epoch": 0.8695304695304695, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9404, + "step": 1088 + }, + { + "epoch": 0.8703296703296703, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9402, + "step": 1089 + }, + { + "epoch": 0.8711288711288712, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9269, + "step": 1090 + }, + { + "epoch": 0.871928071928072, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9325, + "step": 1091 + }, + { + "epoch": 0.8727272727272727, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9427, + "step": 1092 + }, + { + "epoch": 0.8735264735264735, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9423, + "step": 1093 + }, + { + "epoch": 0.8743256743256743, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.94, + "step": 1094 + }, + { + "epoch": 0.8751248751248751, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9444, + "step": 1095 + }, + { + "epoch": 0.875924075924076, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9447, + "step": 1096 + }, + { + "epoch": 0.8767232767232768, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.939, + "step": 1097 + }, + { + "epoch": 0.8775224775224775, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9329, + "step": 1098 + }, + { + "epoch": 0.8783216783216783, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9462, + "step": 1099 + }, + { + "epoch": 0.8791208791208791, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9465, + "step": 1100 + }, + { + "epoch": 0.8799200799200799, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.9393, + "step": 1101 + }, + { + "epoch": 0.8807192807192807, + 
"grad_norm": 0.77734375, + "learning_rate": 0.0002, + "loss": 0.9396, + "step": 1102 + }, + { + "epoch": 0.8815184815184816, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9427, + "step": 1103 + }, + { + "epoch": 0.8823176823176824, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9457, + "step": 1104 + }, + { + "epoch": 0.8831168831168831, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9512, + "step": 1105 + }, + { + "epoch": 0.8839160839160839, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9462, + "step": 1106 + }, + { + "epoch": 0.8847152847152847, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9364, + "step": 1107 + }, + { + "epoch": 0.8855144855144855, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9409, + "step": 1108 + }, + { + "epoch": 0.8863136863136863, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9398, + "step": 1109 + }, + { + "epoch": 0.8871128871128872, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9413, + "step": 1110 + }, + { + "epoch": 0.8879120879120879, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9394, + "step": 1111 + }, + { + "epoch": 0.8887112887112887, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.932, + "step": 1112 + }, + { + "epoch": 0.8895104895104895, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9468, + "step": 1113 + }, + { + "epoch": 0.8903096903096903, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9449, + "step": 1114 + }, + { + "epoch": 0.8911088911088911, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9555, + "step": 1115 + }, + { + "epoch": 0.891908091908092, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9366, + "step": 1116 + }, + { + "epoch": 0.8927072927072927, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9405, + "step": 1117 + }, + { + "epoch": 0.8935064935064935, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9425, + "step": 1118 + }, + { + "epoch": 0.8943056943056943, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9437, + "step": 1119 + }, + { + "epoch": 0.8951048951048951, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9488, + "step": 1120 + }, + { + "epoch": 0.8959040959040959, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9472, + "step": 1121 + }, + { + "epoch": 0.8967032967032967, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9304, + "step": 1122 + }, + { + "epoch": 0.8975024975024976, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.9383, + "step": 1123 + }, + { + "epoch": 0.8983016983016983, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9412, + "step": 1124 + }, + { + "epoch": 0.8991008991008991, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9414, + "step": 1125 + }, + { + "epoch": 0.8999000999000999, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9443, + "step": 1126 + }, + { + "epoch": 0.9006993006993007, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9346, + "step": 1127 + }, + { + "epoch": 0.9014985014985015, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.944, + "step": 1128 + }, + { + "epoch": 0.9022977022977023, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9511, + "step": 1129 + }, + { + "epoch": 0.903096903096903, + 
"grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9372, + "step": 1130 + }, + { + "epoch": 0.9038961038961039, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.9308, + "step": 1131 + }, + { + "epoch": 0.9046953046953047, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9475, + "step": 1132 + }, + { + "epoch": 0.9054945054945055, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9335, + "step": 1133 + }, + { + "epoch": 0.9062937062937063, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9362, + "step": 1134 + }, + { + "epoch": 0.9070929070929071, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9465, + "step": 1135 + }, + { + "epoch": 0.907892107892108, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.936, + "step": 1136 + }, + { + "epoch": 0.9086913086913087, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.933, + "step": 1137 + }, + { + "epoch": 0.9094905094905095, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9399, + "step": 1138 + }, + { + "epoch": 0.9102897102897103, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9511, + "step": 1139 + }, + { + "epoch": 0.9110889110889111, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9392, + "step": 1140 + }, + { + "epoch": 0.9118881118881119, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9357, + "step": 1141 + }, + { + "epoch": 0.9126873126873127, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9328, + "step": 1142 + }, + { + "epoch": 0.9134865134865134, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9438, + "step": 1143 + }, + { + "epoch": 0.9142857142857143, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9275, + "step": 1144 + }, + { + "epoch": 0.9150849150849151, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9262, + "step": 1145 + }, + { + "epoch": 0.9158841158841159, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9362, + "step": 1146 + }, + { + "epoch": 0.9166833166833167, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9398, + "step": 1147 + }, + { + "epoch": 0.9174825174825175, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9338, + "step": 1148 + }, + { + "epoch": 0.9182817182817182, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9387, + "step": 1149 + }, + { + "epoch": 0.919080919080919, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9438, + "step": 1150 + }, + { + "epoch": 0.9198801198801199, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9406, + "step": 1151 + }, + { + "epoch": 0.9206793206793207, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9468, + "step": 1152 + }, + { + "epoch": 0.9214785214785215, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9333, + "step": 1153 + }, + { + "epoch": 0.9222777222777223, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9339, + "step": 1154 + }, + { + "epoch": 0.9230769230769231, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9395, + "step": 1155 + }, + { + "epoch": 0.9238761238761238, + "grad_norm": 0.65234375, + "learning_rate": 0.0002, + "loss": 0.9363, + "step": 1156 + }, + { + "epoch": 0.9246753246753247, + "grad_norm": 1.0546875, + "learning_rate": 0.0002, + "loss": 0.9442, + "step": 1157 + }, + { + "epoch": 0.9254745254745255, + 
"grad_norm": 2.375, + "learning_rate": 0.0002, + "loss": 0.9493, + "step": 1158 + }, + { + "epoch": 0.9262737262737263, + "grad_norm": 1.296875, + "learning_rate": 0.0002, + "loss": 0.9541, + "step": 1159 + }, + { + "epoch": 0.9270729270729271, + "grad_norm": 3.546875, + "learning_rate": 0.0002, + "loss": 0.9484, + "step": 1160 + }, + { + "epoch": 0.9278721278721279, + "grad_norm": 2.90625, + "learning_rate": 0.0002, + "loss": 0.9599, + "step": 1161 + }, + { + "epoch": 0.9286713286713286, + "grad_norm": 1.28125, + "learning_rate": 0.0002, + "loss": 0.9439, + "step": 1162 + }, + { + "epoch": 0.9294705294705294, + "grad_norm": 0.94140625, + "learning_rate": 0.0002, + "loss": 0.9559, + "step": 1163 + }, + { + "epoch": 0.9302697302697303, + "grad_norm": 0.87109375, + "learning_rate": 0.0002, + "loss": 0.9517, + "step": 1164 + }, + { + "epoch": 0.9310689310689311, + "grad_norm": 0.98828125, + "learning_rate": 0.0002, + "loss": 0.9494, + "step": 1165 + }, + { + "epoch": 0.9318681318681319, + "grad_norm": 1.3125, + "learning_rate": 0.0002, + "loss": 0.9535, + "step": 1166 + }, + { + "epoch": 0.9326673326673327, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.9412, + "step": 1167 + }, + { + "epoch": 0.9334665334665335, + "grad_norm": 1.375, + "learning_rate": 0.0002, + "loss": 0.9478, + "step": 1168 + }, + { + "epoch": 0.9342657342657342, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9432, + "step": 1169 + }, + { + "epoch": 0.935064935064935, + "grad_norm": 1.296875, + "learning_rate": 0.0002, + "loss": 0.9486, + "step": 1170 + }, + { + "epoch": 0.9358641358641359, + "grad_norm": 0.78515625, + "learning_rate": 0.0002, + "loss": 0.9442, + "step": 1171 + }, + { + "epoch": 0.9366633366633367, + "grad_norm": 1.140625, + "learning_rate": 0.0002, + "loss": 0.9525, + "step": 1172 + }, + { + "epoch": 0.9374625374625375, + "grad_norm": 0.7890625, + "learning_rate": 0.0002, + "loss": 0.9447, + "step": 1173 + }, + { + "epoch": 0.9382617382617383, + "grad_norm": 1.15625, + "learning_rate": 0.0002, + "loss": 0.9462, + "step": 1174 + }, + { + "epoch": 0.939060939060939, + "grad_norm": 0.73828125, + "learning_rate": 0.0002, + "loss": 0.9585, + "step": 1175 + }, + { + "epoch": 0.9398601398601398, + "grad_norm": 1.2109375, + "learning_rate": 0.0002, + "loss": 0.95, + "step": 1176 + }, + { + "epoch": 0.9406593406593406, + "grad_norm": 0.6953125, + "learning_rate": 0.0002, + "loss": 0.9418, + "step": 1177 + }, + { + "epoch": 0.9414585414585415, + "grad_norm": 1.0, + "learning_rate": 0.0002, + "loss": 0.938, + "step": 1178 + }, + { + "epoch": 0.9422577422577423, + "grad_norm": 0.84375, + "learning_rate": 0.0002, + "loss": 0.9535, + "step": 1179 + }, + { + "epoch": 0.9430569430569431, + "grad_norm": 0.78515625, + "learning_rate": 0.0002, + "loss": 0.941, + "step": 1180 + }, + { + "epoch": 0.9438561438561438, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 0.9446, + "step": 1181 + }, + { + "epoch": 0.9446553446553446, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9406, + "step": 1182 + }, + { + "epoch": 0.9454545454545454, + "grad_norm": 0.69921875, + "learning_rate": 0.0002, + "loss": 0.9593, + "step": 1183 + }, + { + "epoch": 0.9462537462537463, + "grad_norm": 0.65234375, + "learning_rate": 0.0002, + "loss": 0.9334, + "step": 1184 + }, + { + "epoch": 0.9470529470529471, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9429, + "step": 1185 + }, + { + "epoch": 0.9478521478521479, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, 
+ "loss": 0.9419, + "step": 1186 + }, + { + "epoch": 0.9486513486513487, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9314, + "step": 1187 + }, + { + "epoch": 0.9494505494505494, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.936, + "step": 1188 + }, + { + "epoch": 0.9502497502497502, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9393, + "step": 1189 + }, + { + "epoch": 0.951048951048951, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.932, + "step": 1190 + }, + { + "epoch": 0.9518481518481519, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9316, + "step": 1191 + }, + { + "epoch": 0.9526473526473527, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9404, + "step": 1192 + }, + { + "epoch": 0.9534465534465535, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9301, + "step": 1193 + }, + { + "epoch": 0.9542457542457542, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9331, + "step": 1194 + }, + { + "epoch": 0.955044955044955, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9403, + "step": 1195 + }, + { + "epoch": 0.9558441558441558, + "grad_norm": 0.65625, + "learning_rate": 0.0002, + "loss": 0.9766, + "step": 1196 + }, + { + "epoch": 0.9566433566433566, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.94, + "step": 1197 + }, + { + "epoch": 0.9574425574425575, + "grad_norm": 0.94140625, + "learning_rate": 0.0002, + "loss": 0.9464, + "step": 1198 + }, + { + "epoch": 0.9582417582417583, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.944, + "step": 1199 + }, + { + "epoch": 0.9590409590409591, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9389, + "step": 1200 + }, + { + "epoch": 0.9598401598401598, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.947, + "step": 1201 + }, + { + "epoch": 0.9606393606393606, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9404, + "step": 1202 + }, + { + "epoch": 0.9614385614385614, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9371, + "step": 1203 + }, + { + "epoch": 0.9622377622377623, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9316, + "step": 1204 + }, + { + "epoch": 0.9630369630369631, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9416, + "step": 1205 + }, + { + "epoch": 0.9638361638361639, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9433, + "step": 1206 + }, + { + "epoch": 0.9646353646353646, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9397, + "step": 1207 + }, + { + "epoch": 0.9654345654345654, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9428, + "step": 1208 + }, + { + "epoch": 0.9662337662337662, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9506, + "step": 1209 + }, + { + "epoch": 0.967032967032967, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9429, + "step": 1210 + }, + { + "epoch": 0.9678321678321679, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9355, + "step": 1211 + }, + { + "epoch": 0.9686313686313687, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.943, + "step": 1212 + }, + { + "epoch": 0.9694305694305694, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9498, + "step": 1213 + }, + { + "epoch": 0.9702297702297702, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 
0.9406, + "step": 1214 + }, + { + "epoch": 0.971028971028971, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9395, + "step": 1215 + }, + { + "epoch": 0.9718281718281718, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9436, + "step": 1216 + }, + { + "epoch": 0.9726273726273726, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9375, + "step": 1217 + }, + { + "epoch": 0.9734265734265735, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9407, + "step": 1218 + }, + { + "epoch": 0.9742257742257743, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9298, + "step": 1219 + }, + { + "epoch": 0.975024975024975, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9448, + "step": 1220 + }, + { + "epoch": 0.9758241758241758, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.94, + "step": 1221 + }, + { + "epoch": 0.9766233766233766, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9365, + "step": 1222 + }, + { + "epoch": 0.9774225774225774, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9435, + "step": 1223 + }, + { + "epoch": 0.9782217782217782, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9354, + "step": 1224 + }, + { + "epoch": 0.9790209790209791, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9338, + "step": 1225 + }, + { + "epoch": 0.9798201798201798, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9355, + "step": 1226 + }, + { + "epoch": 0.9806193806193806, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.938, + "step": 1227 + }, + { + "epoch": 0.9814185814185814, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9358, + "step": 1228 + }, + { + "epoch": 0.9822177822177822, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9282, + "step": 1229 + }, + { + "epoch": 0.983016983016983, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.936, + "step": 1230 + }, + { + "epoch": 0.9838161838161839, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9365, + "step": 1231 + }, + { + "epoch": 0.9846153846153847, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9384, + "step": 1232 + }, + { + "epoch": 0.9854145854145854, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9337, + "step": 1233 + }, + { + "epoch": 0.9862137862137862, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9407, + "step": 1234 + }, + { + "epoch": 0.987012987012987, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9377, + "step": 1235 + }, + { + "epoch": 0.9878121878121878, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9336, + "step": 1236 + }, + { + "epoch": 0.9886113886113886, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9384, + "step": 1237 + }, + { + "epoch": 0.9894105894105895, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9239, + "step": 1238 + }, + { + "epoch": 0.9902097902097902, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9316, + "step": 1239 + }, + { + "epoch": 0.991008991008991, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9388, + "step": 1240 + }, + { + "epoch": 0.9918081918081918, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9426, + "step": 1241 + }, + { + "epoch": 0.9926073926073926, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + 
"loss": 0.9359, + "step": 1242 + }, + { + "epoch": 0.9934065934065934, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.944, + "step": 1243 + }, + { + "epoch": 0.9942057942057942, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9356, + "step": 1244 + }, + { + "epoch": 0.995004995004995, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9404, + "step": 1245 + }, + { + "epoch": 0.9958041958041958, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9422, + "step": 1246 + }, + { + "epoch": 0.9966033966033966, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9377, + "step": 1247 + }, + { + "epoch": 0.9974025974025974, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9373, + "step": 1248 + }, + { + "epoch": 0.9982017982017982, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9334, + "step": 1249 + }, + { + "epoch": 0.999000999000999, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9355, + "step": 1250 + }, + { + "epoch": 0.9998001998001999, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9339, + "step": 1251 + }, + { + "epoch": 1.0, + "grad_norm": 0.12255859375, + "learning_rate": 0.0002, + "loss": 0.2358, + "step": 1252 + }, + { + "epoch": 1.0007992007992008, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.9372, + "step": 1253 + }, + { + "epoch": 1.0015984015984016, + "grad_norm": 0.75, + "learning_rate": 0.0002, + "loss": 0.9368, + "step": 1254 + }, + { + "epoch": 1.0023976023976024, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9396, + "step": 1255 + }, + { + "epoch": 1.0031968031968033, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9409, + "step": 1256 + }, + { + "epoch": 1.003996003996004, + "grad_norm": 0.625, + "learning_rate": 0.0002, + "loss": 0.932, + "step": 1257 + }, + { + "epoch": 1.004795204795205, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9329, + "step": 1258 + }, + { + "epoch": 1.0055944055944055, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.936, + "step": 1259 + }, + { + "epoch": 1.0063936063936063, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.9437, + "step": 1260 + }, + { + "epoch": 1.0071928071928071, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9371, + "step": 1261 + }, + { + "epoch": 1.007992007992008, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9371, + "step": 1262 + }, + { + "epoch": 1.0087912087912088, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9259, + "step": 1263 + }, + { + "epoch": 1.0095904095904096, + "grad_norm": 1.0546875, + "learning_rate": 0.0002, + "loss": 0.9412, + "step": 1264 + }, + { + "epoch": 1.0103896103896104, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.933, + "step": 1265 + }, + { + "epoch": 1.0111888111888112, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9364, + "step": 1266 + }, + { + "epoch": 1.011988011988012, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9288, + "step": 1267 + }, + { + "epoch": 1.0127872127872128, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9485, + "step": 1268 + }, + { + "epoch": 1.0135864135864137, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9283, + "step": 1269 + }, + { + "epoch": 1.0143856143856145, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9278, + "step": 1270 + }, + { + 
"epoch": 1.0151848151848153, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.93, + "step": 1271 + }, + { + "epoch": 1.0159840159840159, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9295, + "step": 1272 + }, + { + "epoch": 1.0167832167832167, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.939, + "step": 1273 + }, + { + "epoch": 1.0175824175824175, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9304, + "step": 1274 + }, + { + "epoch": 1.0183816183816183, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9326, + "step": 1275 + }, + { + "epoch": 1.0191808191808192, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9334, + "step": 1276 + }, + { + "epoch": 1.01998001998002, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9164, + "step": 1277 + }, + { + "epoch": 1.0207792207792208, + "grad_norm": 1.1015625, + "learning_rate": 0.0002, + "loss": 0.9679, + "step": 1278 + }, + { + "epoch": 1.0215784215784216, + "grad_norm": 0.69140625, + "learning_rate": 0.0002, + "loss": 0.9393, + "step": 1279 + }, + { + "epoch": 1.0223776223776224, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9296, + "step": 1280 + }, + { + "epoch": 1.0231768231768232, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9391, + "step": 1281 + }, + { + "epoch": 1.023976023976024, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.939, + "step": 1282 + }, + { + "epoch": 1.0247752247752249, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9336, + "step": 1283 + }, + { + "epoch": 1.0255744255744257, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9453, + "step": 1284 + }, + { + "epoch": 1.0263736263736263, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9352, + "step": 1285 + }, + { + "epoch": 1.027172827172827, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9384, + "step": 1286 + }, + { + "epoch": 1.027972027972028, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9434, + "step": 1287 + }, + { + "epoch": 1.0287712287712287, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9381, + "step": 1288 + }, + { + "epoch": 1.0295704295704295, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9455, + "step": 1289 + }, + { + "epoch": 1.0303696303696304, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9323, + "step": 1290 + }, + { + "epoch": 1.0311688311688312, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9405, + "step": 1291 + }, + { + "epoch": 1.031968031968032, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9367, + "step": 1292 + }, + { + "epoch": 1.0327672327672328, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9288, + "step": 1293 + }, + { + "epoch": 1.0335664335664336, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9355, + "step": 1294 + }, + { + "epoch": 1.0343656343656344, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9339, + "step": 1295 + }, + { + "epoch": 1.0351648351648353, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9437, + "step": 1296 + }, + { + "epoch": 1.035964035964036, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9289, + "step": 1297 + }, + { + "epoch": 1.0367632367632367, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9286, + "step": 1298 + }, + { + 
"epoch": 1.0375624375624375, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9325, + "step": 1299 + }, + { + "epoch": 1.0383616383616383, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9316, + "step": 1300 + }, + { + "epoch": 1.0391608391608391, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9319, + "step": 1301 + }, + { + "epoch": 1.03996003996004, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.933, + "step": 1302 + }, + { + "epoch": 1.0407592407592408, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9247, + "step": 1303 + }, + { + "epoch": 1.0415584415584416, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9378, + "step": 1304 + }, + { + "epoch": 1.0423576423576424, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9313, + "step": 1305 + }, + { + "epoch": 1.0431568431568432, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.93, + "step": 1306 + }, + { + "epoch": 1.043956043956044, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9392, + "step": 1307 + }, + { + "epoch": 1.0447552447552448, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9397, + "step": 1308 + }, + { + "epoch": 1.0455544455544457, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9317, + "step": 1309 + }, + { + "epoch": 1.0463536463536462, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9344, + "step": 1310 + }, + { + "epoch": 1.047152847152847, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9298, + "step": 1311 + }, + { + "epoch": 1.0479520479520479, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9352, + "step": 1312 + }, + { + "epoch": 1.0487512487512487, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9258, + "step": 1313 + }, + { + "epoch": 1.0495504495504495, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9268, + "step": 1314 + }, + { + "epoch": 1.0503496503496503, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9337, + "step": 1315 + }, + { + "epoch": 1.0511488511488511, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9359, + "step": 1316 + }, + { + "epoch": 1.051948051948052, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9341, + "step": 1317 + }, + { + "epoch": 1.0527472527472528, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9385, + "step": 1318 + }, + { + "epoch": 1.0535464535464536, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9307, + "step": 1319 + }, + { + "epoch": 1.0543456543456544, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9357, + "step": 1320 + }, + { + "epoch": 1.0551448551448552, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9395, + "step": 1321 + }, + { + "epoch": 1.055944055944056, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9275, + "step": 1322 + }, + { + "epoch": 1.0567432567432566, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9222, + "step": 1323 + }, + { + "epoch": 1.0575424575424575, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9293, + "step": 1324 + }, + { + "epoch": 1.0583416583416583, + "grad_norm": 1.109375, + "learning_rate": 0.0002, + "loss": 0.9581, + "step": 1325 + }, + { + "epoch": 1.059140859140859, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9259, + "step": 1326 + }, + { + "epoch": 
1.05994005994006, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9327, + "step": 1327 + }, + { + "epoch": 1.0607392607392607, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9344, + "step": 1328 + }, + { + "epoch": 1.0615384615384615, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9431, + "step": 1329 + }, + { + "epoch": 1.0623376623376624, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9363, + "step": 1330 + }, + { + "epoch": 1.0631368631368632, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9342, + "step": 1331 + }, + { + "epoch": 1.063936063936064, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9242, + "step": 1332 + }, + { + "epoch": 1.0647352647352648, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9385, + "step": 1333 + }, + { + "epoch": 1.0655344655344656, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9363, + "step": 1334 + }, + { + "epoch": 1.0663336663336662, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9367, + "step": 1335 + }, + { + "epoch": 1.067132867132867, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9381, + "step": 1336 + }, + { + "epoch": 1.0679320679320679, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9399, + "step": 1337 + }, + { + "epoch": 1.0687312687312687, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9397, + "step": 1338 + }, + { + "epoch": 1.0695304695304695, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9396, + "step": 1339 + }, + { + "epoch": 1.0703296703296703, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9306, + "step": 1340 + }, + { + "epoch": 1.0711288711288711, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.929, + "step": 1341 + }, + { + "epoch": 1.071928071928072, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9429, + "step": 1342 + }, + { + "epoch": 1.0727272727272728, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9499, + "step": 1343 + }, + { + "epoch": 1.0735264735264736, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9336, + "step": 1344 + }, + { + "epoch": 1.0743256743256744, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9191, + "step": 1345 + }, + { + "epoch": 1.0751248751248752, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9243, + "step": 1346 + }, + { + "epoch": 1.075924075924076, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9325, + "step": 1347 + }, + { + "epoch": 1.0767232767232766, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9362, + "step": 1348 + }, + { + "epoch": 1.0775224775224774, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9354, + "step": 1349 + }, + { + "epoch": 1.0783216783216782, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9351, + "step": 1350 + }, + { + "epoch": 1.079120879120879, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9324, + "step": 1351 + }, + { + "epoch": 1.0799200799200799, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9229, + "step": 1352 + }, + { + "epoch": 1.0807192807192807, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9406, + "step": 1353 + }, + { + "epoch": 1.0815184815184815, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9278, + "step": 1354 + }, + { + "epoch": 
1.0823176823176823, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9294, + "step": 1355 + }, + { + "epoch": 1.0831168831168831, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9318, + "step": 1356 + }, + { + "epoch": 1.083916083916084, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9327, + "step": 1357 + }, + { + "epoch": 1.0847152847152848, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9339, + "step": 1358 + }, + { + "epoch": 1.0855144855144856, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9343, + "step": 1359 + }, + { + "epoch": 1.0863136863136864, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9433, + "step": 1360 + }, + { + "epoch": 1.087112887112887, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9369, + "step": 1361 + }, + { + "epoch": 1.0879120879120878, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9335, + "step": 1362 + }, + { + "epoch": 1.0887112887112886, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9289, + "step": 1363 + }, + { + "epoch": 1.0895104895104895, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9274, + "step": 1364 + }, + { + "epoch": 1.0903096903096903, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9346, + "step": 1365 + }, + { + "epoch": 1.091108891108891, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9329, + "step": 1366 + }, + { + "epoch": 1.091908091908092, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9304, + "step": 1367 + }, + { + "epoch": 1.0927072927072927, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9315, + "step": 1368 + }, + { + "epoch": 1.0935064935064935, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9376, + "step": 1369 + }, + { + "epoch": 1.0943056943056944, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9363, + "step": 1370 + }, + { + "epoch": 1.0951048951048952, + "grad_norm": 0.75, + "learning_rate": 0.0002, + "loss": 0.9328, + "step": 1371 + }, + { + "epoch": 1.095904095904096, + "grad_norm": 1.28125, + "learning_rate": 0.0002, + "loss": 0.9425, + "step": 1372 + }, + { + "epoch": 1.0967032967032968, + "grad_norm": 1.4609375, + "learning_rate": 0.0002, + "loss": 0.9346, + "step": 1373 + }, + { + "epoch": 1.0975024975024974, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9282, + "step": 1374 + }, + { + "epoch": 1.0983016983016982, + "grad_norm": 2.0625, + "learning_rate": 0.0002, + "loss": 0.9444, + "step": 1375 + }, + { + "epoch": 1.099100899100899, + "grad_norm": 0.79296875, + "learning_rate": 0.0002, + "loss": 0.932, + "step": 1376 + }, + { + "epoch": 1.0999000999000998, + "grad_norm": 2.5625, + "learning_rate": 0.0002, + "loss": 0.9439, + "step": 1377 + }, + { + "epoch": 1.1006993006993007, + "grad_norm": 1.6953125, + "learning_rate": 0.0002, + "loss": 0.9412, + "step": 1378 + }, + { + "epoch": 1.1014985014985015, + "grad_norm": 1.234375, + "learning_rate": 0.0002, + "loss": 0.9368, + "step": 1379 + }, + { + "epoch": 1.1022977022977023, + "grad_norm": 0.89453125, + "learning_rate": 0.0002, + "loss": 0.9445, + "step": 1380 + }, + { + "epoch": 1.1030969030969031, + "grad_norm": 1.5703125, + "learning_rate": 0.0002, + "loss": 0.9492, + "step": 1381 + }, + { + "epoch": 1.103896103896104, + "grad_norm": 0.8125, + "learning_rate": 0.0002, + "loss": 0.943, + "step": 1382 + }, + { + "epoch": 1.1046953046953047, + "grad_norm": 
1.9453125, + "learning_rate": 0.0002, + "loss": 0.9568, + "step": 1383 + }, + { + "epoch": 1.1054945054945056, + "grad_norm": 1.0703125, + "learning_rate": 0.0002, + "loss": 0.9467, + "step": 1384 + }, + { + "epoch": 1.1062937062937064, + "grad_norm": 2.5625, + "learning_rate": 0.0002, + "loss": 0.971, + "step": 1385 + }, + { + "epoch": 1.1070929070929072, + "grad_norm": 1.828125, + "learning_rate": 0.0002, + "loss": 0.9598, + "step": 1386 + }, + { + "epoch": 1.1078921078921078, + "grad_norm": 1.734375, + "learning_rate": 0.0002, + "loss": 0.9659, + "step": 1387 + }, + { + "epoch": 1.1086913086913086, + "grad_norm": 1.2890625, + "learning_rate": 0.0002, + "loss": 0.9437, + "step": 1388 + }, + { + "epoch": 1.1094905094905094, + "grad_norm": 1.703125, + "learning_rate": 0.0002, + "loss": 0.9568, + "step": 1389 + }, + { + "epoch": 1.1102897102897102, + "grad_norm": 1.2421875, + "learning_rate": 0.0002, + "loss": 0.9534, + "step": 1390 + }, + { + "epoch": 1.111088911088911, + "grad_norm": 1.3125, + "learning_rate": 0.0002, + "loss": 0.9514, + "step": 1391 + }, + { + "epoch": 1.1118881118881119, + "grad_norm": 1.0, + "learning_rate": 0.0002, + "loss": 0.9509, + "step": 1392 + }, + { + "epoch": 1.1126873126873127, + "grad_norm": 1.0703125, + "learning_rate": 0.0002, + "loss": 0.9426, + "step": 1393 + }, + { + "epoch": 1.1134865134865135, + "grad_norm": 0.765625, + "learning_rate": 0.0002, + "loss": 0.9469, + "step": 1394 + }, + { + "epoch": 1.1142857142857143, + "grad_norm": 0.8125, + "learning_rate": 0.0002, + "loss": 0.9535, + "step": 1395 + }, + { + "epoch": 1.1150849150849151, + "grad_norm": 0.76171875, + "learning_rate": 0.0002, + "loss": 0.9415, + "step": 1396 + }, + { + "epoch": 1.115884115884116, + "grad_norm": 0.68359375, + "learning_rate": 0.0002, + "loss": 0.9453, + "step": 1397 + }, + { + "epoch": 1.1166833166833168, + "grad_norm": 0.7734375, + "learning_rate": 0.0002, + "loss": 0.9409, + "step": 1398 + }, + { + "epoch": 1.1174825174825176, + "grad_norm": 0.671875, + "learning_rate": 0.0002, + "loss": 0.9422, + "step": 1399 + }, + { + "epoch": 1.1182817182817182, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.935, + "step": 1400 + }, + { + "epoch": 1.119080919080919, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.9464, + "step": 1401 + }, + { + "epoch": 1.1198801198801198, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9373, + "step": 1402 + }, + { + "epoch": 1.1206793206793206, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9476, + "step": 1403 + }, + { + "epoch": 1.1214785214785215, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9477, + "step": 1404 + }, + { + "epoch": 1.1222777222777223, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9312, + "step": 1405 + }, + { + "epoch": 1.123076923076923, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9305, + "step": 1406 + }, + { + "epoch": 1.123876123876124, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9405, + "step": 1407 + }, + { + "epoch": 1.1246753246753247, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9276, + "step": 1408 + }, + { + "epoch": 1.1254745254745255, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9332, + "step": 1409 + }, + { + "epoch": 1.1262737262737263, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9283, + "step": 1410 + }, + { + "epoch": 1.1270729270729272, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, 
+ "loss": 0.9315, + "step": 1411 + }, + { + "epoch": 1.127872127872128, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9263, + "step": 1412 + }, + { + "epoch": 1.1286713286713286, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9246, + "step": 1413 + }, + { + "epoch": 1.1294705294705294, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9373, + "step": 1414 + }, + { + "epoch": 1.1302697302697302, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9307, + "step": 1415 + }, + { + "epoch": 1.131068931068931, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9426, + "step": 1416 + }, + { + "epoch": 1.1318681318681318, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9335, + "step": 1417 + }, + { + "epoch": 1.1326673326673327, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.929, + "step": 1418 + }, + { + "epoch": 1.1334665334665335, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9286, + "step": 1419 + }, + { + "epoch": 1.1342657342657343, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9314, + "step": 1420 + }, + { + "epoch": 1.135064935064935, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9299, + "step": 1421 + }, + { + "epoch": 1.135864135864136, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9255, + "step": 1422 + }, + { + "epoch": 1.1366633366633367, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9374, + "step": 1423 + }, + { + "epoch": 1.1374625374625376, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9337, + "step": 1424 + }, + { + "epoch": 1.1382617382617384, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.9261, + "step": 1425 + }, + { + "epoch": 1.139060939060939, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9305, + "step": 1426 + }, + { + "epoch": 1.1398601398601398, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.923, + "step": 1427 + }, + { + "epoch": 1.1406593406593406, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.928, + "step": 1428 + }, + { + "epoch": 1.1414585414585414, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.9353, + "step": 1429 + }, + { + "epoch": 1.1422577422577422, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9333, + "step": 1430 + }, + { + "epoch": 1.143056943056943, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9382, + "step": 1431 + }, + { + "epoch": 1.1438561438561439, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.9257, + "step": 1432 + }, + { + "epoch": 1.1446553446553447, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9349, + "step": 1433 + }, + { + "epoch": 1.1454545454545455, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9372, + "step": 1434 + }, + { + "epoch": 1.1462537462537463, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.924, + "step": 1435 + }, + { + "epoch": 1.1470529470529471, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9228, + "step": 1436 + }, + { + "epoch": 1.1478521478521477, + "grad_norm": 0.2392578125, + "learning_rate": 0.0002, + "loss": 0.9315, + "step": 1437 + }, + { + "epoch": 1.1486513486513488, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.937, + "step": 1438 + }, + { + "epoch": 1.1494505494505494, + "grad_norm": 0.2470703125, + "learning_rate": 0.0002, + "loss": 
0.9237, + "step": 1439 + }, + { + "epoch": 1.1502497502497502, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9387, + "step": 1440 + }, + { + "epoch": 1.151048951048951, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9177, + "step": 1441 + }, + { + "epoch": 1.1518481518481518, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9349, + "step": 1442 + }, + { + "epoch": 1.1526473526473526, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9463, + "step": 1443 + }, + { + "epoch": 1.1534465534465534, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9357, + "step": 1444 + }, + { + "epoch": 1.1542457542457543, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9301, + "step": 1445 + }, + { + "epoch": 1.155044955044955, + "grad_norm": 1.3671875, + "learning_rate": 0.0002, + "loss": 0.9399, + "step": 1446 + }, + { + "epoch": 1.155844155844156, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 0.9418, + "step": 1447 + }, + { + "epoch": 1.1566433566433567, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9425, + "step": 1448 + }, + { + "epoch": 1.1574425574425575, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9285, + "step": 1449 + }, + { + "epoch": 1.1582417582417581, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9284, + "step": 1450 + }, + { + "epoch": 1.1590409590409592, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9379, + "step": 1451 + }, + { + "epoch": 1.1598401598401598, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.929, + "step": 1452 + }, + { + "epoch": 1.1606393606393606, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9315, + "step": 1453 + }, + { + "epoch": 1.1614385614385614, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9228, + "step": 1454 + }, + { + "epoch": 1.1622377622377622, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9363, + "step": 1455 + }, + { + "epoch": 1.163036963036963, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9247, + "step": 1456 + }, + { + "epoch": 1.1638361638361638, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9242, + "step": 1457 + }, + { + "epoch": 1.1646353646353647, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9292, + "step": 1458 + }, + { + "epoch": 1.1654345654345655, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9325, + "step": 1459 + }, + { + "epoch": 1.1662337662337663, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9402, + "step": 1460 + }, + { + "epoch": 1.167032967032967, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9421, + "step": 1461 + }, + { + "epoch": 1.167832167832168, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9228, + "step": 1462 + }, + { + "epoch": 1.1686313686313685, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9273, + "step": 1463 + }, + { + "epoch": 1.1694305694305696, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9319, + "step": 1464 + }, + { + "epoch": 1.1702297702297701, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9371, + "step": 1465 + }, + { + "epoch": 1.171028971028971, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9285, + "step": 1466 + }, + { + "epoch": 1.1718281718281718, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 
0.9268, + "step": 1467 + }, + { + "epoch": 1.1726273726273726, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9269, + "step": 1468 + }, + { + "epoch": 1.1734265734265734, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.93, + "step": 1469 + }, + { + "epoch": 1.1742257742257742, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.9493, + "step": 1470 + }, + { + "epoch": 1.175024975024975, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.925, + "step": 1471 + }, + { + "epoch": 1.1758241758241759, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9332, + "step": 1472 + }, + { + "epoch": 1.1766233766233767, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9347, + "step": 1473 + }, + { + "epoch": 1.1774225774225775, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9228, + "step": 1474 + }, + { + "epoch": 1.1782217782217783, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9329, + "step": 1475 + }, + { + "epoch": 1.179020979020979, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9335, + "step": 1476 + }, + { + "epoch": 1.1798201798201797, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9274, + "step": 1477 + }, + { + "epoch": 1.1806193806193805, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9248, + "step": 1478 + }, + { + "epoch": 1.1814185814185814, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9293, + "step": 1479 + }, + { + "epoch": 1.1822177822177822, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9324, + "step": 1480 + }, + { + "epoch": 1.183016983016983, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9221, + "step": 1481 + }, + { + "epoch": 1.1838161838161838, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9188, + "step": 1482 + }, + { + "epoch": 1.1846153846153846, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9314, + "step": 1483 + }, + { + "epoch": 1.1854145854145854, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9312, + "step": 1484 + }, + { + "epoch": 1.1862137862137863, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9301, + "step": 1485 + }, + { + "epoch": 1.187012987012987, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9215, + "step": 1486 + }, + { + "epoch": 1.187812187812188, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9228, + "step": 1487 + }, + { + "epoch": 1.1886113886113887, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9322, + "step": 1488 + }, + { + "epoch": 1.1894105894105893, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9336, + "step": 1489 + }, + { + "epoch": 1.1902097902097901, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9269, + "step": 1490 + }, + { + "epoch": 1.191008991008991, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.933, + "step": 1491 + }, + { + "epoch": 1.1918081918081918, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.928, + "step": 1492 + }, + { + "epoch": 1.1926073926073926, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9324, + "step": 1493 + }, + { + "epoch": 1.1934065934065934, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9287, + "step": 1494 + }, + { + "epoch": 1.1942057942057942, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 
0.9317, + "step": 1495 + }, + { + "epoch": 1.195004995004995, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9205, + "step": 1496 + }, + { + "epoch": 1.1958041958041958, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9344, + "step": 1497 + }, + { + "epoch": 1.1966033966033967, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9241, + "step": 1498 + }, + { + "epoch": 1.1974025974025975, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9211, + "step": 1499 + }, + { + "epoch": 1.1982017982017983, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.936, + "step": 1500 + }, + { + "epoch": 1.199000999000999, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.932, + "step": 1501 + }, + { + "epoch": 1.1998001998001997, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9256, + "step": 1502 + }, + { + "epoch": 1.2005994005994005, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9361, + "step": 1503 + }, + { + "epoch": 1.2013986013986013, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9209, + "step": 1504 + }, + { + "epoch": 1.2021978021978021, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9202, + "step": 1505 + }, + { + "epoch": 1.202997002997003, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.931, + "step": 1506 + }, + { + "epoch": 1.2037962037962038, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9325, + "step": 1507 + }, + { + "epoch": 1.2045954045954046, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9267, + "step": 1508 + }, + { + "epoch": 1.2053946053946054, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9287, + "step": 1509 + }, + { + "epoch": 1.2061938061938062, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9264, + "step": 1510 + }, + { + "epoch": 1.206993006993007, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9231, + "step": 1511 + }, + { + "epoch": 1.2077922077922079, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9228, + "step": 1512 + }, + { + "epoch": 1.2085914085914087, + "grad_norm": 1.3203125, + "learning_rate": 0.0002, + "loss": 0.9444, + "step": 1513 + }, + { + "epoch": 1.2093906093906095, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9187, + "step": 1514 + }, + { + "epoch": 1.21018981018981, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.934, + "step": 1515 + }, + { + "epoch": 1.210989010989011, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9253, + "step": 1516 + }, + { + "epoch": 1.2117882117882117, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9292, + "step": 1517 + }, + { + "epoch": 1.2125874125874125, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9241, + "step": 1518 + }, + { + "epoch": 1.2133866133866134, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9272, + "step": 1519 + }, + { + "epoch": 1.2141858141858142, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9193, + "step": 1520 + }, + { + "epoch": 1.214985014985015, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9303, + "step": 1521 + }, + { + "epoch": 1.2157842157842158, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9325, + "step": 1522 + }, + { + "epoch": 1.2165834165834166, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9206, + 
"step": 1523 + }, + { + "epoch": 1.2173826173826174, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9235, + "step": 1524 + }, + { + "epoch": 1.2181818181818183, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9229, + "step": 1525 + }, + { + "epoch": 1.218981018981019, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9315, + "step": 1526 + }, + { + "epoch": 1.2197802197802199, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9313, + "step": 1527 + }, + { + "epoch": 1.2205794205794205, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9285, + "step": 1528 + }, + { + "epoch": 1.2213786213786213, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9376, + "step": 1529 + }, + { + "epoch": 1.2221778221778221, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9241, + "step": 1530 + }, + { + "epoch": 1.222977022977023, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9164, + "step": 1531 + }, + { + "epoch": 1.2237762237762237, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9304, + "step": 1532 + }, + { + "epoch": 1.2245754245754246, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9306, + "step": 1533 + }, + { + "epoch": 1.2253746253746254, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9263, + "step": 1534 + }, + { + "epoch": 1.2261738261738262, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9212, + "step": 1535 + }, + { + "epoch": 1.226973026973027, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.942, + "step": 1536 + }, + { + "epoch": 1.2277722277722278, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9325, + "step": 1537 + }, + { + "epoch": 1.2285714285714286, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9222, + "step": 1538 + }, + { + "epoch": 1.2293706293706295, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9396, + "step": 1539 + }, + { + "epoch": 1.2301698301698303, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9319, + "step": 1540 + }, + { + "epoch": 1.2309690309690309, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9122, + "step": 1541 + }, + { + "epoch": 1.2317682317682317, + "grad_norm": 0.67578125, + "learning_rate": 0.0002, + "loss": 0.9232, + "step": 1542 + }, + { + "epoch": 1.2325674325674325, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.929, + "step": 1543 + }, + { + "epoch": 1.2333666333666333, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9239, + "step": 1544 + }, + { + "epoch": 1.2341658341658341, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9208, + "step": 1545 + }, + { + "epoch": 1.234965034965035, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9317, + "step": 1546 + }, + { + "epoch": 1.2357642357642358, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9209, + "step": 1547 + }, + { + "epoch": 1.2365634365634366, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9321, + "step": 1548 + }, + { + "epoch": 1.2373626373626374, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9222, + "step": 1549 + }, + { + "epoch": 1.2381618381618382, + "grad_norm": 0.69921875, + "learning_rate": 0.0002, + "loss": 0.9267, + "step": 1550 + }, + { + "epoch": 1.238961038961039, + "grad_norm": 0.64453125, + "learning_rate": 0.0002, + "loss": 0.9408, + 
"step": 1551 + }, + { + "epoch": 1.2397602397602399, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9242, + "step": 1552 + }, + { + "epoch": 1.2405594405594407, + "grad_norm": 0.6640625, + "learning_rate": 0.0002, + "loss": 0.9335, + "step": 1553 + }, + { + "epoch": 1.2413586413586413, + "grad_norm": 0.64453125, + "learning_rate": 0.0002, + "loss": 0.9319, + "step": 1554 + }, + { + "epoch": 1.242157842157842, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.925, + "step": 1555 + }, + { + "epoch": 1.242957042957043, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.9257, + "step": 1556 + }, + { + "epoch": 1.2437562437562437, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9287, + "step": 1557 + }, + { + "epoch": 1.2445554445554445, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9273, + "step": 1558 + }, + { + "epoch": 1.2453546453546454, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9313, + "step": 1559 + }, + { + "epoch": 1.2461538461538462, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9304, + "step": 1560 + }, + { + "epoch": 1.246953046953047, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9261, + "step": 1561 + }, + { + "epoch": 1.2477522477522478, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9206, + "step": 1562 + }, + { + "epoch": 1.2485514485514486, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9277, + "step": 1563 + }, + { + "epoch": 1.2493506493506494, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9321, + "step": 1564 + }, + { + "epoch": 1.25014985014985, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9266, + "step": 1565 + }, + { + "epoch": 1.250949050949051, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9342, + "step": 1566 + }, + { + "epoch": 1.2517482517482517, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9341, + "step": 1567 + }, + { + "epoch": 1.2525474525474525, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9124, + "step": 1568 + }, + { + "epoch": 1.2533466533466533, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9268, + "step": 1569 + }, + { + "epoch": 1.254145854145854, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.924, + "step": 1570 + }, + { + "epoch": 1.254945054945055, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9302, + "step": 1571 + }, + { + "epoch": 1.2557442557442557, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.929, + "step": 1572 + }, + { + "epoch": 1.2565434565434566, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9282, + "step": 1573 + }, + { + "epoch": 1.2573426573426574, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9323, + "step": 1574 + }, + { + "epoch": 1.2581418581418582, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9326, + "step": 1575 + }, + { + "epoch": 1.258941058941059, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9202, + "step": 1576 + }, + { + "epoch": 1.2597402597402598, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9274, + "step": 1577 + }, + { + "epoch": 1.2605394605394604, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9222, + "step": 1578 + }, + { + "epoch": 1.2613386613386615, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9265, + "step": 1579 + }, + { 
+ "epoch": 1.262137862137862, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9264, + "step": 1580 + }, + { + "epoch": 1.2629370629370629, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9276, + "step": 1581 + }, + { + "epoch": 1.2637362637362637, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9321, + "step": 1582 + }, + { + "epoch": 1.2645354645354645, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9237, + "step": 1583 + }, + { + "epoch": 1.2653346653346653, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9285, + "step": 1584 + }, + { + "epoch": 1.2661338661338661, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9293, + "step": 1585 + }, + { + "epoch": 1.266933066933067, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9332, + "step": 1586 + }, + { + "epoch": 1.2677322677322678, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9345, + "step": 1587 + }, + { + "epoch": 1.2685314685314686, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9267, + "step": 1588 + }, + { + "epoch": 1.2693306693306694, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9298, + "step": 1589 + }, + { + "epoch": 1.2701298701298702, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9333, + "step": 1590 + }, + { + "epoch": 1.2709290709290708, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9271, + "step": 1591 + }, + { + "epoch": 1.2717282717282719, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.9279, + "step": 1592 + }, + { + "epoch": 1.2725274725274724, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9368, + "step": 1593 + }, + { + "epoch": 1.2733266733266733, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9282, + "step": 1594 + }, + { + "epoch": 1.274125874125874, + "grad_norm": 0.67578125, + "learning_rate": 0.0002, + "loss": 0.9386, + "step": 1595 + }, + { + "epoch": 1.274925074925075, + "grad_norm": 0.734375, + "learning_rate": 0.0002, + "loss": 0.929, + "step": 1596 + }, + { + "epoch": 1.2757242757242757, + "grad_norm": 0.80078125, + "learning_rate": 0.0002, + "loss": 0.9206, + "step": 1597 + }, + { + "epoch": 1.2765234765234765, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9226, + "step": 1598 + }, + { + "epoch": 1.2773226773226773, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9343, + "step": 1599 + }, + { + "epoch": 1.2781218781218782, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9262, + "step": 1600 + }, + { + "epoch": 1.278921078921079, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.9256, + "step": 1601 + }, + { + "epoch": 1.2797202797202798, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9228, + "step": 1602 + }, + { + "epoch": 1.2805194805194806, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.936, + "step": 1603 + }, + { + "epoch": 1.2813186813186812, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9354, + "step": 1604 + }, + { + "epoch": 1.2821178821178822, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9355, + "step": 1605 + }, + { + "epoch": 1.2829170829170828, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.922, + "step": 1606 + }, + { + "epoch": 1.2837162837162837, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9245, + "step": 1607 + }, + { + "epoch": 
1.2845154845154845, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9319, + "step": 1608 + }, + { + "epoch": 1.2853146853146853, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9237, + "step": 1609 + }, + { + "epoch": 1.286113886113886, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9212, + "step": 1610 + }, + { + "epoch": 1.286913086913087, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 0.9242, + "step": 1611 + }, + { + "epoch": 1.2877122877122877, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9184, + "step": 1612 + }, + { + "epoch": 1.2885114885114886, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9297, + "step": 1613 + }, + { + "epoch": 1.2893106893106894, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9242, + "step": 1614 + }, + { + "epoch": 1.2901098901098902, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9254, + "step": 1615 + }, + { + "epoch": 1.290909090909091, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.9175, + "step": 1616 + }, + { + "epoch": 1.2917082917082916, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9292, + "step": 1617 + }, + { + "epoch": 1.2925074925074926, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.9254, + "step": 1618 + }, + { + "epoch": 1.2933066933066932, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9724, + "step": 1619 + }, + { + "epoch": 1.294105894105894, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9317, + "step": 1620 + }, + { + "epoch": 1.2949050949050949, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9367, + "step": 1621 + }, + { + "epoch": 1.2957042957042957, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9181, + "step": 1622 + }, + { + "epoch": 1.2965034965034965, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9243, + "step": 1623 + }, + { + "epoch": 1.2973026973026973, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9253, + "step": 1624 + }, + { + "epoch": 1.2981018981018981, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9198, + "step": 1625 + }, + { + "epoch": 1.298901098901099, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9219, + "step": 1626 + }, + { + "epoch": 1.2997002997002998, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9237, + "step": 1627 + }, + { + "epoch": 1.3004995004995006, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9169, + "step": 1628 + }, + { + "epoch": 1.3012987012987014, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9269, + "step": 1629 + }, + { + "epoch": 1.302097902097902, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9216, + "step": 1630 + }, + { + "epoch": 1.302897102897103, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9266, + "step": 1631 + }, + { + "epoch": 1.3036963036963036, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9306, + "step": 1632 + }, + { + "epoch": 1.3044955044955044, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9239, + "step": 1633 + }, + { + "epoch": 1.3052947052947053, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9313, + "step": 1634 + }, + { + "epoch": 1.306093906093906, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9267, + "step": 1635 + }, + { + "epoch": 
1.306893106893107, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.92, + "step": 1636 + }, + { + "epoch": 1.3076923076923077, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9299, + "step": 1637 + }, + { + "epoch": 1.3084915084915085, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9255, + "step": 1638 + }, + { + "epoch": 1.3092907092907093, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9259, + "step": 1639 + }, + { + "epoch": 1.3100899100899102, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9192, + "step": 1640 + }, + { + "epoch": 1.3108891108891108, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9328, + "step": 1641 + }, + { + "epoch": 1.3116883116883118, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.924, + "step": 1642 + }, + { + "epoch": 1.3124875124875124, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9203, + "step": 1643 + }, + { + "epoch": 1.3132867132867134, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9187, + "step": 1644 + }, + { + "epoch": 1.314085914085914, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9188, + "step": 1645 + }, + { + "epoch": 1.3148851148851148, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9228, + "step": 1646 + }, + { + "epoch": 1.3156843156843157, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9285, + "step": 1647 + }, + { + "epoch": 1.3164835164835165, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9266, + "step": 1648 + }, + { + "epoch": 1.3172827172827173, + "grad_norm": 1.171875, + "learning_rate": 0.0002, + "loss": 0.9343, + "step": 1649 + }, + { + "epoch": 1.318081918081918, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9264, + "step": 1650 + }, + { + "epoch": 1.318881118881119, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9331, + "step": 1651 + }, + { + "epoch": 1.3196803196803197, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9195, + "step": 1652 + }, + { + "epoch": 1.3204795204795206, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9265, + "step": 1653 + }, + { + "epoch": 1.3212787212787211, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9261, + "step": 1654 + }, + { + "epoch": 1.3220779220779222, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9263, + "step": 1655 + }, + { + "epoch": 1.3228771228771228, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9294, + "step": 1656 + }, + { + "epoch": 1.3236763236763236, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9233, + "step": 1657 + }, + { + "epoch": 1.3244755244755244, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9224, + "step": 1658 + }, + { + "epoch": 1.3252747252747252, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9194, + "step": 1659 + }, + { + "epoch": 1.326073926073926, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9215, + "step": 1660 + }, + { + "epoch": 1.3268731268731269, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9208, + "step": 1661 + }, + { + "epoch": 1.3276723276723277, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.931, + "step": 1662 + }, + { + "epoch": 1.3284715284715285, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.923, + "step": 1663 + }, + { + "epoch": 1.3292707292707293, + 
"grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9227, + "step": 1664 + }, + { + "epoch": 1.3300699300699301, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9281, + "step": 1665 + }, + { + "epoch": 1.330869130869131, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9226, + "step": 1666 + }, + { + "epoch": 1.3316683316683315, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.9262, + "step": 1667 + }, + { + "epoch": 1.3324675324675326, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 0.9305, + "step": 1668 + }, + { + "epoch": 1.3332667332667332, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9284, + "step": 1669 + }, + { + "epoch": 1.334065934065934, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9264, + "step": 1670 + }, + { + "epoch": 1.3348651348651348, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9275, + "step": 1671 + }, + { + "epoch": 1.3356643356643356, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9347, + "step": 1672 + }, + { + "epoch": 1.3364635364635364, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9211, + "step": 1673 + }, + { + "epoch": 1.3372627372627373, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9169, + "step": 1674 + }, + { + "epoch": 1.338061938061938, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9223, + "step": 1675 + }, + { + "epoch": 1.3388611388611389, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9265, + "step": 1676 + }, + { + "epoch": 1.3396603396603397, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9197, + "step": 1677 + }, + { + "epoch": 1.3404595404595405, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9263, + "step": 1678 + }, + { + "epoch": 1.3412587412587413, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9178, + "step": 1679 + }, + { + "epoch": 1.342057942057942, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9305, + "step": 1680 + }, + { + "epoch": 1.342857142857143, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9141, + "step": 1681 + }, + { + "epoch": 1.3436563436563436, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9244, + "step": 1682 + }, + { + "epoch": 1.3444555444555444, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9247, + "step": 1683 + }, + { + "epoch": 1.3452547452547452, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9266, + "step": 1684 + }, + { + "epoch": 1.346053946053946, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9315, + "step": 1685 + }, + { + "epoch": 1.3468531468531468, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9232, + "step": 1686 + }, + { + "epoch": 1.3476523476523476, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9296, + "step": 1687 + }, + { + "epoch": 1.3484515484515485, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9356, + "step": 1688 + }, + { + "epoch": 1.3492507492507493, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9159, + "step": 1689 + }, + { + "epoch": 1.35004995004995, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9257, + "step": 1690 + }, + { + "epoch": 1.350849150849151, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9244, + "step": 1691 + }, + { + "epoch": 1.3516483516483517, + "grad_norm": 
0.392578125, + "learning_rate": 0.0002, + "loss": 0.9273, + "step": 1692 + }, + { + "epoch": 1.3524475524475523, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9156, + "step": 1693 + }, + { + "epoch": 1.3532467532467534, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9237, + "step": 1694 + }, + { + "epoch": 1.354045954045954, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9216, + "step": 1695 + }, + { + "epoch": 1.3548451548451548, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9229, + "step": 1696 + }, + { + "epoch": 1.3556443556443556, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9251, + "step": 1697 + }, + { + "epoch": 1.3564435564435564, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9184, + "step": 1698 + }, + { + "epoch": 1.3572427572427572, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9259, + "step": 1699 + }, + { + "epoch": 1.358041958041958, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9303, + "step": 1700 + }, + { + "epoch": 1.3588411588411589, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9186, + "step": 1701 + }, + { + "epoch": 1.3596403596403597, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9302, + "step": 1702 + }, + { + "epoch": 1.3604395604395605, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9309, + "step": 1703 + }, + { + "epoch": 1.3612387612387613, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9328, + "step": 1704 + }, + { + "epoch": 1.3620379620379621, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9226, + "step": 1705 + }, + { + "epoch": 1.3628371628371627, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9268, + "step": 1706 + }, + { + "epoch": 1.3636363636363638, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9216, + "step": 1707 + }, + { + "epoch": 1.3644355644355644, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9203, + "step": 1708 + }, + { + "epoch": 1.3652347652347652, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9292, + "step": 1709 + }, + { + "epoch": 1.366033966033966, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9193, + "step": 1710 + }, + { + "epoch": 1.3668331668331668, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9306, + "step": 1711 + }, + { + "epoch": 1.3676323676323676, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9298, + "step": 1712 + }, + { + "epoch": 1.3684315684315684, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9246, + "step": 1713 + }, + { + "epoch": 1.3692307692307693, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9145, + "step": 1714 + }, + { + "epoch": 1.37002997002997, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9244, + "step": 1715 + }, + { + "epoch": 1.3708291708291709, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9226, + "step": 1716 + }, + { + "epoch": 1.3716283716283717, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9249, + "step": 1717 + }, + { + "epoch": 1.3724275724275725, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9278, + "step": 1718 + }, + { + "epoch": 1.3732267732267731, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9107, + "step": 1719 + }, + { + "epoch": 1.3740259740259742, + "grad_norm": 
0.5234375, + "learning_rate": 0.0002, + "loss": 0.9182, + "step": 1720 + }, + { + "epoch": 1.3748251748251747, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9304, + "step": 1721 + }, + { + "epoch": 1.3756243756243756, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9255, + "step": 1722 + }, + { + "epoch": 1.3764235764235764, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9191, + "step": 1723 + }, + { + "epoch": 1.3772227772227772, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9187, + "step": 1724 + }, + { + "epoch": 1.378021978021978, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9254, + "step": 1725 + }, + { + "epoch": 1.3788211788211788, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9283, + "step": 1726 + }, + { + "epoch": 1.3796203796203796, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9352, + "step": 1727 + }, + { + "epoch": 1.3804195804195805, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9309, + "step": 1728 + }, + { + "epoch": 1.3812187812187813, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9187, + "step": 1729 + }, + { + "epoch": 1.382017982017982, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.924, + "step": 1730 + }, + { + "epoch": 1.382817182817183, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9196, + "step": 1731 + }, + { + "epoch": 1.3836163836163835, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9252, + "step": 1732 + }, + { + "epoch": 1.3844155844155845, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9367, + "step": 1733 + }, + { + "epoch": 1.3852147852147851, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9195, + "step": 1734 + }, + { + "epoch": 1.386013986013986, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.914, + "step": 1735 + }, + { + "epoch": 1.3868131868131868, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9247, + "step": 1736 + }, + { + "epoch": 1.3876123876123876, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9209, + "step": 1737 + }, + { + "epoch": 1.3884115884115884, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9294, + "step": 1738 + }, + { + "epoch": 1.3892107892107892, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9253, + "step": 1739 + }, + { + "epoch": 1.39000999000999, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9262, + "step": 1740 + }, + { + "epoch": 1.3908091908091909, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9206, + "step": 1741 + }, + { + "epoch": 1.3916083916083917, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9234, + "step": 1742 + }, + { + "epoch": 1.3924075924075925, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9262, + "step": 1743 + }, + { + "epoch": 1.3932067932067933, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9226, + "step": 1744 + }, + { + "epoch": 1.394005994005994, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9744, + "step": 1745 + }, + { + "epoch": 1.394805194805195, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9275, + "step": 1746 + }, + { + "epoch": 1.3956043956043955, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9224, + "step": 1747 + }, + { + "epoch": 1.3964035964035963, + "grad_norm": 0.439453125, + 
"learning_rate": 0.0002, + "loss": 0.9326, + "step": 1748 + }, + { + "epoch": 1.3972027972027972, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9184, + "step": 1749 + }, + { + "epoch": 1.398001998001998, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9233, + "step": 1750 + }, + { + "epoch": 1.3988011988011988, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9207, + "step": 1751 + }, + { + "epoch": 1.3996003996003996, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9208, + "step": 1752 + }, + { + "epoch": 1.4003996003996004, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9173, + "step": 1753 + }, + { + "epoch": 1.4011988011988012, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9237, + "step": 1754 + }, + { + "epoch": 1.401998001998002, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9148, + "step": 1755 + }, + { + "epoch": 1.4027972027972029, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9189, + "step": 1756 + }, + { + "epoch": 1.4035964035964037, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9215, + "step": 1757 + }, + { + "epoch": 1.4043956043956043, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9248, + "step": 1758 + }, + { + "epoch": 1.4051948051948053, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9229, + "step": 1759 + }, + { + "epoch": 1.405994005994006, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9262, + "step": 1760 + }, + { + "epoch": 1.4067932067932067, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9287, + "step": 1761 + }, + { + "epoch": 1.4075924075924076, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9216, + "step": 1762 + }, + { + "epoch": 1.4083916083916084, + "grad_norm": 0.68359375, + "learning_rate": 0.0002, + "loss": 0.916, + "step": 1763 + }, + { + "epoch": 1.4091908091908092, + "grad_norm": 0.828125, + "learning_rate": 0.0002, + "loss": 0.9273, + "step": 1764 + }, + { + "epoch": 1.40999000999001, + "grad_norm": 1.2265625, + "learning_rate": 0.0002, + "loss": 0.9288, + "step": 1765 + }, + { + "epoch": 1.4107892107892108, + "grad_norm": 1.21875, + "learning_rate": 0.0002, + "loss": 0.9143, + "step": 1766 + }, + { + "epoch": 1.4115884115884116, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9218, + "step": 1767 + }, + { + "epoch": 1.4123876123876125, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.9271, + "step": 1768 + }, + { + "epoch": 1.413186813186813, + "grad_norm": 3.40625, + "learning_rate": 0.0002, + "loss": 0.9477, + "step": 1769 + }, + { + "epoch": 1.413986013986014, + "grad_norm": 1.2890625, + "learning_rate": 0.0002, + "loss": 0.9314, + "step": 1770 + }, + { + "epoch": 1.4147852147852147, + "grad_norm": 2.53125, + "learning_rate": 0.0002, + "loss": 0.9195, + "step": 1771 + }, + { + "epoch": 1.4155844155844157, + "grad_norm": 1.9140625, + "learning_rate": 0.0002, + "loss": 0.9232, + "step": 1772 + }, + { + "epoch": 1.4163836163836163, + "grad_norm": 3.65625, + "learning_rate": 0.0002, + "loss": 0.9367, + "step": 1773 + }, + { + "epoch": 1.4171828171828171, + "grad_norm": 3.109375, + "learning_rate": 0.0002, + "loss": 0.9286, + "step": 1774 + }, + { + "epoch": 1.417982017982018, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.9226, + "step": 1775 + }, + { + "epoch": 1.4187812187812188, + "grad_norm": 1.9453125, + "learning_rate": 0.0002, + 
"loss": 0.9232, + "step": 1776 + }, + { + "epoch": 1.4195804195804196, + "grad_norm": 0.83984375, + "learning_rate": 0.0002, + "loss": 0.9251, + "step": 1777 + }, + { + "epoch": 1.4203796203796204, + "grad_norm": 2.28125, + "learning_rate": 0.0002, + "loss": 0.9291, + "step": 1778 + }, + { + "epoch": 1.4211788211788212, + "grad_norm": 1.515625, + "learning_rate": 0.0002, + "loss": 0.9371, + "step": 1779 + }, + { + "epoch": 1.421978021978022, + "grad_norm": 1.5, + "learning_rate": 0.0002, + "loss": 0.9387, + "step": 1780 + }, + { + "epoch": 1.4227772227772228, + "grad_norm": 1.0390625, + "learning_rate": 0.0002, + "loss": 0.9314, + "step": 1781 + }, + { + "epoch": 1.4235764235764234, + "grad_norm": 2.265625, + "learning_rate": 0.0002, + "loss": 0.9338, + "step": 1782 + }, + { + "epoch": 1.4243756243756245, + "grad_norm": 1.421875, + "learning_rate": 0.0002, + "loss": 0.9263, + "step": 1783 + }, + { + "epoch": 1.425174825174825, + "grad_norm": 1.96875, + "learning_rate": 0.0002, + "loss": 0.9263, + "step": 1784 + }, + { + "epoch": 1.425974025974026, + "grad_norm": 1.7265625, + "learning_rate": 0.0002, + "loss": 0.9312, + "step": 1785 + }, + { + "epoch": 1.4267732267732267, + "grad_norm": 1.0078125, + "learning_rate": 0.0002, + "loss": 0.9337, + "step": 1786 + }, + { + "epoch": 1.4275724275724275, + "grad_norm": 0.8671875, + "learning_rate": 0.0002, + "loss": 0.9338, + "step": 1787 + }, + { + "epoch": 1.4283716283716283, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9272, + "step": 1788 + }, + { + "epoch": 1.4291708291708292, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.9194, + "step": 1789 + }, + { + "epoch": 1.42997002997003, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9239, + "step": 1790 + }, + { + "epoch": 1.4307692307692308, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9361, + "step": 1791 + }, + { + "epoch": 1.4315684315684316, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9194, + "step": 1792 + }, + { + "epoch": 1.4323676323676324, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.925, + "step": 1793 + }, + { + "epoch": 1.4331668331668332, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.935, + "step": 1794 + }, + { + "epoch": 1.4339660339660338, + "grad_norm": 2.296875, + "learning_rate": 0.0002, + "loss": 0.9438, + "step": 1795 + }, + { + "epoch": 1.4347652347652349, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9326, + "step": 1796 + }, + { + "epoch": 1.4355644355644355, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9221, + "step": 1797 + }, + { + "epoch": 1.4363636363636363, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9208, + "step": 1798 + }, + { + "epoch": 1.437162837162837, + "grad_norm": 0.7265625, + "learning_rate": 0.0002, + "loss": 0.9262, + "step": 1799 + }, + { + "epoch": 1.437962037962038, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9325, + "step": 1800 + }, + { + "epoch": 1.4387612387612387, + "grad_norm": 0.64453125, + "learning_rate": 0.0002, + "loss": 0.9227, + "step": 1801 + }, + { + "epoch": 1.4395604395604396, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9219, + "step": 1802 + }, + { + "epoch": 1.4403596403596404, + "grad_norm": 0.70703125, + "learning_rate": 0.0002, + "loss": 0.9357, + "step": 1803 + }, + { + "epoch": 1.4411588411588412, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9292, + "step": 1804 + }, + { + 
"epoch": 1.441958041958042, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.9234, + "step": 1805 + }, + { + "epoch": 1.4427572427572428, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9259, + "step": 1806 + }, + { + "epoch": 1.4435564435564436, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.917, + "step": 1807 + }, + { + "epoch": 1.4443556443556442, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9285, + "step": 1808 + }, + { + "epoch": 1.4451548451548453, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9231, + "step": 1809 + }, + { + "epoch": 1.4459540459540459, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9246, + "step": 1810 + }, + { + "epoch": 1.4467532467532467, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9267, + "step": 1811 + }, + { + "epoch": 1.4475524475524475, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 1812 + }, + { + "epoch": 1.4483516483516483, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9295, + "step": 1813 + }, + { + "epoch": 1.4491508491508491, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9242, + "step": 1814 + }, + { + "epoch": 1.44995004995005, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9231, + "step": 1815 + }, + { + "epoch": 1.4507492507492508, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9211, + "step": 1816 + }, + { + "epoch": 1.4515484515484516, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9206, + "step": 1817 + }, + { + "epoch": 1.4523476523476524, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.928, + "step": 1818 + }, + { + "epoch": 1.4531468531468532, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9277, + "step": 1819 + }, + { + "epoch": 1.453946053946054, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9217, + "step": 1820 + }, + { + "epoch": 1.4547452547452546, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9277, + "step": 1821 + }, + { + "epoch": 1.4555444555444557, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9227, + "step": 1822 + }, + { + "epoch": 1.4563436563436563, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9212, + "step": 1823 + }, + { + "epoch": 1.457142857142857, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9127, + "step": 1824 + }, + { + "epoch": 1.457942057942058, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9256, + "step": 1825 + }, + { + "epoch": 1.4587412587412587, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9224, + "step": 1826 + }, + { + "epoch": 1.4595404595404595, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9263, + "step": 1827 + }, + { + "epoch": 1.4603396603396603, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9102, + "step": 1828 + }, + { + "epoch": 1.4611388611388612, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9187, + "step": 1829 + }, + { + "epoch": 1.461938061938062, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9257, + "step": 1830 + }, + { + "epoch": 1.4627372627372628, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9298, + "step": 1831 + }, + { + "epoch": 1.4635364635364636, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.965, + "step": 1832 + }, + { + "epoch": 
1.4643356643356644, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9263, + "step": 1833 + }, + { + "epoch": 1.465134865134865, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9196, + "step": 1834 + }, + { + "epoch": 1.465934065934066, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9159, + "step": 1835 + }, + { + "epoch": 1.4667332667332666, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9178, + "step": 1836 + }, + { + "epoch": 1.4675324675324675, + "grad_norm": 1.40625, + "learning_rate": 0.0002, + "loss": 0.933, + "step": 1837 + }, + { + "epoch": 1.4683316683316683, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9259, + "step": 1838 + }, + { + "epoch": 1.469130869130869, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9405, + "step": 1839 + }, + { + "epoch": 1.46993006993007, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9384, + "step": 1840 + }, + { + "epoch": 1.4707292707292707, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9138, + "step": 1841 + }, + { + "epoch": 1.4715284715284715, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9343, + "step": 1842 + }, + { + "epoch": 1.4723276723276724, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9213, + "step": 1843 + }, + { + "epoch": 1.4731268731268732, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9213, + "step": 1844 + }, + { + "epoch": 1.473926073926074, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9189, + "step": 1845 + }, + { + "epoch": 1.4747252747252748, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9239, + "step": 1846 + }, + { + "epoch": 1.4755244755244754, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.926, + "step": 1847 + }, + { + "epoch": 1.4763236763236764, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9244, + "step": 1848 + }, + { + "epoch": 1.477122877122877, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9802, + "step": 1849 + }, + { + "epoch": 1.4779220779220779, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9271, + "step": 1850 + }, + { + "epoch": 1.4787212787212787, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9238, + "step": 1851 + }, + { + "epoch": 1.4795204795204795, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9284, + "step": 1852 + }, + { + "epoch": 1.4803196803196803, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9142, + "step": 1853 + }, + { + "epoch": 1.4811188811188811, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.934, + "step": 1854 + }, + { + "epoch": 1.481918081918082, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9172, + "step": 1855 + }, + { + "epoch": 1.4827172827172828, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9146, + "step": 1856 + }, + { + "epoch": 1.4835164835164836, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 1857 + }, + { + "epoch": 1.4843156843156844, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9241, + "step": 1858 + }, + { + "epoch": 1.4851148851148852, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9271, + "step": 1859 + }, + { + "epoch": 1.4859140859140858, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9217, + "step": 1860 + }, + { + "epoch": 
1.4867132867132868, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.926, + "step": 1861 + }, + { + "epoch": 1.4875124875124874, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9276, + "step": 1862 + }, + { + "epoch": 1.4883116883116883, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9231, + "step": 1863 + }, + { + "epoch": 1.489110889110889, + "grad_norm": 1.2890625, + "learning_rate": 0.0002, + "loss": 0.9495, + "step": 1864 + }, + { + "epoch": 1.4899100899100899, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9296, + "step": 1865 + }, + { + "epoch": 1.4907092907092907, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9198, + "step": 1866 + }, + { + "epoch": 1.4915084915084915, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9199, + "step": 1867 + }, + { + "epoch": 1.4923076923076923, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9312, + "step": 1868 + }, + { + "epoch": 1.4931068931068932, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9228, + "step": 1869 + }, + { + "epoch": 1.493906093906094, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9199, + "step": 1870 + }, + { + "epoch": 1.4947052947052948, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9256, + "step": 1871 + }, + { + "epoch": 1.4955044955044956, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9222, + "step": 1872 + }, + { + "epoch": 1.4963036963036962, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.92, + "step": 1873 + }, + { + "epoch": 1.4971028971028972, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9204, + "step": 1874 + }, + { + "epoch": 1.4979020979020978, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9293, + "step": 1875 + }, + { + "epoch": 1.4987012987012986, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9285, + "step": 1876 + }, + { + "epoch": 1.4995004995004995, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.921, + "step": 1877 + }, + { + "epoch": 1.5002997002997003, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9254, + "step": 1878 + }, + { + "epoch": 1.501098901098901, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9233, + "step": 1879 + }, + { + "epoch": 1.501898101898102, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9268, + "step": 1880 + }, + { + "epoch": 1.5026973026973027, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9226, + "step": 1881 + }, + { + "epoch": 1.5034965034965035, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9211, + "step": 1882 + }, + { + "epoch": 1.5042957042957044, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9236, + "step": 1883 + }, + { + "epoch": 1.505094905094905, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9127, + "step": 1884 + }, + { + "epoch": 1.505894105894106, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9237, + "step": 1885 + }, + { + "epoch": 1.5066933066933066, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9351, + "step": 1886 + }, + { + "epoch": 1.5074925074925076, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9205, + "step": 1887 + }, + { + "epoch": 1.5082917082917082, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9243, + "step": 1888 + }, + { + "epoch": 
1.509090909090909, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9157, + "step": 1889 + }, + { + "epoch": 1.5098901098901099, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9148, + "step": 1890 + }, + { + "epoch": 1.5106893106893107, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9229, + "step": 1891 + }, + { + "epoch": 1.5114885114885115, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9153, + "step": 1892 + }, + { + "epoch": 1.5122877122877123, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9229, + "step": 1893 + }, + { + "epoch": 1.5130869130869131, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9139, + "step": 1894 + }, + { + "epoch": 1.513886113886114, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9211, + "step": 1895 + }, + { + "epoch": 1.5146853146853148, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9247, + "step": 1896 + }, + { + "epoch": 1.5154845154845153, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9173, + "step": 1897 + }, + { + "epoch": 1.5162837162837164, + "grad_norm": 2.703125, + "learning_rate": 0.0002, + "loss": 0.9576, + "step": 1898 + }, + { + "epoch": 1.517082917082917, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9182, + "step": 1899 + }, + { + "epoch": 1.517882117882118, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.9245, + "step": 1900 + }, + { + "epoch": 1.5186813186813186, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9179, + "step": 1901 + }, + { + "epoch": 1.5194805194805194, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.9286, + "step": 1902 + }, + { + "epoch": 1.5202797202797202, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9309, + "step": 1903 + }, + { + "epoch": 1.521078921078921, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9206, + "step": 1904 + }, + { + "epoch": 1.5218781218781219, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9251, + "step": 1905 + }, + { + "epoch": 1.5226773226773227, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9211, + "step": 1906 + }, + { + "epoch": 1.5234765234765235, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9158, + "step": 1907 + }, + { + "epoch": 1.5242757242757243, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9149, + "step": 1908 + }, + { + "epoch": 1.5250749250749251, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9223, + "step": 1909 + }, + { + "epoch": 1.5258741258741257, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9214, + "step": 1910 + }, + { + "epoch": 1.5266733266733268, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9312, + "step": 1911 + }, + { + "epoch": 1.5274725274725274, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9359, + "step": 1912 + }, + { + "epoch": 1.5282717282717284, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9163, + "step": 1913 + }, + { + "epoch": 1.529070929070929, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9192, + "step": 1914 + }, + { + "epoch": 1.5298701298701298, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9258, + "step": 1915 + }, + { + "epoch": 1.5306693306693306, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9193, + "step": 1916 + }, + { + "epoch": 
1.5314685314685315, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 1917 + }, + { + "epoch": 1.5322677322677323, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 1918 + }, + { + "epoch": 1.533066933066933, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9221, + "step": 1919 + }, + { + "epoch": 1.533866133866134, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9177, + "step": 1920 + }, + { + "epoch": 1.5346653346653345, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.975, + "step": 1921 + }, + { + "epoch": 1.5354645354645355, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9204, + "step": 1922 + }, + { + "epoch": 1.5362637362637361, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9138, + "step": 1923 + }, + { + "epoch": 1.5370629370629372, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9237, + "step": 1924 + }, + { + "epoch": 1.5378621378621378, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9233, + "step": 1925 + }, + { + "epoch": 1.5386613386613388, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9283, + "step": 1926 + }, + { + "epoch": 1.5394605394605394, + "grad_norm": 0.64453125, + "learning_rate": 0.0002, + "loss": 0.9235, + "step": 1927 + }, + { + "epoch": 1.5402597402597402, + "grad_norm": 0.7421875, + "learning_rate": 0.0002, + "loss": 0.916, + "step": 1928 + }, + { + "epoch": 1.541058941058941, + "grad_norm": 0.73046875, + "learning_rate": 0.0002, + "loss": 0.923, + "step": 1929 + }, + { + "epoch": 1.5418581418581419, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.923, + "step": 1930 + }, + { + "epoch": 1.5426573426573427, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9125, + "step": 1931 + }, + { + "epoch": 1.5434565434565435, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.9243, + "step": 1932 + }, + { + "epoch": 1.5442557442557443, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9218, + "step": 1933 + }, + { + "epoch": 1.545054945054945, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.918, + "step": 1934 + }, + { + "epoch": 1.545854145854146, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.928, + "step": 1935 + }, + { + "epoch": 1.5466533466533465, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9232, + "step": 1936 + }, + { + "epoch": 1.5474525474525476, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9294, + "step": 1937 + }, + { + "epoch": 1.5482517482517482, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9232, + "step": 1938 + }, + { + "epoch": 1.5490509490509492, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9121, + "step": 1939 + }, + { + "epoch": 1.5498501498501498, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9302, + "step": 1940 + }, + { + "epoch": 1.5506493506493506, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9285, + "step": 1941 + }, + { + "epoch": 1.5514485514485514, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9259, + "step": 1942 + }, + { + "epoch": 1.5522477522477522, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9254, + "step": 1943 + }, + { + "epoch": 1.553046953046953, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.922, + "step": 1944 + }, + { + "epoch": 
1.5538461538461539, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9268, + "step": 1945 + }, + { + "epoch": 1.5546453546453547, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9206, + "step": 1946 + }, + { + "epoch": 1.5554445554445553, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9234, + "step": 1947 + }, + { + "epoch": 1.5562437562437563, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9289, + "step": 1948 + }, + { + "epoch": 1.557042957042957, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9165, + "step": 1949 + }, + { + "epoch": 1.557842157842158, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9198, + "step": 1950 + }, + { + "epoch": 1.5586413586413586, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9199, + "step": 1951 + }, + { + "epoch": 1.5594405594405596, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9158, + "step": 1952 + }, + { + "epoch": 1.5602397602397602, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9258, + "step": 1953 + }, + { + "epoch": 1.561038961038961, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9217, + "step": 1954 + }, + { + "epoch": 1.5618381618381618, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9165, + "step": 1955 + }, + { + "epoch": 1.5626373626373626, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9206, + "step": 1956 + }, + { + "epoch": 1.5634365634365635, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9163, + "step": 1957 + }, + { + "epoch": 1.5642357642357643, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9213, + "step": 1958 + }, + { + "epoch": 1.565034965034965, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9227, + "step": 1959 + }, + { + "epoch": 1.5658341658341657, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9156, + "step": 1960 + }, + { + "epoch": 1.5666333666333667, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9204, + "step": 1961 + }, + { + "epoch": 1.5674325674325673, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9161, + "step": 1962 + }, + { + "epoch": 1.5682317682317684, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9226, + "step": 1963 + }, + { + "epoch": 1.569030969030969, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9131, + "step": 1964 + }, + { + "epoch": 1.56983016983017, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9197, + "step": 1965 + }, + { + "epoch": 1.5706293706293706, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9256, + "step": 1966 + }, + { + "epoch": 1.5714285714285714, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9268, + "step": 1967 + }, + { + "epoch": 1.5722277722277722, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9152, + "step": 1968 + }, + { + "epoch": 1.573026973026973, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9224, + "step": 1969 + }, + { + "epoch": 1.5738261738261738, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 1970 + }, + { + "epoch": 1.5746253746253747, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.922, + "step": 1971 + }, + { + "epoch": 1.5754245754245755, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9315, + "step": 1972 + }, + { + "epoch": 
1.576223776223776, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9208, + "step": 1973 + }, + { + "epoch": 1.5770229770229771, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9181, + "step": 1974 + }, + { + "epoch": 1.5778221778221777, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9235, + "step": 1975 + }, + { + "epoch": 1.5786213786213787, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.918, + "step": 1976 + }, + { + "epoch": 1.5794205794205793, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9173, + "step": 1977 + }, + { + "epoch": 1.5802197802197804, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9213, + "step": 1978 + }, + { + "epoch": 1.581018981018981, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9172, + "step": 1979 + }, + { + "epoch": 1.5818181818181818, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9162, + "step": 1980 + }, + { + "epoch": 1.5826173826173826, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.9217, + "step": 1981 + }, + { + "epoch": 1.5834165834165834, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.92, + "step": 1982 + }, + { + "epoch": 1.5842157842157842, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 1983 + }, + { + "epoch": 1.585014985014985, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.9194, + "step": 1984 + }, + { + "epoch": 1.5858141858141859, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.9164, + "step": 1985 + }, + { + "epoch": 1.5866133866133865, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9202, + "step": 1986 + }, + { + "epoch": 1.5874125874125875, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9187, + "step": 1987 + }, + { + "epoch": 1.588211788211788, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9282, + "step": 1988 + }, + { + "epoch": 1.5890109890109891, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.921, + "step": 1989 + }, + { + "epoch": 1.5898101898101897, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9149, + "step": 1990 + }, + { + "epoch": 1.5906093906093908, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9214, + "step": 1991 + }, + { + "epoch": 1.5914085914085914, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9211, + "step": 1992 + }, + { + "epoch": 1.5922077922077922, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9127, + "step": 1993 + }, + { + "epoch": 1.593006993006993, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9209, + "step": 1994 + }, + { + "epoch": 1.5938061938061938, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9268, + "step": 1995 + }, + { + "epoch": 1.5946053946053946, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9212, + "step": 1996 + }, + { + "epoch": 1.5954045954045954, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9136, + "step": 1997 + }, + { + "epoch": 1.5962037962037963, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9294, + "step": 1998 + }, + { + "epoch": 1.5970029970029969, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.932, + "step": 1999 + }, + { + "epoch": 1.597802197802198, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9216, + "step": 2000 + }, + { + "epoch": 1.5986013986013985, + 
"grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9167, + "step": 2001 + }, + { + "epoch": 1.5994005994005995, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9213, + "step": 2002 + }, + { + "epoch": 1.6001998001998001, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9136, + "step": 2003 + }, + { + "epoch": 1.6009990009990012, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9263, + "step": 2004 + }, + { + "epoch": 1.6017982017982018, + "grad_norm": 0.70703125, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 2005 + }, + { + "epoch": 1.6025974025974026, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9223, + "step": 2006 + }, + { + "epoch": 1.6033966033966034, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9288, + "step": 2007 + }, + { + "epoch": 1.6041958041958042, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9185, + "step": 2008 + }, + { + "epoch": 1.604995004995005, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9255, + "step": 2009 + }, + { + "epoch": 1.6057942057942058, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9198, + "step": 2010 + }, + { + "epoch": 1.6065934065934067, + "grad_norm": 0.671875, + "learning_rate": 0.0002, + "loss": 0.9575, + "step": 2011 + }, + { + "epoch": 1.6073926073926073, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9151, + "step": 2012 + }, + { + "epoch": 1.6081918081918083, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9247, + "step": 2013 + }, + { + "epoch": 1.6089910089910089, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 2014 + }, + { + "epoch": 1.60979020979021, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9193, + "step": 2015 + }, + { + "epoch": 1.6105894105894105, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.918, + "step": 2016 + }, + { + "epoch": 1.6113886113886113, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9239, + "step": 2017 + }, + { + "epoch": 1.6121878121878122, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9244, + "step": 2018 + }, + { + "epoch": 1.612987012987013, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9155, + "step": 2019 + }, + { + "epoch": 1.6137862137862138, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.925, + "step": 2020 + }, + { + "epoch": 1.6145854145854146, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9297, + "step": 2021 + }, + { + "epoch": 1.6153846153846154, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9247, + "step": 2022 + }, + { + "epoch": 1.6161838161838162, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9253, + "step": 2023 + }, + { + "epoch": 1.616983016983017, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9177, + "step": 2024 + }, + { + "epoch": 1.6177822177822176, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9248, + "step": 2025 + }, + { + "epoch": 1.6185814185814187, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9202, + "step": 2026 + }, + { + "epoch": 1.6193806193806193, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9278, + "step": 2027 + }, + { + "epoch": 1.6201798201798203, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9149, + "step": 2028 + }, + { + "epoch": 1.620979020979021, + 
"grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 2029 + }, + { + "epoch": 1.6217782217782217, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9186, + "step": 2030 + }, + { + "epoch": 1.6225774225774225, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 2031 + }, + { + "epoch": 1.6233766233766234, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 2032 + }, + { + "epoch": 1.6241758241758242, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9209, + "step": 2033 + }, + { + "epoch": 1.624975024975025, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9093, + "step": 2034 + }, + { + "epoch": 1.6257742257742258, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9174, + "step": 2035 + }, + { + "epoch": 1.6265734265734266, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9185, + "step": 2036 + }, + { + "epoch": 1.6273726273726274, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9119, + "step": 2037 + }, + { + "epoch": 1.628171828171828, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9291, + "step": 2038 + }, + { + "epoch": 1.628971028971029, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9189, + "step": 2039 + }, + { + "epoch": 1.6297702297702297, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9301, + "step": 2040 + }, + { + "epoch": 1.6305694305694307, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 2041 + }, + { + "epoch": 1.6313686313686313, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 2042 + }, + { + "epoch": 1.6321678321678321, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 2043 + }, + { + "epoch": 1.632967032967033, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9173, + "step": 2044 + }, + { + "epoch": 1.6337662337662338, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9174, + "step": 2045 + }, + { + "epoch": 1.6345654345654346, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9206, + "step": 2046 + }, + { + "epoch": 1.6353646353646354, + "grad_norm": 0.71484375, + "learning_rate": 0.0002, + "loss": 0.9367, + "step": 2047 + }, + { + "epoch": 1.6361638361638362, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9154, + "step": 2048 + }, + { + "epoch": 1.6369630369630368, + "grad_norm": 0.796875, + "learning_rate": 0.0002, + "loss": 0.9629, + "step": 2049 + }, + { + "epoch": 1.6377622377622378, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9183, + "step": 2050 + }, + { + "epoch": 1.6385614385614384, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9139, + "step": 2051 + }, + { + "epoch": 1.6393606393606395, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9218, + "step": 2052 + }, + { + "epoch": 1.64015984015984, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9215, + "step": 2053 + }, + { + "epoch": 1.640959040959041, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9125, + "step": 2054 + }, + { + "epoch": 1.6417582417582417, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9304, + "step": 2055 + }, + { + "epoch": 1.6425574425574425, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9159, + "step": 2056 + }, + { + "epoch": 
1.6433566433566433, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9231, + "step": 2057 + }, + { + "epoch": 1.6441558441558441, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9292, + "step": 2058 + }, + { + "epoch": 1.644955044955045, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9275, + "step": 2059 + }, + { + "epoch": 1.6457542457542458, + "grad_norm": 0.65625, + "learning_rate": 0.0002, + "loss": 0.9215, + "step": 2060 + }, + { + "epoch": 1.6465534465534466, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.9192, + "step": 2061 + }, + { + "epoch": 1.6473526473526472, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9185, + "step": 2062 + }, + { + "epoch": 1.6481518481518482, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9234, + "step": 2063 + }, + { + "epoch": 1.6489510489510488, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.915, + "step": 2064 + }, + { + "epoch": 1.6497502497502499, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9186, + "step": 2065 + }, + { + "epoch": 1.6505494505494505, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9167, + "step": 2066 + }, + { + "epoch": 1.6513486513486515, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9107, + "step": 2067 + }, + { + "epoch": 1.652147852147852, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 2068 + }, + { + "epoch": 1.652947052947053, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9296, + "step": 2069 + }, + { + "epoch": 1.6537462537462537, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 2070 + }, + { + "epoch": 1.6545454545454545, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9253, + "step": 2071 + }, + { + "epoch": 1.6553446553446554, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9077, + "step": 2072 + }, + { + "epoch": 1.6561438561438562, + "grad_norm": 0.671875, + "learning_rate": 0.0002, + "loss": 0.9592, + "step": 2073 + }, + { + "epoch": 1.656943056943057, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.926, + "step": 2074 + }, + { + "epoch": 1.6577422577422576, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9142, + "step": 2075 + }, + { + "epoch": 1.6585414585414586, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9164, + "step": 2076 + }, + { + "epoch": 1.6593406593406592, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 2077 + }, + { + "epoch": 1.6601398601398603, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9246, + "step": 2078 + }, + { + "epoch": 1.6609390609390609, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9234, + "step": 2079 + }, + { + "epoch": 1.661738261738262, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 0.9187, + "step": 2080 + }, + { + "epoch": 1.6625374625374625, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9178, + "step": 2081 + }, + { + "epoch": 1.6633366633366633, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9239, + "step": 2082 + }, + { + "epoch": 1.6641358641358641, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9217, + "step": 2083 + }, + { + "epoch": 1.664935064935065, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9149, + "step": 2084 + }, + { + "epoch": 1.6657342657342658, + 
"grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9173, + "step": 2085 + }, + { + "epoch": 1.6665334665334666, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9154, + "step": 2086 + }, + { + "epoch": 1.6673326673326674, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9106, + "step": 2087 + }, + { + "epoch": 1.668131868131868, + "grad_norm": 0.91015625, + "learning_rate": 0.0002, + "loss": 0.9596, + "step": 2088 + }, + { + "epoch": 1.668931068931069, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 2089 + }, + { + "epoch": 1.6697302697302696, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9138, + "step": 2090 + }, + { + "epoch": 1.6705294705294707, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9243, + "step": 2091 + }, + { + "epoch": 1.6713286713286712, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.926, + "step": 2092 + }, + { + "epoch": 1.6721278721278723, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9198, + "step": 2093 + }, + { + "epoch": 1.6729270729270729, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9263, + "step": 2094 + }, + { + "epoch": 1.6737262737262737, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9124, + "step": 2095 + }, + { + "epoch": 1.6745254745254745, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.919, + "step": 2096 + }, + { + "epoch": 1.6753246753246753, + "grad_norm": 1.75, + "learning_rate": 0.0002, + "loss": 0.9461, + "step": 2097 + }, + { + "epoch": 1.6761238761238761, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 0.9374, + "step": 2098 + }, + { + "epoch": 1.676923076923077, + "grad_norm": 0.6875, + "learning_rate": 0.0002, + "loss": 0.9201, + "step": 2099 + }, + { + "epoch": 1.6777222777222778, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9268, + "step": 2100 + }, + { + "epoch": 1.6785214785214784, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9257, + "step": 2101 + }, + { + "epoch": 1.6793206793206794, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9213, + "step": 2102 + }, + { + "epoch": 1.68011988011988, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.927, + "step": 2103 + }, + { + "epoch": 1.680919080919081, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9173, + "step": 2104 + }, + { + "epoch": 1.6817182817182816, + "grad_norm": 1.1875, + "learning_rate": 0.0002, + "loss": 0.9561, + "step": 2105 + }, + { + "epoch": 1.6825174825174827, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9239, + "step": 2106 + }, + { + "epoch": 1.6833166833166833, + "grad_norm": 0.73046875, + "learning_rate": 0.0002, + "loss": 0.9257, + "step": 2107 + }, + { + "epoch": 1.684115884115884, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9215, + "step": 2108 + }, + { + "epoch": 1.684915084915085, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9515, + "step": 2109 + }, + { + "epoch": 1.6857142857142857, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9127, + "step": 2110 + }, + { + "epoch": 1.6865134865134865, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9163, + "step": 2111 + }, + { + "epoch": 1.6873126873126874, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.924, + "step": 2112 + }, + { + "epoch": 1.6881118881118882, + "grad_norm": 0.478515625, + 
"learning_rate": 0.0002, + "loss": 0.9318, + "step": 2113 + }, + { + "epoch": 1.6889110889110888, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9206, + "step": 2114 + }, + { + "epoch": 1.6897102897102898, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9255, + "step": 2115 + }, + { + "epoch": 1.6905094905094904, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9744, + "step": 2116 + }, + { + "epoch": 1.6913086913086914, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9242, + "step": 2117 + }, + { + "epoch": 1.692107892107892, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9187, + "step": 2118 + }, + { + "epoch": 1.692907092907093, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9153, + "step": 2119 + }, + { + "epoch": 1.6937062937062937, + "grad_norm": 0.71484375, + "learning_rate": 0.0002, + "loss": 0.9145, + "step": 2120 + }, + { + "epoch": 1.6945054945054945, + "grad_norm": 0.6953125, + "learning_rate": 0.0002, + "loss": 0.9229, + "step": 2121 + }, + { + "epoch": 1.6953046953046953, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9323, + "step": 2122 + }, + { + "epoch": 1.6961038961038961, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9185, + "step": 2123 + }, + { + "epoch": 1.696903096903097, + "grad_norm": 1.2265625, + "learning_rate": 0.0002, + "loss": 0.9546, + "step": 2124 + }, + { + "epoch": 1.6977022977022977, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.9146, + "step": 2125 + }, + { + "epoch": 1.6985014985014986, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9225, + "step": 2126 + }, + { + "epoch": 1.6993006993006992, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 2127 + }, + { + "epoch": 1.7000999000999002, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9261, + "step": 2128 + }, + { + "epoch": 1.7008991008991008, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.9604, + "step": 2129 + }, + { + "epoch": 1.7016983016983018, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9175, + "step": 2130 + }, + { + "epoch": 1.7024975024975024, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9197, + "step": 2131 + }, + { + "epoch": 1.7032967032967035, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9179, + "step": 2132 + }, + { + "epoch": 1.704095904095904, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9208, + "step": 2133 + }, + { + "epoch": 1.7048951048951049, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.92, + "step": 2134 + }, + { + "epoch": 1.7056943056943057, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 2135 + }, + { + "epoch": 1.7064935064935065, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9213, + "step": 2136 + }, + { + "epoch": 1.7072927072927073, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9213, + "step": 2137 + }, + { + "epoch": 1.7080919080919081, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9139, + "step": 2138 + }, + { + "epoch": 1.708891108891109, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9281, + "step": 2139 + }, + { + "epoch": 1.7096903096903096, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9219, + "step": 2140 + }, + { + "epoch": 1.7104895104895106, + "grad_norm": 0.357421875, + 
"learning_rate": 0.0002, + "loss": 0.922, + "step": 2141 + }, + { + "epoch": 1.7112887112887112, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9122, + "step": 2142 + }, + { + "epoch": 1.7120879120879122, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 2143 + }, + { + "epoch": 1.7128871128871128, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9153, + "step": 2144 + }, + { + "epoch": 1.7136863136863136, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9225, + "step": 2145 + }, + { + "epoch": 1.7144855144855145, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9251, + "step": 2146 + }, + { + "epoch": 1.7152847152847153, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9249, + "step": 2147 + }, + { + "epoch": 1.716083916083916, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9147, + "step": 2148 + }, + { + "epoch": 1.716883116883117, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9139, + "step": 2149 + }, + { + "epoch": 1.7176823176823177, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9182, + "step": 2150 + }, + { + "epoch": 1.7184815184815185, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9239, + "step": 2151 + }, + { + "epoch": 1.7192807192807193, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9208, + "step": 2152 + }, + { + "epoch": 1.72007992007992, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9119, + "step": 2153 + }, + { + "epoch": 1.720879120879121, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9161, + "step": 2154 + }, + { + "epoch": 1.7216783216783216, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9232, + "step": 2155 + }, + { + "epoch": 1.7224775224775226, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9202, + "step": 2156 + }, + { + "epoch": 1.7232767232767232, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 2157 + }, + { + "epoch": 1.724075924075924, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.927, + "step": 2158 + }, + { + "epoch": 1.7248751248751248, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9273, + "step": 2159 + }, + { + "epoch": 1.7256743256743257, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9179, + "step": 2160 + }, + { + "epoch": 1.7264735264735265, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9294, + "step": 2161 + }, + { + "epoch": 1.7272727272727273, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 2162 + }, + { + "epoch": 1.728071928071928, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9077, + "step": 2163 + }, + { + "epoch": 1.728871128871129, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9085, + "step": 2164 + }, + { + "epoch": 1.7296703296703297, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9154, + "step": 2165 + }, + { + "epoch": 1.7304695304695303, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9227, + "step": 2166 + }, + { + "epoch": 1.7312687312687314, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.923, + "step": 2167 + }, + { + "epoch": 1.732067932067932, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9218, + "step": 2168 + }, + { + "epoch": 1.732867132867133, + "grad_norm": 0.396484375, + 
"learning_rate": 0.0002, + "loss": 0.9177, + "step": 2169 + }, + { + "epoch": 1.7336663336663336, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9146, + "step": 2170 + }, + { + "epoch": 1.7344655344655344, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9132, + "step": 2171 + }, + { + "epoch": 1.7352647352647352, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9164, + "step": 2172 + }, + { + "epoch": 1.736063936063936, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 2173 + }, + { + "epoch": 1.7368631368631369, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.929, + "step": 2174 + }, + { + "epoch": 1.7376623376623377, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9109, + "step": 2175 + }, + { + "epoch": 1.7384615384615385, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9208, + "step": 2176 + }, + { + "epoch": 1.739260739260739, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9193, + "step": 2177 + }, + { + "epoch": 1.7400599400599401, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9262, + "step": 2178 + }, + { + "epoch": 1.7408591408591407, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9248, + "step": 2179 + }, + { + "epoch": 1.7416583416583418, + "grad_norm": 0.86328125, + "learning_rate": 0.0002, + "loss": 0.9198, + "step": 2180 + }, + { + "epoch": 1.7424575424575424, + "grad_norm": 1.4765625, + "learning_rate": 0.0002, + "loss": 0.9124, + "step": 2181 + }, + { + "epoch": 1.7432567432567434, + "grad_norm": 1.0234375, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 2182 + }, + { + "epoch": 1.744055944055944, + "grad_norm": 0.84375, + "learning_rate": 0.0002, + "loss": 0.9242, + "step": 2183 + }, + { + "epoch": 1.7448551448551448, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9103, + "step": 2184 + }, + { + "epoch": 1.7456543456543456, + "grad_norm": 1.078125, + "learning_rate": 0.0002, + "loss": 0.9221, + "step": 2185 + }, + { + "epoch": 1.7464535464535464, + "grad_norm": 1.09375, + "learning_rate": 0.0002, + "loss": 0.913, + "step": 2186 + }, + { + "epoch": 1.7472527472527473, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.9158, + "step": 2187 + }, + { + "epoch": 1.748051948051948, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.917, + "step": 2188 + }, + { + "epoch": 1.748851148851149, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9123, + "step": 2189 + }, + { + "epoch": 1.7496503496503495, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9217, + "step": 2190 + }, + { + "epoch": 1.7504495504495505, + "grad_norm": 0.80078125, + "learning_rate": 0.0002, + "loss": 0.9244, + "step": 2191 + }, + { + "epoch": 1.7512487512487511, + "grad_norm": 0.6953125, + "learning_rate": 0.0002, + "loss": 0.9114, + "step": 2192 + }, + { + "epoch": 1.7520479520479522, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.9177, + "step": 2193 + }, + { + "epoch": 1.7528471528471528, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 0.9157, + "step": 2194 + }, + { + "epoch": 1.7536463536463538, + "grad_norm": 0.7421875, + "learning_rate": 0.0002, + "loss": 0.9155, + "step": 2195 + }, + { + "epoch": 1.7544455544455544, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9145, + "step": 2196 + }, + { + "epoch": 1.7552447552447552, + "grad_norm": 0.73828125, + "learning_rate": 0.0002, + 
"loss": 0.9221, + "step": 2197 + }, + { + "epoch": 1.756043956043956, + "grad_norm": 0.82421875, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 2198 + }, + { + "epoch": 1.7568431568431568, + "grad_norm": 0.82421875, + "learning_rate": 0.0002, + "loss": 0.9107, + "step": 2199 + }, + { + "epoch": 1.7576423576423577, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9247, + "step": 2200 + }, + { + "epoch": 1.7584415584415585, + "grad_norm": 0.91015625, + "learning_rate": 0.0002, + "loss": 0.9219, + "step": 2201 + }, + { + "epoch": 1.7592407592407593, + "grad_norm": 0.625, + "learning_rate": 0.0002, + "loss": 0.9231, + "step": 2202 + }, + { + "epoch": 1.7600399600399599, + "grad_norm": 0.69140625, + "learning_rate": 0.0002, + "loss": 0.9155, + "step": 2203 + }, + { + "epoch": 1.760839160839161, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 2204 + }, + { + "epoch": 1.7616383616383615, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.9189, + "step": 2205 + }, + { + "epoch": 1.7624375624375626, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9189, + "step": 2206 + }, + { + "epoch": 1.7632367632367631, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.9169, + "step": 2207 + }, + { + "epoch": 1.7640359640359642, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9115, + "step": 2208 + }, + { + "epoch": 1.7648351648351648, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9204, + "step": 2209 + }, + { + "epoch": 1.7656343656343656, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9158, + "step": 2210 + }, + { + "epoch": 1.7664335664335664, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9124, + "step": 2211 + }, + { + "epoch": 1.7672327672327672, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9158, + "step": 2212 + }, + { + "epoch": 1.768031968031968, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9089, + "step": 2213 + }, + { + "epoch": 1.7688311688311689, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9684, + "step": 2214 + }, + { + "epoch": 1.7696303696303697, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9149, + "step": 2215 + }, + { + "epoch": 1.7704295704295703, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9209, + "step": 2216 + }, + { + "epoch": 1.7712287712287713, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.9176, + "step": 2217 + }, + { + "epoch": 1.772027972027972, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9216, + "step": 2218 + }, + { + "epoch": 1.772827172827173, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9183, + "step": 2219 + }, + { + "epoch": 1.7736263736263735, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 2220 + }, + { + "epoch": 1.7744255744255746, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 2221 + }, + { + "epoch": 1.7752247752247752, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9092, + "step": 2222 + }, + { + "epoch": 1.776023976023976, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9277, + "step": 2223 + }, + { + "epoch": 1.7768231768231768, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9234, + "step": 2224 + }, + { + "epoch": 1.7776223776223776, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9004, + 
"step": 2225 + }, + { + "epoch": 1.7784215784215784, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 2226 + }, + { + "epoch": 1.7792207792207793, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9158, + "step": 2227 + }, + { + "epoch": 1.78001998001998, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9209, + "step": 2228 + }, + { + "epoch": 1.7808191808191807, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 2229 + }, + { + "epoch": 1.7816183816183817, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9183, + "step": 2230 + }, + { + "epoch": 1.7824175824175823, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9152, + "step": 2231 + }, + { + "epoch": 1.7832167832167833, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.92, + "step": 2232 + }, + { + "epoch": 1.784015984015984, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9211, + "step": 2233 + }, + { + "epoch": 1.784815184815185, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9286, + "step": 2234 + }, + { + "epoch": 1.7856143856143856, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9184, + "step": 2235 + }, + { + "epoch": 1.7864135864135864, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9176, + "step": 2236 + }, + { + "epoch": 1.7872127872127872, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9165, + "step": 2237 + }, + { + "epoch": 1.788011988011988, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9108, + "step": 2238 + }, + { + "epoch": 1.7888111888111888, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9251, + "step": 2239 + }, + { + "epoch": 1.7896103896103897, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9134, + "step": 2240 + }, + { + "epoch": 1.7904095904095905, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9211, + "step": 2241 + }, + { + "epoch": 1.791208791208791, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9255, + "step": 2242 + }, + { + "epoch": 1.792007992007992, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9673, + "step": 2243 + }, + { + "epoch": 1.7928071928071927, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9183, + "step": 2244 + }, + { + "epoch": 1.7936063936063937, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9134, + "step": 2245 + }, + { + "epoch": 1.7944055944055943, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9187, + "step": 2246 + }, + { + "epoch": 1.7952047952047954, + "grad_norm": 1.578125, + "learning_rate": 0.0002, + "loss": 0.9369, + "step": 2247 + }, + { + "epoch": 1.796003996003996, + "grad_norm": 2.046875, + "learning_rate": 0.0002, + "loss": 0.9484, + "step": 2248 + }, + { + "epoch": 1.7968031968031968, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9103, + "step": 2249 + }, + { + "epoch": 1.7976023976023976, + "grad_norm": 1.3828125, + "learning_rate": 0.0002, + "loss": 0.9388, + "step": 2250 + }, + { + "epoch": 1.7984015984015984, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9157, + "step": 2251 + }, + { + "epoch": 1.7992007992007992, + "grad_norm": 0.71484375, + "learning_rate": 0.0002, + "loss": 0.9099, + "step": 2252 + }, + { + "epoch": 1.8, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9215, + "step": 2253 + }, + { + 
"epoch": 1.8007992007992009, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9204, + "step": 2254 + }, + { + "epoch": 1.8015984015984015, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 2255 + }, + { + "epoch": 1.8023976023976025, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9107, + "step": 2256 + }, + { + "epoch": 1.803196803196803, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9294, + "step": 2257 + }, + { + "epoch": 1.8039960039960041, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9263, + "step": 2258 + }, + { + "epoch": 1.8047952047952047, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9265, + "step": 2259 + }, + { + "epoch": 1.8055944055944058, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9151, + "step": 2260 + }, + { + "epoch": 1.8063936063936064, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9185, + "step": 2261 + }, + { + "epoch": 1.8071928071928072, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9125, + "step": 2262 + }, + { + "epoch": 1.807992007992008, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 2263 + }, + { + "epoch": 1.8087912087912088, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9181, + "step": 2264 + }, + { + "epoch": 1.8095904095904096, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9174, + "step": 2265 + }, + { + "epoch": 1.8103896103896104, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.9213, + "step": 2266 + }, + { + "epoch": 1.8111888111888113, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9189, + "step": 2267 + }, + { + "epoch": 1.8119880119880118, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9139, + "step": 2268 + }, + { + "epoch": 1.8127872127872129, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9163, + "step": 2269 + }, + { + "epoch": 1.8135864135864135, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9216, + "step": 2270 + }, + { + "epoch": 1.8143856143856145, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9161, + "step": 2271 + }, + { + "epoch": 1.8151848151848151, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9116, + "step": 2272 + }, + { + "epoch": 1.815984015984016, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9232, + "step": 2273 + }, + { + "epoch": 1.8167832167832167, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9271, + "step": 2274 + }, + { + "epoch": 1.8175824175824176, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9257, + "step": 2275 + }, + { + "epoch": 1.8183816183816184, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9209, + "step": 2276 + }, + { + "epoch": 1.8191808191808192, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 2277 + }, + { + "epoch": 1.81998001998002, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9169, + "step": 2278 + }, + { + "epoch": 1.8207792207792208, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9194, + "step": 2279 + }, + { + "epoch": 1.8215784215784216, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9256, + "step": 2280 + }, + { + "epoch": 1.8223776223776222, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9182, + "step": 2281 + }, + { + "epoch": 
1.8231768231768233, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9223, + "step": 2282 + }, + { + "epoch": 1.8239760239760239, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9102, + "step": 2283 + }, + { + "epoch": 1.824775224775225, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9232, + "step": 2284 + }, + { + "epoch": 1.8255744255744255, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9105, + "step": 2285 + }, + { + "epoch": 1.8263736263736263, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9263, + "step": 2286 + }, + { + "epoch": 1.8271728271728271, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9256, + "step": 2287 + }, + { + "epoch": 1.827972027972028, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9125, + "step": 2288 + }, + { + "epoch": 1.8287712287712288, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9285, + "step": 2289 + }, + { + "epoch": 1.8295704295704296, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9099, + "step": 2290 + }, + { + "epoch": 1.8303696303696304, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9233, + "step": 2291 + }, + { + "epoch": 1.8311688311688312, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9229, + "step": 2292 + }, + { + "epoch": 1.831968031968032, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9154, + "step": 2293 + }, + { + "epoch": 1.8327672327672326, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9161, + "step": 2294 + }, + { + "epoch": 1.8335664335664337, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9129, + "step": 2295 + }, + { + "epoch": 1.8343656343656343, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9183, + "step": 2296 + }, + { + "epoch": 1.8351648351648353, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9291, + "step": 2297 + }, + { + "epoch": 1.835964035964036, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9147, + "step": 2298 + }, + { + "epoch": 1.8367632367632367, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9228, + "step": 2299 + }, + { + "epoch": 1.8375624375624375, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9208, + "step": 2300 + }, + { + "epoch": 1.8383616383616384, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.916, + "step": 2301 + }, + { + "epoch": 1.8391608391608392, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 0.9211, + "step": 2302 + }, + { + "epoch": 1.83996003996004, + "grad_norm": 0.765625, + "learning_rate": 0.0002, + "loss": 0.9438, + "step": 2303 + }, + { + "epoch": 1.8407592407592408, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9186, + "step": 2304 + }, + { + "epoch": 1.8415584415584414, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 2305 + }, + { + "epoch": 1.8423576423576424, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9064, + "step": 2306 + }, + { + "epoch": 1.843156843156843, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9196, + "step": 2307 + }, + { + "epoch": 1.843956043956044, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9178, + "step": 2308 + }, + { + "epoch": 1.8447552447552447, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9155, + "step": 2309 + }, + { + "epoch": 1.8455544455544457, 
+ "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9167, + "step": 2310 + }, + { + "epoch": 1.8463536463536463, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9254, + "step": 2311 + }, + { + "epoch": 1.847152847152847, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9177, + "step": 2312 + }, + { + "epoch": 1.847952047952048, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 0.9155, + "step": 2313 + }, + { + "epoch": 1.8487512487512487, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9262, + "step": 2314 + }, + { + "epoch": 1.8495504495504496, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9181, + "step": 2315 + }, + { + "epoch": 1.8503496503496504, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.914, + "step": 2316 + }, + { + "epoch": 1.8511488511488512, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9203, + "step": 2317 + }, + { + "epoch": 1.8519480519480518, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9246, + "step": 2318 + }, + { + "epoch": 1.8527472527472528, + "grad_norm": 0.69921875, + "learning_rate": 0.0002, + "loss": 0.906, + "step": 2319 + }, + { + "epoch": 1.8535464535464534, + "grad_norm": 0.86328125, + "learning_rate": 0.0002, + "loss": 0.9195, + "step": 2320 + }, + { + "epoch": 1.8543456543456545, + "grad_norm": 1.0, + "learning_rate": 0.0002, + "loss": 0.9177, + "step": 2321 + }, + { + "epoch": 1.855144855144855, + "grad_norm": 1.0703125, + "learning_rate": 0.0002, + "loss": 0.9228, + "step": 2322 + }, + { + "epoch": 1.855944055944056, + "grad_norm": 1.15625, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 2323 + }, + { + "epoch": 1.8567432567432567, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9074, + "step": 2324 + }, + { + "epoch": 1.8575424575424575, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 0.9199, + "step": 2325 + }, + { + "epoch": 1.8583416583416583, + "grad_norm": 1.1484375, + "learning_rate": 0.0002, + "loss": 0.9168, + "step": 2326 + }, + { + "epoch": 1.8591408591408591, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 2327 + }, + { + "epoch": 1.85994005994006, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 2328 + }, + { + "epoch": 1.8607392607392608, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9214, + "step": 2329 + }, + { + "epoch": 1.8615384615384616, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9176, + "step": 2330 + }, + { + "epoch": 1.8623376623376622, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9211, + "step": 2331 + }, + { + "epoch": 1.8631368631368632, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 2332 + }, + { + "epoch": 1.8639360639360638, + "grad_norm": 0.671875, + "learning_rate": 0.0002, + "loss": 0.9271, + "step": 2333 + }, + { + "epoch": 1.8647352647352649, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9219, + "step": 2334 + }, + { + "epoch": 1.8655344655344654, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9152, + "step": 2335 + }, + { + "epoch": 1.8663336663336665, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 2336 + }, + { + "epoch": 1.867132867132867, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9204, + "step": 2337 + }, + { + "epoch": 1.867932067932068, + "grad_norm": 0.435546875, + 
"learning_rate": 0.0002, + "loss": 0.9074, + "step": 2338 + }, + { + "epoch": 1.8687312687312687, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9163, + "step": 2339 + }, + { + "epoch": 1.8695304695304695, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9173, + "step": 2340 + }, + { + "epoch": 1.8703296703296703, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9113, + "step": 2341 + }, + { + "epoch": 1.8711288711288712, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 2342 + }, + { + "epoch": 1.871928071928072, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 2343 + }, + { + "epoch": 1.8727272727272726, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9179, + "step": 2344 + }, + { + "epoch": 1.8735264735264736, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9287, + "step": 2345 + }, + { + "epoch": 1.8743256743256742, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9157, + "step": 2346 + }, + { + "epoch": 1.8751248751248752, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9149, + "step": 2347 + }, + { + "epoch": 1.8759240759240758, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 2348 + }, + { + "epoch": 1.8767232767232769, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9179, + "step": 2349 + }, + { + "epoch": 1.8775224775224775, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9188, + "step": 2350 + }, + { + "epoch": 1.8783216783216783, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9053, + "step": 2351 + }, + { + "epoch": 1.879120879120879, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.9338, + "step": 2352 + }, + { + "epoch": 1.87992007992008, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9225, + "step": 2353 + }, + { + "epoch": 1.8807192807192807, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9142, + "step": 2354 + }, + { + "epoch": 1.8815184815184816, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9066, + "step": 2355 + }, + { + "epoch": 1.8823176823176824, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9149, + "step": 2356 + }, + { + "epoch": 1.883116883116883, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9206, + "step": 2357 + }, + { + "epoch": 1.883916083916084, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9196, + "step": 2358 + }, + { + "epoch": 1.8847152847152846, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9222, + "step": 2359 + }, + { + "epoch": 1.8855144855144856, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.912, + "step": 2360 + }, + { + "epoch": 1.8863136863136862, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9252, + "step": 2361 + }, + { + "epoch": 1.8871128871128873, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 2362 + }, + { + "epoch": 1.8879120879120879, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9193, + "step": 2363 + }, + { + "epoch": 1.8887112887112887, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9199, + "step": 2364 + }, + { + "epoch": 1.8895104895104895, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 2365 + }, + { + "epoch": 1.8903096903096903, + "grad_norm": 0.474609375, + 
"learning_rate": 0.0002, + "loss": 0.9175, + "step": 2366 + }, + { + "epoch": 1.8911088911088911, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9113, + "step": 2367 + }, + { + "epoch": 1.891908091908092, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9033, + "step": 2368 + }, + { + "epoch": 1.8927072927072928, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9142, + "step": 2369 + }, + { + "epoch": 1.8935064935064934, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.916, + "step": 2370 + }, + { + "epoch": 1.8943056943056944, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 2371 + }, + { + "epoch": 1.895104895104895, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9101, + "step": 2372 + }, + { + "epoch": 1.895904095904096, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9143, + "step": 2373 + }, + { + "epoch": 1.8967032967032966, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9194, + "step": 2374 + }, + { + "epoch": 1.8975024975024977, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9227, + "step": 2375 + }, + { + "epoch": 1.8983016983016983, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.9157, + "step": 2376 + }, + { + "epoch": 1.899100899100899, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9164, + "step": 2377 + }, + { + "epoch": 1.8999000999001, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9172, + "step": 2378 + }, + { + "epoch": 1.9006993006993007, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9113, + "step": 2379 + }, + { + "epoch": 1.9014985014985015, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.918, + "step": 2380 + }, + { + "epoch": 1.9022977022977023, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9089, + "step": 2381 + }, + { + "epoch": 1.9030969030969032, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9177, + "step": 2382 + }, + { + "epoch": 1.9038961038961038, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 2383 + }, + { + "epoch": 1.9046953046953048, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 2384 + }, + { + "epoch": 1.9054945054945054, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9138, + "step": 2385 + }, + { + "epoch": 1.9062937062937064, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9166, + "step": 2386 + }, + { + "epoch": 1.907092907092907, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9135, + "step": 2387 + }, + { + "epoch": 1.907892107892108, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9176, + "step": 2388 + }, + { + "epoch": 1.9086913086913087, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9208, + "step": 2389 + }, + { + "epoch": 1.9094905094905095, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9184, + "step": 2390 + }, + { + "epoch": 1.9102897102897103, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9243, + "step": 2391 + }, + { + "epoch": 1.911088911088911, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9204, + "step": 2392 + }, + { + "epoch": 1.911888111888112, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9325, + "step": 2393 + }, + { + "epoch": 1.9126873126873127, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + 
"loss": 0.9142, + "step": 2394 + }, + { + "epoch": 1.9134865134865136, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9116, + "step": 2395 + }, + { + "epoch": 1.9142857142857141, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9049, + "step": 2396 + }, + { + "epoch": 1.9150849150849152, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9124, + "step": 2397 + }, + { + "epoch": 1.9158841158841158, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9145, + "step": 2398 + }, + { + "epoch": 1.9166833166833168, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9229, + "step": 2399 + }, + { + "epoch": 1.9174825174825174, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9168, + "step": 2400 + }, + { + "epoch": 1.9182817182817182, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9183, + "step": 2401 + }, + { + "epoch": 1.919080919080919, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9093, + "step": 2402 + }, + { + "epoch": 1.9198801198801199, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9113, + "step": 2403 + }, + { + "epoch": 1.9206793206793207, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.917, + "step": 2404 + }, + { + "epoch": 1.9214785214785215, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9155, + "step": 2405 + }, + { + "epoch": 1.9222777222777223, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.906, + "step": 2406 + }, + { + "epoch": 1.9230769230769231, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9196, + "step": 2407 + }, + { + "epoch": 1.923876123876124, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9253, + "step": 2408 + }, + { + "epoch": 1.9246753246753245, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 2409 + }, + { + "epoch": 1.9254745254745256, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9149, + "step": 2410 + }, + { + "epoch": 1.9262737262737262, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9184, + "step": 2411 + }, + { + "epoch": 1.9270729270729272, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9213, + "step": 2412 + }, + { + "epoch": 1.9278721278721278, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.922, + "step": 2413 + }, + { + "epoch": 1.9286713286713286, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9161, + "step": 2414 + }, + { + "epoch": 1.9294705294705294, + "grad_norm": 0.68359375, + "learning_rate": 0.0002, + "loss": 0.9291, + "step": 2415 + }, + { + "epoch": 1.9302697302697303, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9139, + "step": 2416 + }, + { + "epoch": 1.931068931068931, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9146, + "step": 2417 + }, + { + "epoch": 1.9318681318681319, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9148, + "step": 2418 + }, + { + "epoch": 1.9326673326673327, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9149, + "step": 2419 + }, + { + "epoch": 1.9334665334665335, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9112, + "step": 2420 + }, + { + "epoch": 1.9342657342657343, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9182, + "step": 2421 + }, + { + "epoch": 1.935064935064935, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9151, + 
"step": 2422 + }, + { + "epoch": 1.935864135864136, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9173, + "step": 2423 + }, + { + "epoch": 1.9366633366633366, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 2424 + }, + { + "epoch": 1.9374625374625376, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9149, + "step": 2425 + }, + { + "epoch": 1.9382617382617382, + "grad_norm": 0.7109375, + "learning_rate": 0.0002, + "loss": 0.9182, + "step": 2426 + }, + { + "epoch": 1.939060939060939, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 2427 + }, + { + "epoch": 1.9398601398601398, + "grad_norm": 1.0, + "learning_rate": 0.0002, + "loss": 0.9355, + "step": 2428 + }, + { + "epoch": 1.9406593406593406, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 2429 + }, + { + "epoch": 1.9414585414585415, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.96, + "step": 2430 + }, + { + "epoch": 1.9422577422577423, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9152, + "step": 2431 + }, + { + "epoch": 1.943056943056943, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.918, + "step": 2432 + }, + { + "epoch": 1.9438561438561437, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9236, + "step": 2433 + }, + { + "epoch": 1.9446553446553447, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9135, + "step": 2434 + }, + { + "epoch": 1.9454545454545453, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.921, + "step": 2435 + }, + { + "epoch": 1.9462537462537464, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9146, + "step": 2436 + }, + { + "epoch": 1.947052947052947, + "grad_norm": 0.625, + "learning_rate": 0.0002, + "loss": 0.9241, + "step": 2437 + }, + { + "epoch": 1.947852147852148, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.914, + "step": 2438 + }, + { + "epoch": 1.9486513486513486, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9217, + "step": 2439 + }, + { + "epoch": 1.9494505494505494, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9269, + "step": 2440 + }, + { + "epoch": 1.9502497502497502, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9107, + "step": 2441 + }, + { + "epoch": 1.951048951048951, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9136, + "step": 2442 + }, + { + "epoch": 1.9518481518481519, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 2443 + }, + { + "epoch": 1.9526473526473527, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9154, + "step": 2444 + }, + { + "epoch": 1.9534465534465535, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9191, + "step": 2445 + }, + { + "epoch": 1.954245754245754, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9145, + "step": 2446 + }, + { + "epoch": 1.9550449550449551, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 2447 + }, + { + "epoch": 1.9558441558441557, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9231, + "step": 2448 + }, + { + "epoch": 1.9566433566433568, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 2449 + }, + { + "epoch": 1.9574425574425574, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9283, + "step": 2450 + }, + { + 
"epoch": 1.9582417582417584, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9119, + "step": 2451 + }, + { + "epoch": 1.959040959040959, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9235, + "step": 2452 + }, + { + "epoch": 1.9598401598401598, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 2453 + }, + { + "epoch": 1.9606393606393606, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9185, + "step": 2454 + }, + { + "epoch": 1.9614385614385614, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9213, + "step": 2455 + }, + { + "epoch": 1.9622377622377623, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 2456 + }, + { + "epoch": 1.963036963036963, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.911, + "step": 2457 + }, + { + "epoch": 1.9638361638361639, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9143, + "step": 2458 + }, + { + "epoch": 1.9646353646353645, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 2459 + }, + { + "epoch": 1.9654345654345655, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9116, + "step": 2460 + }, + { + "epoch": 1.9662337662337661, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 2461 + }, + { + "epoch": 1.9670329670329672, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 2462 + }, + { + "epoch": 1.9678321678321677, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9088, + "step": 2463 + }, + { + "epoch": 1.9686313686313688, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 2464 + }, + { + "epoch": 1.9694305694305694, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9129, + "step": 2465 + }, + { + "epoch": 1.9702297702297702, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 2466 + }, + { + "epoch": 1.971028971028971, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 2467 + }, + { + "epoch": 1.9718281718281718, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9114, + "step": 2468 + }, + { + "epoch": 1.9726273726273726, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.918, + "step": 2469 + }, + { + "epoch": 1.9734265734265735, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.914, + "step": 2470 + }, + { + "epoch": 1.9742257742257743, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.927, + "step": 2471 + }, + { + "epoch": 1.9750249750249749, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9089, + "step": 2472 + }, + { + "epoch": 1.975824175824176, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9116, + "step": 2473 + }, + { + "epoch": 1.9766233766233765, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9225, + "step": 2474 + }, + { + "epoch": 1.9774225774225775, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9131, + "step": 2475 + }, + { + "epoch": 1.9782217782217781, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 2476 + }, + { + "epoch": 1.9790209790209792, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9139, + "step": 2477 + }, + { + "epoch": 1.9798201798201798, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9127, + "step": 2478 + }, + { + 
"epoch": 1.9806193806193806, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9058, + "step": 2479 + }, + { + "epoch": 1.9814185814185814, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 2480 + }, + { + "epoch": 1.9822177822177822, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.915, + "step": 2481 + }, + { + "epoch": 1.983016983016983, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 2482 + }, + { + "epoch": 1.9838161838161839, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9132, + "step": 2483 + }, + { + "epoch": 1.9846153846153847, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 0.9317, + "step": 2484 + }, + { + "epoch": 1.9854145854145853, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9105, + "step": 2485 + }, + { + "epoch": 1.9862137862137863, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9183, + "step": 2486 + }, + { + "epoch": 1.987012987012987, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9167, + "step": 2487 + }, + { + "epoch": 1.987812187812188, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9174, + "step": 2488 + }, + { + "epoch": 1.9886113886113885, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9136, + "step": 2489 + }, + { + "epoch": 1.9894105894105896, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.915, + "step": 2490 + }, + { + "epoch": 1.9902097902097902, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9201, + "step": 2491 + }, + { + "epoch": 1.991008991008991, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 2492 + }, + { + "epoch": 1.9918081918081918, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9112, + "step": 2493 + }, + { + "epoch": 1.9926073926073926, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 2494 + }, + { + "epoch": 1.9934065934065934, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 2495 + }, + { + "epoch": 1.9942057942057942, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9102, + "step": 2496 + }, + { + "epoch": 1.995004995004995, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9106, + "step": 2497 + }, + { + "epoch": 1.9958041958041957, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9085, + "step": 2498 + }, + { + "epoch": 1.9966033966033967, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9089, + "step": 2499 + }, + { + "epoch": 1.9974025974025973, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9245, + "step": 2500 + }, + { + "epoch": 1.9982017982017983, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9193, + "step": 2501 + }, + { + "epoch": 1.999000999000999, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.927, + "step": 2502 + }, + { + "epoch": 1.9998001998002, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9108, + "step": 2503 + }, + { + "epoch": 2.0, + "grad_norm": 0.1484375, + "learning_rate": 0.0002, + "loss": 0.2242, + "step": 2504 + }, + { + "epoch": 2.0007992007992006, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.9162, + "step": 2505 + }, + { + "epoch": 2.0015984015984016, + "grad_norm": 0.890625, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 2506 + }, + { + "epoch": 2.0023976023976022, + 
"grad_norm": 1.4765625, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 2507 + }, + { + "epoch": 2.0031968031968033, + "grad_norm": 1.0, + "learning_rate": 0.0002, + "loss": 0.9228, + "step": 2508 + }, + { + "epoch": 2.003996003996004, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.916, + "step": 2509 + }, + { + "epoch": 2.004795204795205, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 0.9169, + "step": 2510 + }, + { + "epoch": 2.0055944055944055, + "grad_norm": 0.97265625, + "learning_rate": 0.0002, + "loss": 0.9162, + "step": 2511 + }, + { + "epoch": 2.0063936063936065, + "grad_norm": 1.2265625, + "learning_rate": 0.0002, + "loss": 0.911, + "step": 2512 + }, + { + "epoch": 2.007192807192807, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 2513 + }, + { + "epoch": 2.007992007992008, + "grad_norm": 0.890625, + "learning_rate": 0.0002, + "loss": 0.9199, + "step": 2514 + }, + { + "epoch": 2.0087912087912088, + "grad_norm": 1.1953125, + "learning_rate": 0.0002, + "loss": 0.9166, + "step": 2515 + }, + { + "epoch": 2.00959040959041, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9068, + "step": 2516 + }, + { + "epoch": 2.0103896103896104, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.917, + "step": 2517 + }, + { + "epoch": 2.011188811188811, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.9166, + "step": 2518 + }, + { + "epoch": 2.011988011988012, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.912, + "step": 2519 + }, + { + "epoch": 2.0127872127872126, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.915, + "step": 2520 + }, + { + "epoch": 2.0135864135864137, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 2521 + }, + { + "epoch": 2.0143856143856143, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.92, + "step": 2522 + }, + { + "epoch": 2.0151848151848153, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9187, + "step": 2523 + }, + { + "epoch": 2.015984015984016, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9106, + "step": 2524 + }, + { + "epoch": 2.016783216783217, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.911, + "step": 2525 + }, + { + "epoch": 2.0175824175824175, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 2526 + }, + { + "epoch": 2.0183816183816186, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9154, + "step": 2527 + }, + { + "epoch": 2.019180819180819, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 2528 + }, + { + "epoch": 2.01998001998002, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9116, + "step": 2529 + }, + { + "epoch": 2.020779220779221, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 2530 + }, + { + "epoch": 2.0215784215784214, + "grad_norm": 0.97265625, + "learning_rate": 0.0002, + "loss": 0.9415, + "step": 2531 + }, + { + "epoch": 2.0223776223776224, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9127, + "step": 2532 + }, + { + "epoch": 2.023176823176823, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 2533 + }, + { + "epoch": 2.023976023976024, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9161, + "step": 2534 + }, + { + "epoch": 2.0247752247752246, + "grad_norm": 0.42578125, + 
"learning_rate": 0.0002, + "loss": 0.9129, + "step": 2535 + }, + { + "epoch": 2.0255744255744257, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9163, + "step": 2536 + }, + { + "epoch": 2.0263736263736263, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 2537 + }, + { + "epoch": 2.0271728271728273, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.916, + "step": 2538 + }, + { + "epoch": 2.027972027972028, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 2539 + }, + { + "epoch": 2.028771228771229, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9164, + "step": 2540 + }, + { + "epoch": 2.0295704295704295, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 2541 + }, + { + "epoch": 2.0303696303696306, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9181, + "step": 2542 + }, + { + "epoch": 2.031168831168831, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9058, + "step": 2543 + }, + { + "epoch": 2.0319680319680318, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9206, + "step": 2544 + }, + { + "epoch": 2.032767232767233, + "grad_norm": 0.703125, + "learning_rate": 0.0002, + "loss": 0.9131, + "step": 2545 + }, + { + "epoch": 2.0335664335664334, + "grad_norm": 0.9609375, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 2546 + }, + { + "epoch": 2.0343656343656344, + "grad_norm": 1.1640625, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 2547 + }, + { + "epoch": 2.035164835164835, + "grad_norm": 0.80859375, + "learning_rate": 0.0002, + "loss": 0.9119, + "step": 2548 + }, + { + "epoch": 2.035964035964036, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9173, + "step": 2549 + }, + { + "epoch": 2.0367632367632367, + "grad_norm": 1.015625, + "learning_rate": 0.0002, + "loss": 0.911, + "step": 2550 + }, + { + "epoch": 2.0375624375624377, + "grad_norm": 1.0078125, + "learning_rate": 0.0002, + "loss": 0.9193, + "step": 2551 + }, + { + "epoch": 2.0383616383616383, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9235, + "step": 2552 + }, + { + "epoch": 2.0391608391608393, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.9088, + "step": 2553 + }, + { + "epoch": 2.03996003996004, + "grad_norm": 0.80859375, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 2554 + }, + { + "epoch": 2.040759240759241, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.925, + "step": 2555 + }, + { + "epoch": 2.0415584415584416, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9222, + "step": 2556 + }, + { + "epoch": 2.042357642357642, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9229, + "step": 2557 + }, + { + "epoch": 2.043156843156843, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 2558 + }, + { + "epoch": 2.043956043956044, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9281, + "step": 2559 + }, + { + "epoch": 2.044755244755245, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9139, + "step": 2560 + }, + { + "epoch": 2.0455544455544454, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9178, + "step": 2561 + }, + { + "epoch": 2.0463536463536465, + "grad_norm": 0.6875, + "learning_rate": 0.0002, + "loss": 0.9324, + "step": 2562 + }, + { + "epoch": 2.047152847152847, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + 
"loss": 0.91, + "step": 2563 + }, + { + "epoch": 2.047952047952048, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.916, + "step": 2564 + }, + { + "epoch": 2.0487512487512487, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9153, + "step": 2565 + }, + { + "epoch": 2.0495504495504497, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 2566 + }, + { + "epoch": 2.0503496503496503, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9116, + "step": 2567 + }, + { + "epoch": 2.0511488511488514, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 2568 + }, + { + "epoch": 2.051948051948052, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9215, + "step": 2569 + }, + { + "epoch": 2.0527472527472526, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9173, + "step": 2570 + }, + { + "epoch": 2.0535464535464536, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9154, + "step": 2571 + }, + { + "epoch": 2.054345654345654, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 2572 + }, + { + "epoch": 2.0551448551448552, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9156, + "step": 2573 + }, + { + "epoch": 2.055944055944056, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 2574 + }, + { + "epoch": 2.056743256743257, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.918, + "step": 2575 + }, + { + "epoch": 2.0575424575424575, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.922, + "step": 2576 + }, + { + "epoch": 2.0583416583416585, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 2577 + }, + { + "epoch": 2.059140859140859, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 2578 + }, + { + "epoch": 2.05994005994006, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9163, + "step": 2579 + }, + { + "epoch": 2.0607392607392607, + "grad_norm": 0.69140625, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 2580 + }, + { + "epoch": 2.0615384615384613, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9139, + "step": 2581 + }, + { + "epoch": 2.0623376623376624, + "grad_norm": 0.6875, + "learning_rate": 0.0002, + "loss": 0.9257, + "step": 2582 + }, + { + "epoch": 2.063136863136863, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 0.911, + "step": 2583 + }, + { + "epoch": 2.063936063936064, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 2584 + }, + { + "epoch": 2.0647352647352646, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9157, + "step": 2585 + }, + { + "epoch": 2.0655344655344656, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 2586 + }, + { + "epoch": 2.066333666333666, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9143, + "step": 2587 + }, + { + "epoch": 2.0671328671328673, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.9142, + "step": 2588 + }, + { + "epoch": 2.067932067932068, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9135, + "step": 2589 + }, + { + "epoch": 2.068731268731269, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 2590 + }, + { + "epoch": 2.0695304695304695, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9127, 
+ "step": 2591 + }, + { + "epoch": 2.0703296703296705, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 2592 + }, + { + "epoch": 2.071128871128871, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 2593 + }, + { + "epoch": 2.071928071928072, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9206, + "step": 2594 + }, + { + "epoch": 2.0727272727272728, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9097, + "step": 2595 + }, + { + "epoch": 2.0735264735264733, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9169, + "step": 2596 + }, + { + "epoch": 2.0743256743256744, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9168, + "step": 2597 + }, + { + "epoch": 2.075124875124875, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9196, + "step": 2598 + }, + { + "epoch": 2.075924075924076, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9072, + "step": 2599 + }, + { + "epoch": 2.0767232767232766, + "grad_norm": 0.65625, + "learning_rate": 0.0002, + "loss": 0.9058, + "step": 2600 + }, + { + "epoch": 2.0775224775224777, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9152, + "step": 2601 + }, + { + "epoch": 2.0783216783216782, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9124, + "step": 2602 + }, + { + "epoch": 2.0791208791208793, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9151, + "step": 2603 + }, + { + "epoch": 2.07992007992008, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9215, + "step": 2604 + }, + { + "epoch": 2.080719280719281, + "grad_norm": 0.7421875, + "learning_rate": 0.0002, + "loss": 0.9089, + "step": 2605 + }, + { + "epoch": 2.0815184815184815, + "grad_norm": 0.99609375, + "learning_rate": 0.0002, + "loss": 0.9126, + "step": 2606 + }, + { + "epoch": 2.082317682317682, + "grad_norm": 0.9609375, + "learning_rate": 0.0002, + "loss": 0.9101, + "step": 2607 + }, + { + "epoch": 2.083116883116883, + "grad_norm": 0.625, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 2608 + }, + { + "epoch": 2.0839160839160837, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.915, + "step": 2609 + }, + { + "epoch": 2.0847152847152848, + "grad_norm": 0.78125, + "learning_rate": 0.0002, + "loss": 0.9135, + "step": 2610 + }, + { + "epoch": 2.0855144855144854, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 2611 + }, + { + "epoch": 2.0863136863136864, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 0.9173, + "step": 2612 + }, + { + "epoch": 2.087112887112887, + "grad_norm": 0.78125, + "learning_rate": 0.0002, + "loss": 0.906, + "step": 2613 + }, + { + "epoch": 2.087912087912088, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9163, + "step": 2614 + }, + { + "epoch": 2.0887112887112886, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 2615 + }, + { + "epoch": 2.0895104895104897, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9136, + "step": 2616 + }, + { + "epoch": 2.0903096903096903, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9134, + "step": 2617 + }, + { + "epoch": 2.0911088911088913, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 2618 + }, + { + "epoch": 2.091908091908092, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 2619 + }, + { + "epoch": 
2.0927072927072925, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.912, + "step": 2620 + }, + { + "epoch": 2.0935064935064935, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9143, + "step": 2621 + }, + { + "epoch": 2.094305694305694, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9175, + "step": 2622 + }, + { + "epoch": 2.095104895104895, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 2623 + }, + { + "epoch": 2.0959040959040958, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9049, + "step": 2624 + }, + { + "epoch": 2.096703296703297, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9112, + "step": 2625 + }, + { + "epoch": 2.0975024975024974, + "grad_norm": 0.890625, + "learning_rate": 0.0002, + "loss": 0.9425, + "step": 2626 + }, + { + "epoch": 2.0983016983016984, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9184, + "step": 2627 + }, + { + "epoch": 2.099100899100899, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 2628 + }, + { + "epoch": 2.0999000999001, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9163, + "step": 2629 + }, + { + "epoch": 2.1006993006993007, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9135, + "step": 2630 + }, + { + "epoch": 2.1014985014985017, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 2631 + }, + { + "epoch": 2.1022977022977023, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9183, + "step": 2632 + }, + { + "epoch": 2.103096903096903, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9102, + "step": 2633 + }, + { + "epoch": 2.103896103896104, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9085, + "step": 2634 + }, + { + "epoch": 2.1046953046953045, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9049, + "step": 2635 + }, + { + "epoch": 2.1054945054945056, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9074, + "step": 2636 + }, + { + "epoch": 2.106293706293706, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9115, + "step": 2637 + }, + { + "epoch": 2.107092907092907, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9116, + "step": 2638 + }, + { + "epoch": 2.107892107892108, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 2639 + }, + { + "epoch": 2.108691308691309, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 2640 + }, + { + "epoch": 2.1094905094905094, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9138, + "step": 2641 + }, + { + "epoch": 2.1102897102897105, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9162, + "step": 2642 + }, + { + "epoch": 2.111088911088911, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 2643 + }, + { + "epoch": 2.111888111888112, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9176, + "step": 2644 + }, + { + "epoch": 2.1126873126873127, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9069, + "step": 2645 + }, + { + "epoch": 2.1134865134865133, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9139, + "step": 2646 + }, + { + "epoch": 2.1142857142857143, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 2647 + }, + { + "epoch": 2.115084915084915, + 
"grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9049, + "step": 2648 + }, + { + "epoch": 2.115884115884116, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 2649 + }, + { + "epoch": 2.1166833166833166, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 2650 + }, + { + "epoch": 2.1174825174825176, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9127, + "step": 2651 + }, + { + "epoch": 2.118281718281718, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9119, + "step": 2652 + }, + { + "epoch": 2.1190809190809192, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9098, + "step": 2653 + }, + { + "epoch": 2.11988011988012, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.917, + "step": 2654 + }, + { + "epoch": 2.120679320679321, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 2655 + }, + { + "epoch": 2.1214785214785215, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9166, + "step": 2656 + }, + { + "epoch": 2.1222777222777225, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 2657 + }, + { + "epoch": 2.123076923076923, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9178, + "step": 2658 + }, + { + "epoch": 2.1238761238761237, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9148, + "step": 2659 + }, + { + "epoch": 2.1246753246753247, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9124, + "step": 2660 + }, + { + "epoch": 2.1254745254745253, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 2661 + }, + { + "epoch": 2.1262737262737263, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9131, + "step": 2662 + }, + { + "epoch": 2.127072927072927, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9115, + "step": 2663 + }, + { + "epoch": 2.127872127872128, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 2664 + }, + { + "epoch": 2.1286713286713286, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 2665 + }, + { + "epoch": 2.1294705294705296, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9106, + "step": 2666 + }, + { + "epoch": 2.13026973026973, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9162, + "step": 2667 + }, + { + "epoch": 2.1310689310689312, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9131, + "step": 2668 + }, + { + "epoch": 2.131868131868132, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9165, + "step": 2669 + }, + { + "epoch": 2.1326673326673324, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9121, + "step": 2670 + }, + { + "epoch": 2.1334665334665335, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 2671 + }, + { + "epoch": 2.134265734265734, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 2672 + }, + { + "epoch": 2.135064935064935, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9168, + "step": 2673 + }, + { + "epoch": 2.1358641358641357, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.913, + "step": 2674 + }, + { + "epoch": 2.1366633366633367, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9068, + "step": 2675 + }, + { + "epoch": 2.1374625374625373, + "grad_norm": 0.37109375, + 
"learning_rate": 0.0002, + "loss": 0.9048, + "step": 2676 + }, + { + "epoch": 2.1382617382617384, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 2677 + }, + { + "epoch": 2.139060939060939, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.915, + "step": 2678 + }, + { + "epoch": 2.13986013986014, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 2679 + }, + { + "epoch": 2.1406593406593406, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9127, + "step": 2680 + }, + { + "epoch": 2.1414585414585416, + "grad_norm": 0.6953125, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 2681 + }, + { + "epoch": 2.1422577422577422, + "grad_norm": 0.6640625, + "learning_rate": 0.0002, + "loss": 0.917, + "step": 2682 + }, + { + "epoch": 2.1430569430569433, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 2683 + }, + { + "epoch": 2.143856143856144, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9101, + "step": 2684 + }, + { + "epoch": 2.1446553446553445, + "grad_norm": 0.78125, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 2685 + }, + { + "epoch": 2.1454545454545455, + "grad_norm": 1.140625, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 2686 + }, + { + "epoch": 2.146253746253746, + "grad_norm": 0.7578125, + "learning_rate": 0.0002, + "loss": 0.9186, + "step": 2687 + }, + { + "epoch": 2.147052947052947, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9142, + "step": 2688 + }, + { + "epoch": 2.1478521478521477, + "grad_norm": 0.8828125, + "learning_rate": 0.0002, + "loss": 0.9143, + "step": 2689 + }, + { + "epoch": 2.1486513486513488, + "grad_norm": 0.92578125, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 2690 + }, + { + "epoch": 2.1494505494505494, + "grad_norm": 0.68359375, + "learning_rate": 0.0002, + "loss": 0.9113, + "step": 2691 + }, + { + "epoch": 2.1502497502497504, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9097, + "step": 2692 + }, + { + "epoch": 2.151048951048951, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9214, + "step": 2693 + }, + { + "epoch": 2.151848151848152, + "grad_norm": 0.64453125, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 2694 + }, + { + "epoch": 2.1526473526473526, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 2695 + }, + { + "epoch": 2.1534465534465532, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9099, + "step": 2696 + }, + { + "epoch": 2.1542457542457543, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.9182, + "step": 2697 + }, + { + "epoch": 2.155044955044955, + "grad_norm": 0.69921875, + "learning_rate": 0.0002, + "loss": 0.9095, + "step": 2698 + }, + { + "epoch": 2.155844155844156, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 2699 + }, + { + "epoch": 2.1566433566433565, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 2700 + }, + { + "epoch": 2.1574425574425575, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 2701 + }, + { + "epoch": 2.158241758241758, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 2702 + }, + { + "epoch": 2.159040959040959, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 2703 + }, + { + "epoch": 2.1598401598401598, + "grad_norm": 0.56640625, + "learning_rate": 
0.0002, + "loss": 0.9102, + "step": 2704 + }, + { + "epoch": 2.160639360639361, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9093, + "step": 2705 + }, + { + "epoch": 2.1614385614385614, + "grad_norm": 0.8984375, + "learning_rate": 0.0002, + "loss": 0.9231, + "step": 2706 + }, + { + "epoch": 2.1622377622377624, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 2707 + }, + { + "epoch": 2.163036963036963, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9161, + "step": 2708 + }, + { + "epoch": 2.163836163836164, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.91, + "step": 2709 + }, + { + "epoch": 2.1646353646353647, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9134, + "step": 2710 + }, + { + "epoch": 2.1654345654345653, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9103, + "step": 2711 + }, + { + "epoch": 2.1662337662337663, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9122, + "step": 2712 + }, + { + "epoch": 2.167032967032967, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9098, + "step": 2713 + }, + { + "epoch": 2.167832167832168, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9112, + "step": 2714 + }, + { + "epoch": 2.1686313686313685, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 2715 + }, + { + "epoch": 2.1694305694305696, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9138, + "step": 2716 + }, + { + "epoch": 2.17022977022977, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9203, + "step": 2717 + }, + { + "epoch": 2.171028971028971, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9222, + "step": 2718 + }, + { + "epoch": 2.171828171828172, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 2719 + }, + { + "epoch": 2.172627372627373, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 2720 + }, + { + "epoch": 2.1734265734265734, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9166, + "step": 2721 + }, + { + "epoch": 2.174225774225774, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9075, + "step": 2722 + }, + { + "epoch": 2.175024975024975, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9121, + "step": 2723 + }, + { + "epoch": 2.1758241758241756, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 2724 + }, + { + "epoch": 2.1766233766233767, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9093, + "step": 2725 + }, + { + "epoch": 2.1774225774225773, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9126, + "step": 2726 + }, + { + "epoch": 2.1782217782217783, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9056, + "step": 2727 + }, + { + "epoch": 2.179020979020979, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.903, + "step": 2728 + }, + { + "epoch": 2.17982017982018, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9077, + "step": 2729 + }, + { + "epoch": 2.1806193806193805, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 2730 + }, + { + "epoch": 2.1814185814185816, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9066, + "step": 2731 + }, + { + "epoch": 2.182217782217782, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 
2732 + }, + { + "epoch": 2.183016983016983, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9121, + "step": 2733 + }, + { + "epoch": 2.183816183816184, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 2734 + }, + { + "epoch": 2.184615384615385, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9075, + "step": 2735 + }, + { + "epoch": 2.1854145854145854, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9143, + "step": 2736 + }, + { + "epoch": 2.186213786213786, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9145, + "step": 2737 + }, + { + "epoch": 2.187012987012987, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 2738 + }, + { + "epoch": 2.1878121878121877, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 2739 + }, + { + "epoch": 2.1886113886113887, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9124, + "step": 2740 + }, + { + "epoch": 2.1894105894105893, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.915, + "step": 2741 + }, + { + "epoch": 2.1902097902097903, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 2742 + }, + { + "epoch": 2.191008991008991, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 2743 + }, + { + "epoch": 2.191808191808192, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 2744 + }, + { + "epoch": 2.1926073926073926, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 2745 + }, + { + "epoch": 2.1934065934065936, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9227, + "step": 2746 + }, + { + "epoch": 2.194205794205794, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 2747 + }, + { + "epoch": 2.195004995004995, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9143, + "step": 2748 + }, + { + "epoch": 2.195804195804196, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 2749 + }, + { + "epoch": 2.1966033966033964, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9132, + "step": 2750 + }, + { + "epoch": 2.1974025974025975, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9156, + "step": 2751 + }, + { + "epoch": 2.198201798201798, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9122, + "step": 2752 + }, + { + "epoch": 2.199000999000999, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9175, + "step": 2753 + }, + { + "epoch": 2.1998001998001997, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.915, + "step": 2754 + }, + { + "epoch": 2.2005994005994007, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 2755 + }, + { + "epoch": 2.2013986013986013, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9155, + "step": 2756 + }, + { + "epoch": 2.2021978021978024, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9068, + "step": 2757 + }, + { + "epoch": 2.202997002997003, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 2758 + }, + { + "epoch": 2.203796203796204, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9141, + "step": 2759 + }, + { + "epoch": 2.2045954045954046, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9106, + "step": 2760 + }, + { + "epoch": 
2.205394605394605, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.915, + "step": 2761 + }, + { + "epoch": 2.2061938061938062, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 2762 + }, + { + "epoch": 2.206993006993007, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9112, + "step": 2763 + }, + { + "epoch": 2.207792207792208, + "grad_norm": 1.75, + "learning_rate": 0.0002, + "loss": 0.9361, + "step": 2764 + }, + { + "epoch": 2.2085914085914085, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 2765 + }, + { + "epoch": 2.2093906093906095, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 2766 + }, + { + "epoch": 2.21018981018981, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 2767 + }, + { + "epoch": 2.210989010989011, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9139, + "step": 2768 + }, + { + "epoch": 2.2117882117882117, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9159, + "step": 2769 + }, + { + "epoch": 2.2125874125874128, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 2770 + }, + { + "epoch": 2.2133866133866134, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 2771 + }, + { + "epoch": 2.2141858141858144, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 2772 + }, + { + "epoch": 2.214985014985015, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 2773 + }, + { + "epoch": 2.2157842157842156, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 2774 + }, + { + "epoch": 2.2165834165834166, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 2775 + }, + { + "epoch": 2.217382617382617, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 2776 + }, + { + "epoch": 2.2181818181818183, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9228, + "step": 2777 + }, + { + "epoch": 2.218981018981019, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9208, + "step": 2778 + }, + { + "epoch": 2.21978021978022, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 2779 + }, + { + "epoch": 2.2205794205794205, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 2780 + }, + { + "epoch": 2.2213786213786215, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9101, + "step": 2781 + }, + { + "epoch": 2.222177822177822, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9126, + "step": 2782 + }, + { + "epoch": 2.222977022977023, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 2783 + }, + { + "epoch": 2.2237762237762237, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.914, + "step": 2784 + }, + { + "epoch": 2.2245754245754243, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9136, + "step": 2785 + }, + { + "epoch": 2.2253746253746254, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 2786 + }, + { + "epoch": 2.226173826173826, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 2787 + }, + { + "epoch": 2.226973026973027, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.914, + "step": 2788 + }, + { + "epoch": 2.2277722277722276, + 
"grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9108, + "step": 2789 + }, + { + "epoch": 2.2285714285714286, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9101, + "step": 2790 + }, + { + "epoch": 2.2293706293706292, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 2791 + }, + { + "epoch": 2.2301698301698303, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 2792 + }, + { + "epoch": 2.230969030969031, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 2793 + }, + { + "epoch": 2.231768231768232, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9126, + "step": 2794 + }, + { + "epoch": 2.2325674325674325, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 2795 + }, + { + "epoch": 2.2333666333666335, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 2796 + }, + { + "epoch": 2.234165834165834, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 2797 + }, + { + "epoch": 2.234965034965035, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.915, + "step": 2798 + }, + { + "epoch": 2.2357642357642358, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 2799 + }, + { + "epoch": 2.2365634365634364, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 2800 + }, + { + "epoch": 2.2373626373626374, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 2801 + }, + { + "epoch": 2.238161838161838, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9184, + "step": 2802 + }, + { + "epoch": 2.238961038961039, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9102, + "step": 2803 + }, + { + "epoch": 2.2397602397602396, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 2804 + }, + { + "epoch": 2.2405594405594407, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9166, + "step": 2805 + }, + { + "epoch": 2.2413586413586413, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9129, + "step": 2806 + }, + { + "epoch": 2.2421578421578423, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9213, + "step": 2807 + }, + { + "epoch": 2.242957042957043, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 2808 + }, + { + "epoch": 2.243756243756244, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.905, + "step": 2809 + }, + { + "epoch": 2.2445554445554445, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9156, + "step": 2810 + }, + { + "epoch": 2.245354645354645, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 2811 + }, + { + "epoch": 2.246153846153846, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 2812 + }, + { + "epoch": 2.2469530469530468, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9194, + "step": 2813 + }, + { + "epoch": 2.247752247752248, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9064, + "step": 2814 + }, + { + "epoch": 2.2485514485514484, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9064, + "step": 2815 + }, + { + "epoch": 2.2493506493506494, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9146, + "step": 2816 + }, + { + "epoch": 2.25014985014985, + "grad_norm": 0.41015625, + 
"learning_rate": 0.0002, + "loss": 0.9078, + "step": 2817 + }, + { + "epoch": 2.250949050949051, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9088, + "step": 2818 + }, + { + "epoch": 2.2517482517482517, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9127, + "step": 2819 + }, + { + "epoch": 2.2525474525474527, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 2820 + }, + { + "epoch": 2.2533466533466533, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9056, + "step": 2821 + }, + { + "epoch": 2.2541458541458543, + "grad_norm": 1.15625, + "learning_rate": 0.0002, + "loss": 0.9298, + "step": 2822 + }, + { + "epoch": 2.254945054945055, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 2823 + }, + { + "epoch": 2.255744255744256, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 2824 + }, + { + "epoch": 2.2565434565434566, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9132, + "step": 2825 + }, + { + "epoch": 2.257342657342657, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9123, + "step": 2826 + }, + { + "epoch": 2.258141858141858, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9256, + "step": 2827 + }, + { + "epoch": 2.258941058941059, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9201, + "step": 2828 + }, + { + "epoch": 2.25974025974026, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9072, + "step": 2829 + }, + { + "epoch": 2.2605394605394604, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 2830 + }, + { + "epoch": 2.2613386613386615, + "grad_norm": 0.73828125, + "learning_rate": 0.0002, + "loss": 0.9153, + "step": 2831 + }, + { + "epoch": 2.262137862137862, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9099, + "step": 2832 + }, + { + "epoch": 2.262937062937063, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9088, + "step": 2833 + }, + { + "epoch": 2.2637362637362637, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 2834 + }, + { + "epoch": 2.2645354645354647, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9119, + "step": 2835 + }, + { + "epoch": 2.2653346653346653, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9057, + "step": 2836 + }, + { + "epoch": 2.266133866133866, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9116, + "step": 2837 + }, + { + "epoch": 2.266933066933067, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 2838 + }, + { + "epoch": 2.2677322677322675, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 2839 + }, + { + "epoch": 2.2685314685314686, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9151, + "step": 2840 + }, + { + "epoch": 2.269330669330669, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9177, + "step": 2841 + }, + { + "epoch": 2.27012987012987, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9074, + "step": 2842 + }, + { + "epoch": 2.270929070929071, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 2843 + }, + { + "epoch": 2.271728271728272, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.917, + "step": 2844 + }, + { + "epoch": 2.2725274725274724, + "grad_norm": 0.36328125, + "learning_rate": 
0.0002, + "loss": 0.9147, + "step": 2845 + }, + { + "epoch": 2.2733266733266735, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 2846 + }, + { + "epoch": 2.274125874125874, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 2847 + }, + { + "epoch": 2.274925074925075, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 2848 + }, + { + "epoch": 2.2757242757242757, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.915, + "step": 2849 + }, + { + "epoch": 2.2765234765234768, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9123, + "step": 2850 + }, + { + "epoch": 2.2773226773226773, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9114, + "step": 2851 + }, + { + "epoch": 2.278121878121878, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9108, + "step": 2852 + }, + { + "epoch": 2.278921078921079, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9204, + "step": 2853 + }, + { + "epoch": 2.2797202797202796, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 2854 + }, + { + "epoch": 2.2805194805194806, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9105, + "step": 2855 + }, + { + "epoch": 2.281318681318681, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 2856 + }, + { + "epoch": 2.2821178821178822, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 2857 + }, + { + "epoch": 2.282917082917083, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9168, + "step": 2858 + }, + { + "epoch": 2.283716283716284, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9175, + "step": 2859 + }, + { + "epoch": 2.2845154845154845, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9163, + "step": 2860 + }, + { + "epoch": 2.2853146853146855, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 2861 + }, + { + "epoch": 2.286113886113886, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9151, + "step": 2862 + }, + { + "epoch": 2.2869130869130867, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9077, + "step": 2863 + }, + { + "epoch": 2.2877122877122877, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9154, + "step": 2864 + }, + { + "epoch": 2.2885114885114883, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9119, + "step": 2865 + }, + { + "epoch": 2.2893106893106894, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 2866 + }, + { + "epoch": 2.29010989010989, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9175, + "step": 2867 + }, + { + "epoch": 2.290909090909091, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 2868 + }, + { + "epoch": 2.2917082917082916, + "grad_norm": 1.8515625, + "learning_rate": 0.0002, + "loss": 0.9176, + "step": 2869 + }, + { + "epoch": 2.2925074925074926, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 2870 + }, + { + "epoch": 2.2933066933066932, + "grad_norm": 2.109375, + "learning_rate": 0.0002, + "loss": 0.9359, + "step": 2871 + }, + { + "epoch": 2.2941058941058943, + "grad_norm": 0.67578125, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 2872 + }, + { + "epoch": 2.294905094905095, + "grad_norm": 1.4375, + "learning_rate": 0.0002, + "loss": 
0.9116, + "step": 2873 + }, + { + "epoch": 2.2957042957042955, + "grad_norm": 0.85546875, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 2874 + }, + { + "epoch": 2.2965034965034965, + "grad_norm": 0.86328125, + "learning_rate": 0.0002, + "loss": 0.9136, + "step": 2875 + }, + { + "epoch": 2.2973026973026975, + "grad_norm": 0.96875, + "learning_rate": 0.0002, + "loss": 0.9192, + "step": 2876 + }, + { + "epoch": 2.298101898101898, + "grad_norm": 0.94140625, + "learning_rate": 0.0002, + "loss": 0.9156, + "step": 2877 + }, + { + "epoch": 2.2989010989010987, + "grad_norm": 0.734375, + "learning_rate": 0.0002, + "loss": 0.911, + "step": 2878 + }, + { + "epoch": 2.2997002997002998, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9158, + "step": 2879 + }, + { + "epoch": 2.3004995004995004, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.9136, + "step": 2880 + }, + { + "epoch": 2.3012987012987014, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9102, + "step": 2881 + }, + { + "epoch": 2.302097902097902, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9242, + "step": 2882 + }, + { + "epoch": 2.302897102897103, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 2883 + }, + { + "epoch": 2.3036963036963036, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9139, + "step": 2884 + }, + { + "epoch": 2.3044955044955047, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9155, + "step": 2885 + }, + { + "epoch": 2.3052947052947053, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9102, + "step": 2886 + }, + { + "epoch": 2.3060939060939063, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9123, + "step": 2887 + }, + { + "epoch": 2.306893106893107, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9197, + "step": 2888 + }, + { + "epoch": 2.3076923076923075, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 2889 + }, + { + "epoch": 2.3084915084915085, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 2890 + }, + { + "epoch": 2.309290709290709, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 2891 + }, + { + "epoch": 2.31008991008991, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9156, + "step": 2892 + }, + { + "epoch": 2.3108891108891108, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9193, + "step": 2893 + }, + { + "epoch": 2.311688311688312, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.918, + "step": 2894 + }, + { + "epoch": 2.3124875124875124, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9195, + "step": 2895 + }, + { + "epoch": 2.3132867132867134, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.912, + "step": 2896 + }, + { + "epoch": 2.314085914085914, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9145, + "step": 2897 + }, + { + "epoch": 2.314885114885115, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 2898 + }, + { + "epoch": 2.3156843156843157, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9129, + "step": 2899 + }, + { + "epoch": 2.3164835164835162, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.913, + "step": 2900 + }, + { + "epoch": 2.3172827172827173, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 2901 + }, + { + 
"epoch": 2.3180819180819183, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9123, + "step": 2902 + }, + { + "epoch": 2.318881118881119, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.9247, + "step": 2903 + }, + { + "epoch": 2.3196803196803195, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9216, + "step": 2904 + }, + { + "epoch": 2.3204795204795206, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9147, + "step": 2905 + }, + { + "epoch": 2.321278721278721, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9089, + "step": 2906 + }, + { + "epoch": 2.322077922077922, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9097, + "step": 2907 + }, + { + "epoch": 2.322877122877123, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 2908 + }, + { + "epoch": 2.323676323676324, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 2909 + }, + { + "epoch": 2.3244755244755244, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 2910 + }, + { + "epoch": 2.3252747252747255, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9141, + "step": 2911 + }, + { + "epoch": 2.326073926073926, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9113, + "step": 2912 + }, + { + "epoch": 2.326873126873127, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9131, + "step": 2913 + }, + { + "epoch": 2.3276723276723277, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9151, + "step": 2914 + }, + { + "epoch": 2.3284715284715283, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 2915 + }, + { + "epoch": 2.3292707292707293, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 2916 + }, + { + "epoch": 2.33006993006993, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9072, + "step": 2917 + }, + { + "epoch": 2.330869130869131, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9092, + "step": 2918 + }, + { + "epoch": 2.3316683316683315, + "grad_norm": 0.80859375, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 2919 + }, + { + "epoch": 2.3324675324675326, + "grad_norm": 0.890625, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 2920 + }, + { + "epoch": 2.333266733266733, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9112, + "step": 2921 + }, + { + "epoch": 2.334065934065934, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.916, + "step": 2922 + }, + { + "epoch": 2.334865134865135, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 2923 + }, + { + "epoch": 2.335664335664336, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.914, + "step": 2924 + }, + { + "epoch": 2.3364635364635364, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 2925 + }, + { + "epoch": 2.337262737262737, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 2926 + }, + { + "epoch": 2.338061938061938, + "grad_norm": 0.68359375, + "learning_rate": 0.0002, + "loss": 0.9148, + "step": 2927 + }, + { + "epoch": 2.338861138861139, + "grad_norm": 0.671875, + "learning_rate": 0.0002, + "loss": 0.9134, + "step": 2928 + }, + { + "epoch": 2.3396603396603397, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 2929 + }, + { + "epoch": 
2.3404595404595403, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 2930 + }, + { + "epoch": 2.3412587412587413, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9103, + "step": 2931 + }, + { + "epoch": 2.342057942057942, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 2932 + }, + { + "epoch": 2.342857142857143, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9095, + "step": 2933 + }, + { + "epoch": 2.3436563436563436, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9153, + "step": 2934 + }, + { + "epoch": 2.3444555444555446, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9145, + "step": 2935 + }, + { + "epoch": 2.345254745254745, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 2936 + }, + { + "epoch": 2.3460539460539462, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9105, + "step": 2937 + }, + { + "epoch": 2.346853146853147, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9248, + "step": 2938 + }, + { + "epoch": 2.347652347652348, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 2939 + }, + { + "epoch": 2.3484515484515485, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 2940 + }, + { + "epoch": 2.349250749250749, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9138, + "step": 2941 + }, + { + "epoch": 2.35004995004995, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9178, + "step": 2942 + }, + { + "epoch": 2.3508491508491507, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 2943 + }, + { + "epoch": 2.3516483516483517, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 2944 + }, + { + "epoch": 2.3524475524475523, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.9156, + "step": 2945 + }, + { + "epoch": 2.3532467532467534, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 2946 + }, + { + "epoch": 2.354045954045954, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9105, + "step": 2947 + }, + { + "epoch": 2.354845154845155, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9093, + "step": 2948 + }, + { + "epoch": 2.3556443556443556, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9127, + "step": 2949 + }, + { + "epoch": 2.3564435564435566, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 2950 + }, + { + "epoch": 2.3572427572427572, + "grad_norm": 0.8359375, + "learning_rate": 0.0002, + "loss": 0.92, + "step": 2951 + }, + { + "epoch": 2.358041958041958, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.905, + "step": 2952 + }, + { + "epoch": 2.358841158841159, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9075, + "step": 2953 + }, + { + "epoch": 2.3596403596403595, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 2954 + }, + { + "epoch": 2.3604395604395605, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 2955 + }, + { + "epoch": 2.361238761238761, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9077, + "step": 2956 + }, + { + "epoch": 2.362037962037962, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9099, + "step": 2957 + }, + { + "epoch": 2.3628371628371627, + 
"grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9109, + "step": 2958 + }, + { + "epoch": 2.3636363636363638, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 2959 + }, + { + "epoch": 2.3644355644355644, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9158, + "step": 2960 + }, + { + "epoch": 2.3652347652347654, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9204, + "step": 2961 + }, + { + "epoch": 2.366033966033966, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9098, + "step": 2962 + }, + { + "epoch": 2.366833166833167, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 2963 + }, + { + "epoch": 2.3676323676323676, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9077, + "step": 2964 + }, + { + "epoch": 2.3684315684315687, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 2965 + }, + { + "epoch": 2.3692307692307693, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 2966 + }, + { + "epoch": 2.37002997002997, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9116, + "step": 2967 + }, + { + "epoch": 2.370829170829171, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 2968 + }, + { + "epoch": 2.3716283716283715, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9053, + "step": 2969 + }, + { + "epoch": 2.3724275724275725, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 2970 + }, + { + "epoch": 2.373226773226773, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9131, + "step": 2971 + }, + { + "epoch": 2.374025974025974, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9124, + "step": 2972 + }, + { + "epoch": 2.3748251748251747, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.916, + "step": 2973 + }, + { + "epoch": 2.375624375624376, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 2974 + }, + { + "epoch": 2.3764235764235764, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.9066, + "step": 2975 + }, + { + "epoch": 2.3772227772227774, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9067, + "step": 2976 + }, + { + "epoch": 2.378021978021978, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 2977 + }, + { + "epoch": 2.3788211788211786, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 2978 + }, + { + "epoch": 2.3796203796203796, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9121, + "step": 2979 + }, + { + "epoch": 2.3804195804195802, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9093, + "step": 2980 + }, + { + "epoch": 2.3812187812187813, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 2981 + }, + { + "epoch": 2.382017982017982, + "grad_norm": 0.65234375, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 2982 + }, + { + "epoch": 2.382817182817183, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.914, + "step": 2983 + }, + { + "epoch": 2.3836163836163835, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9107, + "step": 2984 + }, + { + "epoch": 2.3844155844155845, + "grad_norm": 0.70703125, + "learning_rate": 0.0002, + "loss": 0.906, + "step": 2985 + }, + { + "epoch": 2.385214785214785, + "grad_norm": 
0.85546875, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 2986 + }, + { + "epoch": 2.386013986013986, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9095, + "step": 2987 + }, + { + "epoch": 2.3868131868131868, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.9088, + "step": 2988 + }, + { + "epoch": 2.3876123876123874, + "grad_norm": 0.67578125, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 2989 + }, + { + "epoch": 2.3884115884115884, + "grad_norm": 0.828125, + "learning_rate": 0.0002, + "loss": 0.9075, + "step": 2990 + }, + { + "epoch": 2.3892107892107894, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 2991 + }, + { + "epoch": 2.39000999000999, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 2992 + }, + { + "epoch": 2.3908091908091906, + "grad_norm": 0.625, + "learning_rate": 0.0002, + "loss": 0.9132, + "step": 2993 + }, + { + "epoch": 2.3916083916083917, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.905, + "step": 2994 + }, + { + "epoch": 2.3924075924075923, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 2995 + }, + { + "epoch": 2.3932067932067933, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 2996 + }, + { + "epoch": 2.394005994005994, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 2997 + }, + { + "epoch": 2.394805194805195, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 2998 + }, + { + "epoch": 2.3956043956043955, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9231, + "step": 2999 + }, + { + "epoch": 2.3964035964035966, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 3000 + }, + { + "epoch": 2.397202797202797, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9161, + "step": 3001 + }, + { + "epoch": 2.398001998001998, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 3002 + }, + { + "epoch": 2.398801198801199, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9114, + "step": 3003 + }, + { + "epoch": 2.3996003996003994, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.914, + "step": 3004 + }, + { + "epoch": 2.4003996003996004, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9069, + "step": 3005 + }, + { + "epoch": 2.401198801198801, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 3006 + }, + { + "epoch": 2.401998001998002, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9135, + "step": 3007 + }, + { + "epoch": 2.4027972027972027, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 3008 + }, + { + "epoch": 2.4035964035964037, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9092, + "step": 3009 + }, + { + "epoch": 2.4043956043956043, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9109, + "step": 3010 + }, + { + "epoch": 2.4051948051948053, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.9168, + "step": 3011 + }, + { + "epoch": 2.405994005994006, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9099, + "step": 3012 + }, + { + "epoch": 2.406793206793207, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 3013 + }, + { + "epoch": 2.4075924075924076, + "grad_norm": 0.380859375, + 
"learning_rate": 0.0002, + "loss": 0.9081, + "step": 3014 + }, + { + "epoch": 2.408391608391608, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9148, + "step": 3015 + }, + { + "epoch": 2.409190809190809, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9177, + "step": 3016 + }, + { + "epoch": 2.4099900099900102, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 3017 + }, + { + "epoch": 2.410789210789211, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 3018 + }, + { + "epoch": 2.4115884115884114, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.905, + "step": 3019 + }, + { + "epoch": 2.4123876123876125, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.9085, + "step": 3020 + }, + { + "epoch": 2.413186813186813, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.9143, + "step": 3021 + }, + { + "epoch": 2.413986013986014, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.9165, + "step": 3022 + }, + { + "epoch": 2.4147852147852147, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 3023 + }, + { + "epoch": 2.4155844155844157, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 3024 + }, + { + "epoch": 2.4163836163836163, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9176, + "step": 3025 + }, + { + "epoch": 2.4171828171828174, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 3026 + }, + { + "epoch": 2.417982017982018, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 3027 + }, + { + "epoch": 2.418781218781219, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9157, + "step": 3028 + }, + { + "epoch": 2.4195804195804196, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 3029 + }, + { + "epoch": 2.42037962037962, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9092, + "step": 3030 + }, + { + "epoch": 2.421178821178821, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.913, + "step": 3031 + }, + { + "epoch": 2.421978021978022, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9112, + "step": 3032 + }, + { + "epoch": 2.422777222777223, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9188, + "step": 3033 + }, + { + "epoch": 2.4235764235764234, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 3034 + }, + { + "epoch": 2.4243756243756245, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 3035 + }, + { + "epoch": 2.425174825174825, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 3036 + }, + { + "epoch": 2.425974025974026, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9165, + "step": 3037 + }, + { + "epoch": 2.4267732267732267, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 3038 + }, + { + "epoch": 2.4275724275724277, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 3039 + }, + { + "epoch": 2.4283716283716283, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9136, + "step": 3040 + }, + { + "epoch": 2.429170829170829, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9158, + "step": 3041 + }, + { + "epoch": 2.42997002997003, + "grad_norm": 0.380859375, + "learning_rate": 
0.0002, + "loss": 0.9136, + "step": 3042 + }, + { + "epoch": 2.430769230769231, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 3043 + }, + { + "epoch": 2.4315684315684316, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9101, + "step": 3044 + }, + { + "epoch": 2.432367632367632, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 3045 + }, + { + "epoch": 2.4331668331668332, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9165, + "step": 3046 + }, + { + "epoch": 2.433966033966034, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 3047 + }, + { + "epoch": 2.434765234765235, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 3048 + }, + { + "epoch": 2.4355644355644355, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9232, + "step": 3049 + }, + { + "epoch": 2.4363636363636365, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9108, + "step": 3050 + }, + { + "epoch": 2.437162837162837, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 3051 + }, + { + "epoch": 2.437962037962038, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9102, + "step": 3052 + }, + { + "epoch": 2.4387612387612387, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 3053 + }, + { + "epoch": 2.4395604395604398, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 3054 + }, + { + "epoch": 2.4403596403596404, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9107, + "step": 3055 + }, + { + "epoch": 2.441158841158841, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9053, + "step": 3056 + }, + { + "epoch": 2.441958041958042, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 3057 + }, + { + "epoch": 2.4427572427572426, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 3058 + }, + { + "epoch": 2.4435564435564436, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 3059 + }, + { + "epoch": 2.4443556443556442, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9134, + "step": 3060 + }, + { + "epoch": 2.4451548451548453, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 3061 + }, + { + "epoch": 2.445954045954046, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 3062 + }, + { + "epoch": 2.446753246753247, + "grad_norm": 1.2421875, + "learning_rate": 0.0002, + "loss": 0.9391, + "step": 3063 + }, + { + "epoch": 2.4475524475524475, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9067, + "step": 3064 + }, + { + "epoch": 2.4483516483516485, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 3065 + }, + { + "epoch": 2.449150849150849, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 3066 + }, + { + "epoch": 2.4499500499500497, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9066, + "step": 3067 + }, + { + "epoch": 2.4507492507492508, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 3068 + }, + { + "epoch": 2.4515484515484514, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 3069 + }, + { + "epoch": 2.4523476523476524, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 
0.8975, + "step": 3070 + }, + { + "epoch": 2.453146853146853, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 3071 + }, + { + "epoch": 2.453946053946054, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.906, + "step": 3072 + }, + { + "epoch": 2.4547452547452546, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 3073 + }, + { + "epoch": 2.4555444555444557, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 3074 + }, + { + "epoch": 2.4563436563436563, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 3075 + }, + { + "epoch": 2.4571428571428573, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9066, + "step": 3076 + }, + { + "epoch": 2.457942057942058, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9156, + "step": 3077 + }, + { + "epoch": 2.458741258741259, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 3078 + }, + { + "epoch": 2.4595404595404595, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 3079 + }, + { + "epoch": 2.4603396603396606, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 3080 + }, + { + "epoch": 2.461138861138861, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9131, + "step": 3081 + }, + { + "epoch": 2.4619380619380618, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.905, + "step": 3082 + }, + { + "epoch": 2.462737262737263, + "grad_norm": 0.703125, + "learning_rate": 0.0002, + "loss": 0.9097, + "step": 3083 + }, + { + "epoch": 2.4635364635364634, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9134, + "step": 3084 + }, + { + "epoch": 2.4643356643356644, + "grad_norm": 0.70703125, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 3085 + }, + { + "epoch": 2.465134865134865, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9221, + "step": 3086 + }, + { + "epoch": 2.465934065934066, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 3087 + }, + { + "epoch": 2.4667332667332666, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9114, + "step": 3088 + }, + { + "epoch": 2.4675324675324677, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.9092, + "step": 3089 + }, + { + "epoch": 2.4683316683316683, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9132, + "step": 3090 + }, + { + "epoch": 2.4691308691308693, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9102, + "step": 3091 + }, + { + "epoch": 2.46993006993007, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9107, + "step": 3092 + }, + { + "epoch": 2.4707292707292705, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 3093 + }, + { + "epoch": 2.4715284715284715, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9103, + "step": 3094 + }, + { + "epoch": 2.472327672327672, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 3095 + }, + { + "epoch": 2.473126873126873, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9125, + "step": 3096 + }, + { + "epoch": 2.4739260739260738, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9072, + "step": 3097 + }, + { + "epoch": 2.474725274725275, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 3098 + }, + { 
+ "epoch": 2.4755244755244754, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 3099 + }, + { + "epoch": 2.4763236763236764, + "grad_norm": 0.64453125, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 3100 + }, + { + "epoch": 2.477122877122877, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 3101 + }, + { + "epoch": 2.477922077922078, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 3102 + }, + { + "epoch": 2.4787212787212787, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 3103 + }, + { + "epoch": 2.4795204795204797, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 3104 + }, + { + "epoch": 2.4803196803196803, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9045, + "step": 3105 + }, + { + "epoch": 2.4811188811188813, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 3106 + }, + { + "epoch": 2.481918081918082, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 3107 + }, + { + "epoch": 2.4827172827172825, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 3108 + }, + { + "epoch": 2.4835164835164836, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9109, + "step": 3109 + }, + { + "epoch": 2.484315684315684, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9212, + "step": 3110 + }, + { + "epoch": 2.485114885114885, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 3111 + }, + { + "epoch": 2.485914085914086, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 3112 + }, + { + "epoch": 2.486713286713287, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9045, + "step": 3113 + }, + { + "epoch": 2.4875124875124874, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 3114 + }, + { + "epoch": 2.4883116883116885, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 3115 + }, + { + "epoch": 2.489110889110889, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 3116 + }, + { + "epoch": 2.48991008991009, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 3117 + }, + { + "epoch": 2.4907092907092907, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9108, + "step": 3118 + }, + { + "epoch": 2.4915084915084913, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 3119 + }, + { + "epoch": 2.4923076923076923, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 3120 + }, + { + "epoch": 2.493106893106893, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 3121 + }, + { + "epoch": 2.493906093906094, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9067, + "step": 3122 + }, + { + "epoch": 2.4947052947052946, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 3123 + }, + { + "epoch": 2.4955044955044956, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 3124 + }, + { + "epoch": 2.496303696303696, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 3125 + }, + { + "epoch": 2.4971028971028972, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9069, + "step": 3126 + }, + { + "epoch": 
2.497902097902098, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 3127 + }, + { + "epoch": 2.498701298701299, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 3128 + }, + { + "epoch": 2.4995004995004995, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 3129 + }, + { + "epoch": 2.5002997002997, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9121, + "step": 3130 + }, + { + "epoch": 2.501098901098901, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9115, + "step": 3131 + }, + { + "epoch": 2.501898101898102, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 3132 + }, + { + "epoch": 2.5026973026973027, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 3133 + }, + { + "epoch": 2.5034965034965033, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 3134 + }, + { + "epoch": 2.5042957042957044, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 3135 + }, + { + "epoch": 2.505094905094905, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9113, + "step": 3136 + }, + { + "epoch": 2.505894105894106, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 3137 + }, + { + "epoch": 2.5066933066933066, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9101, + "step": 3138 + }, + { + "epoch": 2.5074925074925076, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 3139 + }, + { + "epoch": 2.508291708291708, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 3140 + }, + { + "epoch": 2.509090909090909, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 3141 + }, + { + "epoch": 2.50989010989011, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 3142 + }, + { + "epoch": 2.510689310689311, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 3143 + }, + { + "epoch": 2.5114885114885115, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9068, + "step": 3144 + }, + { + "epoch": 2.512287712287712, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9092, + "step": 3145 + }, + { + "epoch": 2.513086913086913, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 3146 + }, + { + "epoch": 2.513886113886114, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 3147 + }, + { + "epoch": 2.5146853146853148, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 3148 + }, + { + "epoch": 2.5154845154845153, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.9116, + "step": 3149 + }, + { + "epoch": 2.5162837162837164, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9163, + "step": 3150 + }, + { + "epoch": 2.517082917082917, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 3151 + }, + { + "epoch": 2.517882117882118, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 3152 + }, + { + "epoch": 2.5186813186813186, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 3153 + }, + { + "epoch": 2.5194805194805197, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 3154 + }, + { + "epoch": 2.5202797202797202, + 
"grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9045, + "step": 3155 + }, + { + "epoch": 2.521078921078921, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 3156 + }, + { + "epoch": 2.521878121878122, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9197, + "step": 3157 + }, + { + "epoch": 2.522677322677323, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9163, + "step": 3158 + }, + { + "epoch": 2.5234765234765235, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9103, + "step": 3159 + }, + { + "epoch": 2.524275724275724, + "grad_norm": 0.69140625, + "learning_rate": 0.0002, + "loss": 0.9108, + "step": 3160 + }, + { + "epoch": 2.525074925074925, + "grad_norm": 0.69140625, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 3161 + }, + { + "epoch": 2.5258741258741257, + "grad_norm": 2.125, + "learning_rate": 0.0002, + "loss": 0.9439, + "step": 3162 + }, + { + "epoch": 2.526673326673327, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 3163 + }, + { + "epoch": 2.5274725274725274, + "grad_norm": 3.171875, + "learning_rate": 0.0002, + "loss": 0.926, + "step": 3164 + }, + { + "epoch": 2.5282717282717284, + "grad_norm": 1.328125, + "learning_rate": 0.0002, + "loss": 0.912, + "step": 3165 + }, + { + "epoch": 2.529070929070929, + "grad_norm": 3.15625, + "learning_rate": 0.0002, + "loss": 0.9224, + "step": 3166 + }, + { + "epoch": 2.5298701298701296, + "grad_norm": 2.265625, + "learning_rate": 0.0002, + "loss": 0.9057, + "step": 3167 + }, + { + "epoch": 2.5306693306693306, + "grad_norm": 1.6640625, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 3168 + }, + { + "epoch": 2.5314685314685317, + "grad_norm": 1.625, + "learning_rate": 0.0002, + "loss": 0.927, + "step": 3169 + }, + { + "epoch": 2.5322677322677323, + "grad_norm": 2.234375, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 3170 + }, + { + "epoch": 2.533066933066933, + "grad_norm": 1.0625, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 3171 + }, + { + "epoch": 2.533866133866134, + "grad_norm": 2.46875, + "learning_rate": 0.0002, + "loss": 0.9225, + "step": 3172 + }, + { + "epoch": 2.5346653346653345, + "grad_norm": 2.015625, + "learning_rate": 0.0002, + "loss": 0.9192, + "step": 3173 + }, + { + "epoch": 2.5354645354645355, + "grad_norm": 0.90625, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 3174 + }, + { + "epoch": 2.536263736263736, + "grad_norm": 2.109375, + "learning_rate": 0.0002, + "loss": 0.919, + "step": 3175 + }, + { + "epoch": 2.537062937062937, + "grad_norm": 0.8046875, + "learning_rate": 0.0002, + "loss": 0.9173, + "step": 3176 + }, + { + "epoch": 2.5378621378621378, + "grad_norm": 1.5859375, + "learning_rate": 0.0002, + "loss": 0.9199, + "step": 3177 + }, + { + "epoch": 2.538661338661339, + "grad_norm": 1.078125, + "learning_rate": 0.0002, + "loss": 0.9299, + "step": 3178 + }, + { + "epoch": 2.5394605394605394, + "grad_norm": 2.390625, + "learning_rate": 0.0002, + "loss": 0.9246, + "step": 3179 + }, + { + "epoch": 2.5402597402597404, + "grad_norm": 1.203125, + "learning_rate": 0.0002, + "loss": 0.923, + "step": 3180 + }, + { + "epoch": 2.541058941058941, + "grad_norm": 1.53125, + "learning_rate": 0.0002, + "loss": 0.9199, + "step": 3181 + }, + { + "epoch": 2.5418581418581416, + "grad_norm": 0.86328125, + "learning_rate": 0.0002, + "loss": 0.9243, + "step": 3182 + }, + { + "epoch": 2.5426573426573427, + "grad_norm": 1.5, + "learning_rate": 0.0002, + "loss": 
0.9191, + "step": 3183 + }, + { + "epoch": 2.5434565434565437, + "grad_norm": 0.9453125, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 3184 + }, + { + "epoch": 2.5442557442557443, + "grad_norm": 1.6796875, + "learning_rate": 0.0002, + "loss": 0.9125, + "step": 3185 + }, + { + "epoch": 2.545054945054945, + "grad_norm": 1.203125, + "learning_rate": 0.0002, + "loss": 0.9214, + "step": 3186 + }, + { + "epoch": 2.545854145854146, + "grad_norm": 1.5703125, + "learning_rate": 0.0002, + "loss": 0.9175, + "step": 3187 + }, + { + "epoch": 2.5466533466533465, + "grad_norm": 1.296875, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 3188 + }, + { + "epoch": 2.5474525474525476, + "grad_norm": 1.4609375, + "learning_rate": 0.0002, + "loss": 0.9131, + "step": 3189 + }, + { + "epoch": 2.548251748251748, + "grad_norm": 1.265625, + "learning_rate": 0.0002, + "loss": 0.917, + "step": 3190 + }, + { + "epoch": 2.549050949050949, + "grad_norm": 1.4296875, + "learning_rate": 0.0002, + "loss": 0.9199, + "step": 3191 + }, + { + "epoch": 2.54985014985015, + "grad_norm": 1.3125, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 3192 + }, + { + "epoch": 2.5506493506493504, + "grad_norm": 1.2734375, + "learning_rate": 0.0002, + "loss": 0.915, + "step": 3193 + }, + { + "epoch": 2.5514485514485514, + "grad_norm": 1.1640625, + "learning_rate": 0.0002, + "loss": 0.9161, + "step": 3194 + }, + { + "epoch": 2.5522477522477525, + "grad_norm": 1.6796875, + "learning_rate": 0.0002, + "loss": 0.9057, + "step": 3195 + }, + { + "epoch": 2.553046953046953, + "grad_norm": 1.671875, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 3196 + }, + { + "epoch": 2.5538461538461537, + "grad_norm": 1.28125, + "learning_rate": 0.0002, + "loss": 0.911, + "step": 3197 + }, + { + "epoch": 2.5546453546453547, + "grad_norm": 1.1796875, + "learning_rate": 0.0002, + "loss": 0.9103, + "step": 3198 + }, + { + "epoch": 2.5554445554445553, + "grad_norm": 1.5859375, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 3199 + }, + { + "epoch": 2.5562437562437563, + "grad_norm": 1.4375, + "learning_rate": 0.0002, + "loss": 0.9234, + "step": 3200 + }, + { + "epoch": 2.557042957042957, + "grad_norm": 1.5625, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 3201 + }, + { + "epoch": 2.557842157842158, + "grad_norm": 1.453125, + "learning_rate": 0.0002, + "loss": 0.9098, + "step": 3202 + }, + { + "epoch": 2.5586413586413586, + "grad_norm": 1.2734375, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 3203 + }, + { + "epoch": 2.5594405594405596, + "grad_norm": 1.15625, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 3204 + }, + { + "epoch": 2.56023976023976, + "grad_norm": 1.2421875, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 3205 + }, + { + "epoch": 2.5610389610389612, + "grad_norm": 1.078125, + "learning_rate": 0.0002, + "loss": 0.917, + "step": 3206 + }, + { + "epoch": 2.561838161838162, + "grad_norm": 1.1875, + "learning_rate": 0.0002, + "loss": 0.9219, + "step": 3207 + }, + { + "epoch": 2.5626373626373624, + "grad_norm": 1.015625, + "learning_rate": 0.0002, + "loss": 0.9172, + "step": 3208 + }, + { + "epoch": 2.5634365634365635, + "grad_norm": 1.1171875, + "learning_rate": 0.0002, + "loss": 0.9092, + "step": 3209 + }, + { + "epoch": 2.5642357642357645, + "grad_norm": 1.015625, + "learning_rate": 0.0002, + "loss": 0.9185, + "step": 3210 + }, + { + "epoch": 2.565034965034965, + "grad_norm": 1.4921875, + "learning_rate": 0.0002, + "loss": 0.9098, + "step": 3211 + }, + { + "epoch": 2.5658341658341657, + 
"grad_norm": 1.265625, + "learning_rate": 0.0002, + "loss": 0.9067, + "step": 3212 + }, + { + "epoch": 2.5666333666333667, + "grad_norm": 1.734375, + "learning_rate": 0.0002, + "loss": 0.9161, + "step": 3213 + }, + { + "epoch": 2.5674325674325673, + "grad_norm": 1.453125, + "learning_rate": 0.0002, + "loss": 0.9122, + "step": 3214 + }, + { + "epoch": 2.5682317682317684, + "grad_norm": 1.3984375, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 3215 + }, + { + "epoch": 2.569030969030969, + "grad_norm": 1.21875, + "learning_rate": 0.0002, + "loss": 0.9175, + "step": 3216 + }, + { + "epoch": 2.56983016983017, + "grad_norm": 1.140625, + "learning_rate": 0.0002, + "loss": 0.9114, + "step": 3217 + }, + { + "epoch": 2.5706293706293706, + "grad_norm": 0.98828125, + "learning_rate": 0.0002, + "loss": 0.9242, + "step": 3218 + }, + { + "epoch": 2.571428571428571, + "grad_norm": 1.1875, + "learning_rate": 0.0002, + "loss": 0.903, + "step": 3219 + }, + { + "epoch": 2.572227772227772, + "grad_norm": 0.94921875, + "learning_rate": 0.0002, + "loss": 0.9154, + "step": 3220 + }, + { + "epoch": 2.5730269730269733, + "grad_norm": 1.2734375, + "learning_rate": 0.0002, + "loss": 0.9097, + "step": 3221 + }, + { + "epoch": 2.573826173826174, + "grad_norm": 1.046875, + "learning_rate": 0.0002, + "loss": 0.9129, + "step": 3222 + }, + { + "epoch": 2.5746253746253744, + "grad_norm": 1.765625, + "learning_rate": 0.0002, + "loss": 0.9102, + "step": 3223 + }, + { + "epoch": 2.5754245754245755, + "grad_norm": 1.5390625, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 3224 + }, + { + "epoch": 2.576223776223776, + "grad_norm": 1.3125, + "learning_rate": 0.0002, + "loss": 0.9085, + "step": 3225 + }, + { + "epoch": 2.577022977022977, + "grad_norm": 1.1484375, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 3226 + }, + { + "epoch": 2.5778221778221777, + "grad_norm": 1.1953125, + "learning_rate": 0.0002, + "loss": 0.9109, + "step": 3227 + }, + { + "epoch": 2.5786213786213787, + "grad_norm": 1.0234375, + "learning_rate": 0.0002, + "loss": 0.9112, + "step": 3228 + }, + { + "epoch": 2.5794205794205793, + "grad_norm": 1.46875, + "learning_rate": 0.0002, + "loss": 0.9158, + "step": 3229 + }, + { + "epoch": 2.5802197802197804, + "grad_norm": 1.3515625, + "learning_rate": 0.0002, + "loss": 0.9156, + "step": 3230 + }, + { + "epoch": 2.581018981018981, + "grad_norm": 1.15625, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 3231 + }, + { + "epoch": 2.581818181818182, + "grad_norm": 1.1484375, + "learning_rate": 0.0002, + "loss": 0.919, + "step": 3232 + }, + { + "epoch": 2.5826173826173826, + "grad_norm": 1.28125, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 3233 + }, + { + "epoch": 2.583416583416583, + "grad_norm": 1.625, + "learning_rate": 0.0002, + "loss": 0.9297, + "step": 3234 + }, + { + "epoch": 2.5842157842157842, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9101, + "step": 3235 + }, + { + "epoch": 2.5850149850149853, + "grad_norm": 0.7265625, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 3236 + }, + { + "epoch": 2.585814185814186, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 3237 + }, + { + "epoch": 2.5866133866133865, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9085, + "step": 3238 + }, + { + "epoch": 2.5874125874125875, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 3239 + }, + { + "epoch": 2.588211788211788, + "grad_norm": 0.7109375, + "learning_rate": 0.0002, + "loss": 
0.9053, + "step": 3240 + }, + { + "epoch": 2.589010989010989, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 3241 + }, + { + "epoch": 2.5898101898101897, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9175, + "step": 3242 + }, + { + "epoch": 2.5906093906093908, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 3243 + }, + { + "epoch": 2.5914085914085914, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9085, + "step": 3244 + }, + { + "epoch": 2.592207792207792, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9057, + "step": 3245 + }, + { + "epoch": 2.593006993006993, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 3246 + }, + { + "epoch": 2.593806193806194, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9066, + "step": 3247 + }, + { + "epoch": 2.5946053946053946, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 3248 + }, + { + "epoch": 2.5954045954045952, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9121, + "step": 3249 + }, + { + "epoch": 2.5962037962037963, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9115, + "step": 3250 + }, + { + "epoch": 2.597002997002997, + "grad_norm": 0.73828125, + "learning_rate": 0.0002, + "loss": 0.9405, + "step": 3251 + }, + { + "epoch": 2.597802197802198, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 3252 + }, + { + "epoch": 2.5986013986013985, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.9134, + "step": 3253 + }, + { + "epoch": 2.5994005994005995, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9119, + "step": 3254 + }, + { + "epoch": 2.6001998001998, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 3255 + }, + { + "epoch": 2.600999000999001, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 3256 + }, + { + "epoch": 2.6017982017982018, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9074, + "step": 3257 + }, + { + "epoch": 2.602597402597403, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9077, + "step": 3258 + }, + { + "epoch": 2.6033966033966034, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 3259 + }, + { + "epoch": 2.604195804195804, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9148, + "step": 3260 + }, + { + "epoch": 2.604995004995005, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9159, + "step": 3261 + }, + { + "epoch": 2.605794205794206, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9107, + "step": 3262 + }, + { + "epoch": 2.6065934065934067, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9233, + "step": 3263 + }, + { + "epoch": 2.6073926073926073, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 3264 + }, + { + "epoch": 2.6081918081918083, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9068, + "step": 3265 + }, + { + "epoch": 2.608991008991009, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 3266 + }, + { + "epoch": 2.60979020979021, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.9161, + "step": 3267 + }, + { + "epoch": 2.6105894105894105, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 
3268 + }, + { + "epoch": 2.6113886113886116, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 3269 + }, + { + "epoch": 2.612187812187812, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 3270 + }, + { + "epoch": 2.6129870129870127, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 3271 + }, + { + "epoch": 2.613786213786214, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 3272 + }, + { + "epoch": 2.614585414585415, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 3273 + }, + { + "epoch": 2.6153846153846154, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.912, + "step": 3274 + }, + { + "epoch": 2.616183816183816, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.9069, + "step": 3275 + }, + { + "epoch": 2.616983016983017, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9199, + "step": 3276 + }, + { + "epoch": 2.6177822177822176, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 3277 + }, + { + "epoch": 2.6185814185814187, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 3278 + }, + { + "epoch": 2.6193806193806193, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 3279 + }, + { + "epoch": 2.6201798201798203, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 3280 + }, + { + "epoch": 2.620979020979021, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 3281 + }, + { + "epoch": 2.6217782217782215, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9061, + "step": 3282 + }, + { + "epoch": 2.6225774225774225, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 3283 + }, + { + "epoch": 2.6233766233766236, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 3284 + }, + { + "epoch": 2.624175824175824, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 3285 + }, + { + "epoch": 2.6249750249750248, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 3286 + }, + { + "epoch": 2.625774225774226, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 3287 + }, + { + "epoch": 2.626573426573427, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9077, + "step": 3288 + }, + { + "epoch": 2.6273726273726274, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.903, + "step": 3289 + }, + { + "epoch": 2.628171828171828, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9045, + "step": 3290 + }, + { + "epoch": 2.628971028971029, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9066, + "step": 3291 + }, + { + "epoch": 2.6297702297702297, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9074, + "step": 3292 + }, + { + "epoch": 2.6305694305694307, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9119, + "step": 3293 + }, + { + "epoch": 2.6313686313686313, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9108, + "step": 3294 + }, + { + "epoch": 2.6321678321678323, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9174, + "step": 3295 + }, + { + "epoch": 2.632967032967033, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.906, + "step": 3296 + }, + { + "epoch": 
2.6337662337662335, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.91, + "step": 3297 + }, + { + "epoch": 2.6345654345654346, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 3298 + }, + { + "epoch": 2.6353646353646356, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 3299 + }, + { + "epoch": 2.636163836163836, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 3300 + }, + { + "epoch": 2.636963036963037, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 3301 + }, + { + "epoch": 2.637762237762238, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9102, + "step": 3302 + }, + { + "epoch": 2.6385614385614384, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 3303 + }, + { + "epoch": 2.6393606393606395, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9123, + "step": 3304 + }, + { + "epoch": 2.64015984015984, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 3305 + }, + { + "epoch": 2.640959040959041, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9097, + "step": 3306 + }, + { + "epoch": 2.6417582417582417, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9141, + "step": 3307 + }, + { + "epoch": 2.6425574425574423, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 3308 + }, + { + "epoch": 2.6433566433566433, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 3309 + }, + { + "epoch": 2.6441558441558444, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 3310 + }, + { + "epoch": 2.644955044955045, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9068, + "step": 3311 + }, + { + "epoch": 2.6457542457542456, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 3312 + }, + { + "epoch": 2.6465534465534466, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 3313 + }, + { + "epoch": 2.647352647352647, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 3314 + }, + { + "epoch": 2.6481518481518482, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 3315 + }, + { + "epoch": 2.648951048951049, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 3316 + }, + { + "epoch": 2.64975024975025, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 3317 + }, + { + "epoch": 2.6505494505494505, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 3318 + }, + { + "epoch": 2.6513486513486515, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 3319 + }, + { + "epoch": 2.652147852147852, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 3320 + }, + { + "epoch": 2.652947052947053, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 3321 + }, + { + "epoch": 2.6537462537462537, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9109, + "step": 3322 + }, + { + "epoch": 2.6545454545454543, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.9077, + "step": 3323 + }, + { + "epoch": 2.6553446553446554, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 3324 + }, + { + "epoch": 
2.6561438561438564, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9146, + "step": 3325 + }, + { + "epoch": 2.656943056943057, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.9061, + "step": 3326 + }, + { + "epoch": 2.6577422577422576, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9174, + "step": 3327 + }, + { + "epoch": 2.6585414585414586, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9077, + "step": 3328 + }, + { + "epoch": 2.659340659340659, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 3329 + }, + { + "epoch": 2.6601398601398603, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 3330 + }, + { + "epoch": 2.660939060939061, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.917, + "step": 3331 + }, + { + "epoch": 2.661738261738262, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.903, + "step": 3332 + }, + { + "epoch": 2.6625374625374625, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.9169, + "step": 3333 + }, + { + "epoch": 2.663336663336663, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.9182, + "step": 3334 + }, + { + "epoch": 2.664135864135864, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 3335 + }, + { + "epoch": 2.664935064935065, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 3336 + }, + { + "epoch": 2.6657342657342658, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 3337 + }, + { + "epoch": 2.6665334665334663, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 3338 + }, + { + "epoch": 2.6673326673326674, + "grad_norm": 0.8984375, + "learning_rate": 0.0002, + "loss": 0.9265, + "step": 3339 + }, + { + "epoch": 2.668131868131868, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 3340 + }, + { + "epoch": 2.668931068931069, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 3341 + }, + { + "epoch": 2.6697302697302696, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 3342 + }, + { + "epoch": 2.6705294705294707, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 3343 + }, + { + "epoch": 2.6713286713286712, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 3344 + }, + { + "epoch": 2.6721278721278723, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 3345 + }, + { + "epoch": 2.672927072927073, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 3346 + }, + { + "epoch": 2.673726273726274, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.915, + "step": 3347 + }, + { + "epoch": 2.6745254745254745, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 3348 + }, + { + "epoch": 2.675324675324675, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9114, + "step": 3349 + }, + { + "epoch": 2.676123876123876, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 3350 + }, + { + "epoch": 2.676923076923077, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9114, + "step": 3351 + }, + { + "epoch": 2.6777222777222778, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.911, + "step": 3352 + }, + { + "epoch": 
2.6785214785214784, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 3353 + }, + { + "epoch": 2.6793206793206794, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9107, + "step": 3354 + }, + { + "epoch": 2.68011988011988, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 3355 + }, + { + "epoch": 2.680919080919081, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9088, + "step": 3356 + }, + { + "epoch": 2.6817182817182816, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9069, + "step": 3357 + }, + { + "epoch": 2.6825174825174827, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9124, + "step": 3358 + }, + { + "epoch": 2.6833166833166833, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9163, + "step": 3359 + }, + { + "epoch": 2.684115884115884, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 3360 + }, + { + "epoch": 2.684915084915085, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9266, + "step": 3361 + }, + { + "epoch": 2.685714285714286, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 3362 + }, + { + "epoch": 2.6865134865134865, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 3363 + }, + { + "epoch": 2.687312687312687, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9107, + "step": 3364 + }, + { + "epoch": 2.688111888111888, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9177, + "step": 3365 + }, + { + "epoch": 2.6889110889110888, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 3366 + }, + { + "epoch": 2.68971028971029, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 3367 + }, + { + "epoch": 2.6905094905094904, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 3368 + }, + { + "epoch": 2.6913086913086914, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 3369 + }, + { + "epoch": 2.692107892107892, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.9098, + "step": 3370 + }, + { + "epoch": 2.692907092907093, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 3371 + }, + { + "epoch": 2.6937062937062937, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9305, + "step": 3372 + }, + { + "epoch": 2.6945054945054947, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 3373 + }, + { + "epoch": 2.6953046953046953, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 3374 + }, + { + "epoch": 2.696103896103896, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 3375 + }, + { + "epoch": 2.696903096903097, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9053, + "step": 3376 + }, + { + "epoch": 2.697702297702298, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 3377 + }, + { + "epoch": 2.6985014985014986, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 3378 + }, + { + "epoch": 2.699300699300699, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 3379 + }, + { + "epoch": 2.7000999000999, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 3380 + }, + { + "epoch": 
2.700899100899101, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9146, + "step": 3381 + }, + { + "epoch": 2.701698301698302, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9189, + "step": 3382 + }, + { + "epoch": 2.7024975024975024, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 3383 + }, + { + "epoch": 2.7032967032967035, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.9053, + "step": 3384 + }, + { + "epoch": 2.704095904095904, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 3385 + }, + { + "epoch": 2.7048951048951047, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9067, + "step": 3386 + }, + { + "epoch": 2.7056943056943057, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 3387 + }, + { + "epoch": 2.7064935064935067, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 3388 + }, + { + "epoch": 2.7072927072927073, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 3389 + }, + { + "epoch": 2.708091908091908, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 3390 + }, + { + "epoch": 2.708891108891109, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9056, + "step": 3391 + }, + { + "epoch": 2.7096903096903096, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 3392 + }, + { + "epoch": 2.7104895104895106, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 3393 + }, + { + "epoch": 2.711288711288711, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 3394 + }, + { + "epoch": 2.7120879120879122, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 3395 + }, + { + "epoch": 2.712887112887113, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9126, + "step": 3396 + }, + { + "epoch": 2.7136863136863134, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 3397 + }, + { + "epoch": 2.7144855144855145, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9061, + "step": 3398 + }, + { + "epoch": 2.7152847152847155, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.903, + "step": 3399 + }, + { + "epoch": 2.716083916083916, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9156, + "step": 3400 + }, + { + "epoch": 2.7168831168831167, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 3401 + }, + { + "epoch": 2.7176823176823177, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 3402 + }, + { + "epoch": 2.7184815184815188, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9125, + "step": 3403 + }, + { + "epoch": 2.7192807192807193, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 3404 + }, + { + "epoch": 2.72007992007992, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.903, + "step": 3405 + }, + { + "epoch": 2.720879120879121, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 3406 + }, + { + "epoch": 2.7216783216783216, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 3407 + }, + { + "epoch": 2.7224775224775226, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 3408 + }, + { + "epoch": 2.723276723276723, + 
"grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 3409 + }, + { + "epoch": 2.7240759240759242, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9125, + "step": 3410 + }, + { + "epoch": 2.724875124875125, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 3411 + }, + { + "epoch": 2.7256743256743254, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 3412 + }, + { + "epoch": 2.7264735264735265, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9152, + "step": 3413 + }, + { + "epoch": 2.7272727272727275, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9066, + "step": 3414 + }, + { + "epoch": 2.728071928071928, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 3415 + }, + { + "epoch": 2.7288711288711287, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 3416 + }, + { + "epoch": 2.7296703296703297, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9116, + "step": 3417 + }, + { + "epoch": 2.7304695304695303, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 3418 + }, + { + "epoch": 2.7312687312687314, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 3419 + }, + { + "epoch": 2.732067932067932, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 3420 + }, + { + "epoch": 2.732867132867133, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 3421 + }, + { + "epoch": 2.7336663336663336, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 3422 + }, + { + "epoch": 2.734465534465534, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 3423 + }, + { + "epoch": 2.7352647352647352, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 3424 + }, + { + "epoch": 2.7360639360639363, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 3425 + }, + { + "epoch": 2.736863136863137, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 3426 + }, + { + "epoch": 2.7376623376623375, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 3427 + }, + { + "epoch": 2.7384615384615385, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 3428 + }, + { + "epoch": 2.739260739260739, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 3429 + }, + { + "epoch": 2.74005994005994, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 3430 + }, + { + "epoch": 2.7408591408591407, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 3431 + }, + { + "epoch": 2.7416583416583418, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 3432 + }, + { + "epoch": 2.7424575424575424, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 3433 + }, + { + "epoch": 2.7432567432567434, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 3434 + }, + { + "epoch": 2.744055944055944, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 3435 + }, + { + "epoch": 2.744855144855145, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 3436 + }, + { + "epoch": 2.7456543456543456, + "grad_norm": 
0.451171875, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 3437 + }, + { + "epoch": 2.7464535464535462, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9061, + "step": 3438 + }, + { + "epoch": 2.7472527472527473, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 3439 + }, + { + "epoch": 2.7480519480519483, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9115, + "step": 3440 + }, + { + "epoch": 2.748851148851149, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9108, + "step": 3441 + }, + { + "epoch": 2.7496503496503495, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9152, + "step": 3442 + }, + { + "epoch": 2.7504495504495505, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 3443 + }, + { + "epoch": 2.751248751248751, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9074, + "step": 3444 + }, + { + "epoch": 2.752047952047952, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 3445 + }, + { + "epoch": 2.7528471528471528, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9097, + "step": 3446 + }, + { + "epoch": 2.753646353646354, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9057, + "step": 3447 + }, + { + "epoch": 2.7544455544455544, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 3448 + }, + { + "epoch": 2.755244755244755, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 3449 + }, + { + "epoch": 2.756043956043956, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 3450 + }, + { + "epoch": 2.756843156843157, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 3451 + }, + { + "epoch": 2.7576423576423577, + "grad_norm": 0.76171875, + "learning_rate": 0.0002, + "loss": 0.9123, + "step": 3452 + }, + { + "epoch": 2.7584415584415583, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 3453 + }, + { + "epoch": 2.7592407592407593, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 3454 + }, + { + "epoch": 2.76003996003996, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.916, + "step": 3455 + }, + { + "epoch": 2.760839160839161, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9152, + "step": 3456 + }, + { + "epoch": 2.7616383616383615, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 3457 + }, + { + "epoch": 2.7624375624375626, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 3458 + }, + { + "epoch": 2.763236763236763, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9136, + "step": 3459 + }, + { + "epoch": 2.764035964035964, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9068, + "step": 3460 + }, + { + "epoch": 2.764835164835165, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9122, + "step": 3461 + }, + { + "epoch": 2.765634365634366, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 3462 + }, + { + "epoch": 2.7664335664335664, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 3463 + }, + { + "epoch": 2.767232767232767, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.906, + "step": 3464 + }, + { + "epoch": 2.768031968031968, + "grad_norm": 0.33984375, + 
"learning_rate": 0.0002, + "loss": 0.9116, + "step": 3465 + }, + { + "epoch": 2.768831168831169, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 3466 + }, + { + "epoch": 2.7696303696303697, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 3467 + }, + { + "epoch": 2.7704295704295703, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 3468 + }, + { + "epoch": 2.7712287712287713, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 3469 + }, + { + "epoch": 2.772027972027972, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 3470 + }, + { + "epoch": 2.772827172827173, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 3471 + }, + { + "epoch": 2.7736263736263735, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9197, + "step": 3472 + }, + { + "epoch": 2.7744255744255746, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.9045, + "step": 3473 + }, + { + "epoch": 2.775224775224775, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 3474 + }, + { + "epoch": 2.7760239760239758, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 3475 + }, + { + "epoch": 2.776823176823177, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 3476 + }, + { + "epoch": 2.777622377622378, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 3477 + }, + { + "epoch": 2.7784215784215784, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 3478 + }, + { + "epoch": 2.779220779220779, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 3479 + }, + { + "epoch": 2.78001998001998, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 3480 + }, + { + "epoch": 2.7808191808191807, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 3481 + }, + { + "epoch": 2.7816183816183817, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 3482 + }, + { + "epoch": 2.7824175824175823, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9183, + "step": 3483 + }, + { + "epoch": 2.7832167832167833, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 3484 + }, + { + "epoch": 2.784015984015984, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 3485 + }, + { + "epoch": 2.784815184815185, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9098, + "step": 3486 + }, + { + "epoch": 2.7856143856143856, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9513, + "step": 3487 + }, + { + "epoch": 2.7864135864135866, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 3488 + }, + { + "epoch": 2.787212787212787, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 3489 + }, + { + "epoch": 2.788011988011988, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9126, + "step": 3490 + }, + { + "epoch": 2.788811188811189, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9105, + "step": 3491 + }, + { + "epoch": 2.78961038961039, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 3492 + }, + { + "epoch": 2.7904095904095905, + "grad_norm": 0.330078125, + 
"learning_rate": 0.0002, + "loss": 0.9004, + "step": 3493 + }, + { + "epoch": 2.791208791208791, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 3494 + }, + { + "epoch": 2.792007992007992, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 3495 + }, + { + "epoch": 2.7928071928071927, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 3496 + }, + { + "epoch": 2.7936063936063937, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 3497 + }, + { + "epoch": 2.7944055944055943, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9097, + "step": 3498 + }, + { + "epoch": 2.7952047952047954, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 3499 + }, + { + "epoch": 2.796003996003996, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 3500 + }, + { + "epoch": 2.7968031968031966, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 3501 + }, + { + "epoch": 2.7976023976023976, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 3502 + }, + { + "epoch": 2.7984015984015986, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 3503 + }, + { + "epoch": 2.7992007992007992, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.9113, + "step": 3504 + }, + { + "epoch": 2.8, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 3505 + }, + { + "epoch": 2.800799200799201, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 3506 + }, + { + "epoch": 2.8015984015984015, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9045, + "step": 3507 + }, + { + "epoch": 2.8023976023976025, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.9056, + "step": 3508 + }, + { + "epoch": 2.803196803196803, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 3509 + }, + { + "epoch": 2.803996003996004, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 3510 + }, + { + "epoch": 2.8047952047952047, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.9097, + "step": 3511 + }, + { + "epoch": 2.8055944055944058, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 3512 + }, + { + "epoch": 2.8063936063936064, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 3513 + }, + { + "epoch": 2.8071928071928074, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 3514 + }, + { + "epoch": 2.807992007992008, + "grad_norm": 3.515625, + "learning_rate": 0.0002, + "loss": 0.9342, + "step": 3515 + }, + { + "epoch": 2.8087912087912086, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 3516 + }, + { + "epoch": 2.8095904095904096, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 3517 + }, + { + "epoch": 2.8103896103896107, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9218, + "step": 3518 + }, + { + "epoch": 2.8111888111888113, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 3519 + }, + { + "epoch": 2.811988011988012, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9161, + "step": 3520 + }, + { + "epoch": 2.812787212787213, + "grad_norm": 0.353515625, + "learning_rate": 
0.0002, + "loss": 0.9132, + "step": 3521 + }, + { + "epoch": 2.8135864135864135, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 3522 + }, + { + "epoch": 2.8143856143856145, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 3523 + }, + { + "epoch": 2.815184815184815, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9126, + "step": 3524 + }, + { + "epoch": 2.815984015984016, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9102, + "step": 3525 + }, + { + "epoch": 2.8167832167832167, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9122, + "step": 3526 + }, + { + "epoch": 2.8175824175824173, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9182, + "step": 3527 + }, + { + "epoch": 2.8183816183816184, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9144, + "step": 3528 + }, + { + "epoch": 2.8191808191808194, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 3529 + }, + { + "epoch": 2.81998001998002, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 3530 + }, + { + "epoch": 2.8207792207792206, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 3531 + }, + { + "epoch": 2.8215784215784216, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 3532 + }, + { + "epoch": 2.8223776223776222, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9138, + "step": 3533 + }, + { + "epoch": 2.8231768231768233, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9033, + "step": 3534 + }, + { + "epoch": 2.823976023976024, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 3535 + }, + { + "epoch": 2.824775224775225, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9085, + "step": 3536 + }, + { + "epoch": 2.8255744255744255, + "grad_norm": 1.09375, + "learning_rate": 0.0002, + "loss": 0.9351, + "step": 3537 + }, + { + "epoch": 2.826373626373626, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9145, + "step": 3538 + }, + { + "epoch": 2.827172827172827, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9123, + "step": 3539 + }, + { + "epoch": 2.827972027972028, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 3540 + }, + { + "epoch": 2.8287712287712288, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 3541 + }, + { + "epoch": 2.8295704295704294, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 3542 + }, + { + "epoch": 2.8303696303696304, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9113, + "step": 3543 + }, + { + "epoch": 2.8311688311688314, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 3544 + }, + { + "epoch": 2.831968031968032, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9098, + "step": 3545 + }, + { + "epoch": 2.8327672327672326, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 3546 + }, + { + "epoch": 2.8335664335664337, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9069, + "step": 3547 + }, + { + "epoch": 2.8343656343656343, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9085, + "step": 3548 + }, + { + "epoch": 2.8351648351648353, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 
0.9047, + "step": 3549 + }, + { + "epoch": 2.835964035964036, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 3550 + }, + { + "epoch": 2.836763236763237, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 3551 + }, + { + "epoch": 2.8375624375624375, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 3552 + }, + { + "epoch": 2.838361638361638, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 3553 + }, + { + "epoch": 2.839160839160839, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 3554 + }, + { + "epoch": 2.83996003996004, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 3555 + }, + { + "epoch": 2.840759240759241, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 3556 + }, + { + "epoch": 2.8415584415584414, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9143, + "step": 3557 + }, + { + "epoch": 2.8423576423576424, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 3558 + }, + { + "epoch": 2.843156843156843, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9147, + "step": 3559 + }, + { + "epoch": 2.843956043956044, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 3560 + }, + { + "epoch": 2.8447552447552447, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 3561 + }, + { + "epoch": 2.8455544455544457, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9099, + "step": 3562 + }, + { + "epoch": 2.8463536463536463, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9171, + "step": 3563 + }, + { + "epoch": 2.847152847152847, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 3564 + }, + { + "epoch": 2.847952047952048, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 3565 + }, + { + "epoch": 2.848751248751249, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 3566 + }, + { + "epoch": 2.8495504495504496, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9134, + "step": 3567 + }, + { + "epoch": 2.85034965034965, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.905, + "step": 3568 + }, + { + "epoch": 2.851148851148851, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 3569 + }, + { + "epoch": 2.851948051948052, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 3570 + }, + { + "epoch": 2.852747252747253, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 3571 + }, + { + "epoch": 2.8535464535464534, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 3572 + }, + { + "epoch": 2.8543456543456545, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 3573 + }, + { + "epoch": 2.855144855144855, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9069, + "step": 3574 + }, + { + "epoch": 2.855944055944056, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 3575 + }, + { + "epoch": 2.8567432567432567, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9114, + "step": 3576 + }, + { + "epoch": 2.8575424575424577, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 
0.9039, + "step": 3577 + }, + { + "epoch": 2.8583416583416583, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9102, + "step": 3578 + }, + { + "epoch": 2.859140859140859, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9058, + "step": 3579 + }, + { + "epoch": 2.85994005994006, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 3580 + }, + { + "epoch": 2.860739260739261, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 3581 + }, + { + "epoch": 2.8615384615384616, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 3582 + }, + { + "epoch": 2.862337662337662, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 3583 + }, + { + "epoch": 2.863136863136863, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9108, + "step": 3584 + }, + { + "epoch": 2.863936063936064, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 3585 + }, + { + "epoch": 2.864735264735265, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 3586 + }, + { + "epoch": 2.8655344655344654, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 3587 + }, + { + "epoch": 2.8663336663336665, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 3588 + }, + { + "epoch": 2.867132867132867, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9064, + "step": 3589 + }, + { + "epoch": 2.8679320679320677, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9116, + "step": 3590 + }, + { + "epoch": 2.8687312687312687, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 3591 + }, + { + "epoch": 2.8695304695304698, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 3592 + }, + { + "epoch": 2.8703296703296703, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9101, + "step": 3593 + }, + { + "epoch": 2.871128871128871, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 3594 + }, + { + "epoch": 2.871928071928072, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9115, + "step": 3595 + }, + { + "epoch": 2.8727272727272726, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 3596 + }, + { + "epoch": 2.8735264735264736, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 3597 + }, + { + "epoch": 2.874325674325674, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9061, + "step": 3598 + }, + { + "epoch": 2.8751248751248752, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 3599 + }, + { + "epoch": 2.875924075924076, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9033, + "step": 3600 + }, + { + "epoch": 2.876723276723277, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 3601 + }, + { + "epoch": 2.8775224775224775, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9106, + "step": 3602 + }, + { + "epoch": 2.8783216783216785, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9129, + "step": 3603 + }, + { + "epoch": 2.879120879120879, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9147, + "step": 3604 + }, + { + "epoch": 2.8799200799200797, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9194, + 
"step": 3605 + }, + { + "epoch": 2.8807192807192807, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 3606 + }, + { + "epoch": 2.8815184815184818, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 3607 + }, + { + "epoch": 2.8823176823176824, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9148, + "step": 3608 + }, + { + "epoch": 2.883116883116883, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 3609 + }, + { + "epoch": 2.883916083916084, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 3610 + }, + { + "epoch": 2.8847152847152846, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9127, + "step": 3611 + }, + { + "epoch": 2.8855144855144856, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9068, + "step": 3612 + }, + { + "epoch": 2.8863136863136862, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.906, + "step": 3613 + }, + { + "epoch": 2.8871128871128873, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 3614 + }, + { + "epoch": 2.887912087912088, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9098, + "step": 3615 + }, + { + "epoch": 2.8887112887112885, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 3616 + }, + { + "epoch": 2.8895104895104895, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 3617 + }, + { + "epoch": 2.8903096903096905, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9166, + "step": 3618 + }, + { + "epoch": 2.891108891108891, + "grad_norm": 0.65234375, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 3619 + }, + { + "epoch": 2.8919080919080917, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9058, + "step": 3620 + }, + { + "epoch": 2.8927072927072928, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 3621 + }, + { + "epoch": 2.8935064935064934, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9064, + "step": 3622 + }, + { + "epoch": 2.8943056943056944, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 3623 + }, + { + "epoch": 2.895104895104895, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9142, + "step": 3624 + }, + { + "epoch": 2.895904095904096, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.9064, + "step": 3625 + }, + { + "epoch": 2.8967032967032966, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9075, + "step": 3626 + }, + { + "epoch": 2.8975024975024977, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 3627 + }, + { + "epoch": 2.8983016983016983, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 3628 + }, + { + "epoch": 2.8991008991008993, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 3629 + }, + { + "epoch": 2.8999000999001, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 3630 + }, + { + "epoch": 2.9006993006993005, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9112, + "step": 3631 + }, + { + "epoch": 2.9014985014985015, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 3632 + }, + { + "epoch": 2.9022977022977026, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.9045, + 
"step": 3633 + }, + { + "epoch": 2.903096903096903, + "grad_norm": 1.9765625, + "learning_rate": 0.0002, + "loss": 0.928, + "step": 3634 + }, + { + "epoch": 2.9038961038961038, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 3635 + }, + { + "epoch": 2.904695304695305, + "grad_norm": 0.67578125, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 3636 + }, + { + "epoch": 2.9054945054945054, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 3637 + }, + { + "epoch": 2.9062937062937064, + "grad_norm": 0.71875, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 3638 + }, + { + "epoch": 2.907092907092907, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 3639 + }, + { + "epoch": 2.907892107892108, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 3640 + }, + { + "epoch": 2.9086913086913087, + "grad_norm": 0.68359375, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 3641 + }, + { + "epoch": 2.9094905094905092, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 3642 + }, + { + "epoch": 2.9102897102897103, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 3643 + }, + { + "epoch": 2.9110889110889113, + "grad_norm": 0.7578125, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 3644 + }, + { + "epoch": 2.911888111888112, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9141, + "step": 3645 + }, + { + "epoch": 2.9126873126873125, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 3646 + }, + { + "epoch": 2.9134865134865136, + "grad_norm": 1.0078125, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 3647 + }, + { + "epoch": 2.914285714285714, + "grad_norm": 0.84765625, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 3648 + }, + { + "epoch": 2.915084915084915, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 3649 + }, + { + "epoch": 2.915884115884116, + "grad_norm": 1.1640625, + "learning_rate": 0.0002, + "loss": 0.9074, + "step": 3650 + }, + { + "epoch": 2.916683316683317, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 3651 + }, + { + "epoch": 2.9174825174825174, + "grad_norm": 0.80859375, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 3652 + }, + { + "epoch": 2.918281718281718, + "grad_norm": 0.73828125, + "learning_rate": 0.0002, + "loss": 0.9092, + "step": 3653 + }, + { + "epoch": 2.919080919080919, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 3654 + }, + { + "epoch": 2.91988011988012, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 3655 + }, + { + "epoch": 2.9206793206793207, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9093, + "step": 3656 + }, + { + "epoch": 2.9214785214785213, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.9064, + "step": 3657 + }, + { + "epoch": 2.9222777222777223, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 3658 + }, + { + "epoch": 2.9230769230769234, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 3659 + }, + { + "epoch": 2.923876123876124, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 3660 + }, + { + "epoch": 2.9246753246753245, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.9093, + "step": 3661 + }, + { + "epoch": 
2.9254745254745256, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 3662 + }, + { + "epoch": 2.926273726273726, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 3663 + }, + { + "epoch": 2.927072927072927, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 3664 + }, + { + "epoch": 2.927872127872128, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 3665 + }, + { + "epoch": 2.928671328671329, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 3666 + }, + { + "epoch": 2.9294705294705294, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 3667 + }, + { + "epoch": 2.93026973026973, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 3668 + }, + { + "epoch": 2.931068931068931, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 3669 + }, + { + "epoch": 2.931868131868132, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 3670 + }, + { + "epoch": 2.9326673326673327, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 3671 + }, + { + "epoch": 2.9334665334665333, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 3672 + }, + { + "epoch": 2.9342657342657343, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9129, + "step": 3673 + }, + { + "epoch": 2.935064935064935, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 3674 + }, + { + "epoch": 2.935864135864136, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 3675 + }, + { + "epoch": 2.9366633366633366, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 3676 + }, + { + "epoch": 2.9374625374625376, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 3677 + }, + { + "epoch": 2.938261738261738, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9068, + "step": 3678 + }, + { + "epoch": 2.939060939060939, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 3679 + }, + { + "epoch": 2.93986013986014, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9143, + "step": 3680 + }, + { + "epoch": 2.940659340659341, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 3681 + }, + { + "epoch": 2.9414585414585415, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9105, + "step": 3682 + }, + { + "epoch": 2.942257742257742, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 3683 + }, + { + "epoch": 2.943056943056943, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 3684 + }, + { + "epoch": 2.9438561438561437, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 3685 + }, + { + "epoch": 2.9446553446553447, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 3686 + }, + { + "epoch": 2.9454545454545453, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9057, + "step": 3687 + }, + { + "epoch": 2.9462537462537464, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 3688 + }, + { + "epoch": 2.947052947052947, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 3689 + }, + { + "epoch": 2.947852147852148, + 
"grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 3690 + }, + { + "epoch": 2.9486513486513486, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9107, + "step": 3691 + }, + { + "epoch": 2.9494505494505496, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 3692 + }, + { + "epoch": 2.9502497502497502, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 3693 + }, + { + "epoch": 2.951048951048951, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 3694 + }, + { + "epoch": 2.951848151848152, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 3695 + }, + { + "epoch": 2.952647352647353, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9106, + "step": 3696 + }, + { + "epoch": 2.9534465534465535, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 3697 + }, + { + "epoch": 2.954245754245754, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 3698 + }, + { + "epoch": 2.955044955044955, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 3699 + }, + { + "epoch": 2.9558441558441557, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 3700 + }, + { + "epoch": 2.9566433566433568, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 3701 + }, + { + "epoch": 2.9574425574425574, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9125, + "step": 3702 + }, + { + "epoch": 2.9582417582417584, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9154, + "step": 3703 + }, + { + "epoch": 2.959040959040959, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 3704 + }, + { + "epoch": 2.9598401598401596, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 3705 + }, + { + "epoch": 2.9606393606393606, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 3706 + }, + { + "epoch": 2.9614385614385617, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9101, + "step": 3707 + }, + { + "epoch": 2.9622377622377623, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9159, + "step": 3708 + }, + { + "epoch": 2.963036963036963, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.9079, + "step": 3709 + }, + { + "epoch": 2.963836163836164, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9142, + "step": 3710 + }, + { + "epoch": 2.9646353646353645, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 3711 + }, + { + "epoch": 2.9654345654345655, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 3712 + }, + { + "epoch": 2.966233766233766, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 3713 + }, + { + "epoch": 2.967032967032967, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 3714 + }, + { + "epoch": 2.9678321678321677, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 3715 + }, + { + "epoch": 2.968631368631369, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 3716 + }, + { + "epoch": 2.9694305694305694, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.9103, + "step": 3717 + }, + { + "epoch": 2.9702297702297704, + 
"grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 3718 + }, + { + "epoch": 2.971028971028971, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9125, + "step": 3719 + }, + { + "epoch": 2.9718281718281716, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 3720 + }, + { + "epoch": 2.9726273726273726, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 3721 + }, + { + "epoch": 2.9734265734265737, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 3722 + }, + { + "epoch": 2.9742257742257743, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 3723 + }, + { + "epoch": 2.975024975024975, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9049, + "step": 3724 + }, + { + "epoch": 2.975824175824176, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9097, + "step": 3725 + }, + { + "epoch": 2.9766233766233765, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9158, + "step": 3726 + }, + { + "epoch": 2.9774225774225775, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 3727 + }, + { + "epoch": 2.978221778221778, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 3728 + }, + { + "epoch": 2.979020979020979, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 3729 + }, + { + "epoch": 2.9798201798201798, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 3730 + }, + { + "epoch": 2.9806193806193804, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9123, + "step": 3731 + }, + { + "epoch": 2.9814185814185814, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.9077, + "step": 3732 + }, + { + "epoch": 2.9822177822177824, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 3733 + }, + { + "epoch": 2.983016983016983, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 3734 + }, + { + "epoch": 2.9838161838161836, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 3735 + }, + { + "epoch": 2.9846153846153847, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 3736 + }, + { + "epoch": 2.9854145854145853, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 3737 + }, + { + "epoch": 2.9862137862137863, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 3738 + }, + { + "epoch": 2.987012987012987, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 3739 + }, + { + "epoch": 2.987812187812188, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9067, + "step": 3740 + }, + { + "epoch": 2.9886113886113885, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 3741 + }, + { + "epoch": 2.9894105894105896, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 3742 + }, + { + "epoch": 2.99020979020979, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 3743 + }, + { + "epoch": 2.991008991008991, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 3744 + }, + { + "epoch": 2.991808191808192, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 3745 + }, + { + "epoch": 2.9926073926073924, + "grad_norm": 
0.275390625, + "learning_rate": 0.0002, + "loss": 0.9149, + "step": 3746 + }, + { + "epoch": 2.9934065934065934, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 3747 + }, + { + "epoch": 2.9942057942057945, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 3748 + }, + { + "epoch": 2.995004995004995, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 3749 + }, + { + "epoch": 2.9958041958041957, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 3750 + }, + { + "epoch": 2.9966033966033967, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 3751 + }, + { + "epoch": 2.9974025974025973, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 3752 + }, + { + "epoch": 2.9982017982017983, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9092, + "step": 3753 + }, + { + "epoch": 2.999000999000999, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9067, + "step": 3754 + }, + { + "epoch": 2.9998001998002, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9064, + "step": 3755 + }, + { + "epoch": 3.0, + "grad_norm": 0.091796875, + "learning_rate": 0.0002, + "loss": 0.2235, + "step": 3756 + }, + { + "epoch": 3.0007992007992006, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 3757 + }, + { + "epoch": 3.0015984015984016, + "grad_norm": 0.2431640625, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 3758 + }, + { + "epoch": 3.0023976023976022, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 3759 + }, + { + "epoch": 3.0031968031968033, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 3760 + }, + { + "epoch": 3.003996003996004, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 3761 + }, + { + "epoch": 3.004795204795205, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9125, + "step": 3762 + }, + { + "epoch": 3.0055944055944055, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9338, + "step": 3763 + }, + { + "epoch": 3.0063936063936065, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.913, + "step": 3764 + }, + { + "epoch": 3.007192807192807, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 3765 + }, + { + "epoch": 3.007992007992008, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 3766 + }, + { + "epoch": 3.0087912087912088, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 3767 + }, + { + "epoch": 3.00959040959041, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 3768 + }, + { + "epoch": 3.0103896103896104, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 3769 + }, + { + "epoch": 3.011188811188811, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 3770 + }, + { + "epoch": 3.011988011988012, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 3771 + }, + { + "epoch": 3.0127872127872126, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.905, + "step": 3772 + }, + { + "epoch": 3.0135864135864137, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 3773 + }, + { + "epoch": 3.0143856143856143, + "grad_norm": 0.31640625, + 
"learning_rate": 0.0002, + "loss": 0.904, + "step": 3774 + }, + { + "epoch": 3.0151848151848153, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 3775 + }, + { + "epoch": 3.015984015984016, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9103, + "step": 3776 + }, + { + "epoch": 3.016783216783217, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9057, + "step": 3777 + }, + { + "epoch": 3.0175824175824175, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 3778 + }, + { + "epoch": 3.0183816183816186, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 3779 + }, + { + "epoch": 3.019180819180819, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 3780 + }, + { + "epoch": 3.01998001998002, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 3781 + }, + { + "epoch": 3.020779220779221, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 3782 + }, + { + "epoch": 3.0215784215784214, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 3783 + }, + { + "epoch": 3.0223776223776224, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 3784 + }, + { + "epoch": 3.023176823176823, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9119, + "step": 3785 + }, + { + "epoch": 3.023976023976024, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 3786 + }, + { + "epoch": 3.0247752247752246, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 3787 + }, + { + "epoch": 3.0255744255744257, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 3788 + }, + { + "epoch": 3.0263736263736263, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 3789 + }, + { + "epoch": 3.0271728271728273, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 3790 + }, + { + "epoch": 3.027972027972028, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 3791 + }, + { + "epoch": 3.028771228771229, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 3792 + }, + { + "epoch": 3.0295704295704295, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 3793 + }, + { + "epoch": 3.0303696303696306, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 3794 + }, + { + "epoch": 3.031168831168831, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 3795 + }, + { + "epoch": 3.0319680319680318, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 3796 + }, + { + "epoch": 3.032767232767233, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 3797 + }, + { + "epoch": 3.0335664335664334, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 3798 + }, + { + "epoch": 3.0343656343656344, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9093, + "step": 3799 + }, + { + "epoch": 3.035164835164835, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 3800 + }, + { + "epoch": 3.035964035964036, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 3801 + }, + { + "epoch": 3.0367632367632367, + "grad_norm": 0.380859375, + "learning_rate": 
0.0002, + "loss": 0.8978, + "step": 3802 + }, + { + "epoch": 3.0375624375624377, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 3803 + }, + { + "epoch": 3.0383616383616383, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 3804 + }, + { + "epoch": 3.0391608391608393, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9103, + "step": 3805 + }, + { + "epoch": 3.03996003996004, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 3806 + }, + { + "epoch": 3.040759240759241, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 3807 + }, + { + "epoch": 3.0415584415584416, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9098, + "step": 3808 + }, + { + "epoch": 3.042357642357642, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9072, + "step": 3809 + }, + { + "epoch": 3.043156843156843, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 3810 + }, + { + "epoch": 3.043956043956044, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 3811 + }, + { + "epoch": 3.044755244755245, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.905, + "step": 3812 + }, + { + "epoch": 3.0455544455544454, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9057, + "step": 3813 + }, + { + "epoch": 3.0463536463536465, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9092, + "step": 3814 + }, + { + "epoch": 3.047152847152847, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 3815 + }, + { + "epoch": 3.047952047952048, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 3816 + }, + { + "epoch": 3.0487512487512487, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 3817 + }, + { + "epoch": 3.0495504495504497, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 3818 + }, + { + "epoch": 3.0503496503496503, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 3819 + }, + { + "epoch": 3.0511488511488514, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 3820 + }, + { + "epoch": 3.051948051948052, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 3821 + }, + { + "epoch": 3.0527472527472526, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9124, + "step": 3822 + }, + { + "epoch": 3.0535464535464536, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 3823 + }, + { + "epoch": 3.054345654345654, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 3824 + }, + { + "epoch": 3.0551448551448552, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 3825 + }, + { + "epoch": 3.055944055944056, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 3826 + }, + { + "epoch": 3.056743256743257, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9105, + "step": 3827 + }, + { + "epoch": 3.0575424575424575, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 3828 + }, + { + "epoch": 3.0583416583416585, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 3829 + }, + { + "epoch": 3.059140859140859, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + 
"loss": 0.9006, + "step": 3830 + }, + { + "epoch": 3.05994005994006, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 3831 + }, + { + "epoch": 3.0607392607392607, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 3832 + }, + { + "epoch": 3.0615384615384613, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 3833 + }, + { + "epoch": 3.0623376623376624, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 3834 + }, + { + "epoch": 3.063136863136863, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 3835 + }, + { + "epoch": 3.063936063936064, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 3836 + }, + { + "epoch": 3.0647352647352646, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 3837 + }, + { + "epoch": 3.0655344655344656, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 3838 + }, + { + "epoch": 3.066333666333666, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 3839 + }, + { + "epoch": 3.0671328671328673, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 3840 + }, + { + "epoch": 3.067932067932068, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 3841 + }, + { + "epoch": 3.068731268731269, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9061, + "step": 3842 + }, + { + "epoch": 3.0695304695304695, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9033, + "step": 3843 + }, + { + "epoch": 3.0703296703296705, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 3844 + }, + { + "epoch": 3.071128871128871, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 3845 + }, + { + "epoch": 3.071928071928072, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 3846 + }, + { + "epoch": 3.0727272727272728, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9098, + "step": 3847 + }, + { + "epoch": 3.0735264735264733, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 3848 + }, + { + "epoch": 3.0743256743256744, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 3849 + }, + { + "epoch": 3.075124875124875, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9121, + "step": 3850 + }, + { + "epoch": 3.075924075924076, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 3851 + }, + { + "epoch": 3.0767232767232766, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 3852 + }, + { + "epoch": 3.0775224775224777, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 3853 + }, + { + "epoch": 3.0783216783216782, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 3854 + }, + { + "epoch": 3.0791208791208793, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 3855 + }, + { + "epoch": 3.07992007992008, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9064, + "step": 3856 + }, + { + "epoch": 3.080719280719281, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 3857 + }, + { + "epoch": 3.0815184815184815, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 
0.9066, + "step": 3858 + }, + { + "epoch": 3.082317682317682, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 3859 + }, + { + "epoch": 3.083116883116883, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 3860 + }, + { + "epoch": 3.0839160839160837, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.906, + "step": 3861 + }, + { + "epoch": 3.0847152847152848, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 3862 + }, + { + "epoch": 3.0855144855144854, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 3863 + }, + { + "epoch": 3.0863136863136864, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 3864 + }, + { + "epoch": 3.087112887112887, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9097, + "step": 3865 + }, + { + "epoch": 3.087912087912088, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 3866 + }, + { + "epoch": 3.0887112887112886, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 3867 + }, + { + "epoch": 3.0895104895104897, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 3868 + }, + { + "epoch": 3.0903096903096903, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 3869 + }, + { + "epoch": 3.0911088911088913, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 3870 + }, + { + "epoch": 3.091908091908092, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 3871 + }, + { + "epoch": 3.0927072927072925, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9033, + "step": 3872 + }, + { + "epoch": 3.0935064935064935, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 3873 + }, + { + "epoch": 3.094305694305694, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9138, + "step": 3874 + }, + { + "epoch": 3.095104895104895, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 3875 + }, + { + "epoch": 3.0959040959040958, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9123, + "step": 3876 + }, + { + "epoch": 3.096703296703297, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 3877 + }, + { + "epoch": 3.0975024975024974, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 3878 + }, + { + "epoch": 3.0983016983016984, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.906, + "step": 3879 + }, + { + "epoch": 3.099100899100899, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 3880 + }, + { + "epoch": 3.0999000999001, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9098, + "step": 3881 + }, + { + "epoch": 3.1006993006993007, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 3882 + }, + { + "epoch": 3.1014985014985017, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9053, + "step": 3883 + }, + { + "epoch": 3.1022977022977023, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 3884 + }, + { + "epoch": 3.103096903096903, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 3885 + }, + { + "epoch": 3.103896103896104, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9046, + 
"step": 3886 + }, + { + "epoch": 3.1046953046953045, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 3887 + }, + { + "epoch": 3.1054945054945056, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 3888 + }, + { + "epoch": 3.106293706293706, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 3889 + }, + { + "epoch": 3.107092907092907, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 3890 + }, + { + "epoch": 3.107892107892108, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 3891 + }, + { + "epoch": 3.108691308691309, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9069, + "step": 3892 + }, + { + "epoch": 3.1094905094905094, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9158, + "step": 3893 + }, + { + "epoch": 3.1102897102897105, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9141, + "step": 3894 + }, + { + "epoch": 3.111088911088911, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 3895 + }, + { + "epoch": 3.111888111888112, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 3896 + }, + { + "epoch": 3.1126873126873127, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 3897 + }, + { + "epoch": 3.1134865134865133, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 3898 + }, + { + "epoch": 3.1142857142857143, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 3899 + }, + { + "epoch": 3.115084915084915, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 3900 + }, + { + "epoch": 3.115884115884116, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 3901 + }, + { + "epoch": 3.1166833166833166, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 3902 + }, + { + "epoch": 3.1174825174825176, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 3903 + }, + { + "epoch": 3.118281718281718, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 3904 + }, + { + "epoch": 3.1190809190809192, + "grad_norm": 0.625, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 3905 + }, + { + "epoch": 3.11988011988012, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.905, + "step": 3906 + }, + { + "epoch": 3.120679320679321, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 3907 + }, + { + "epoch": 3.1214785214785215, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9074, + "step": 3908 + }, + { + "epoch": 3.1222777222777225, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9119, + "step": 3909 + }, + { + "epoch": 3.123076923076923, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 3910 + }, + { + "epoch": 3.1238761238761237, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9056, + "step": 3911 + }, + { + "epoch": 3.1246753246753247, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 3912 + }, + { + "epoch": 3.1254745254745253, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 3913 + }, + { + "epoch": 3.1262737262737263, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 3914 + }, + { + 
"epoch": 3.127072927072927, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 3915 + }, + { + "epoch": 3.127872127872128, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 3916 + }, + { + "epoch": 3.1286713286713286, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 3917 + }, + { + "epoch": 3.1294705294705296, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 3918 + }, + { + "epoch": 3.13026973026973, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 3919 + }, + { + "epoch": 3.1310689310689312, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 3920 + }, + { + "epoch": 3.131868131868132, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 3921 + }, + { + "epoch": 3.1326673326673324, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 3922 + }, + { + "epoch": 3.1334665334665335, + "grad_norm": 0.625, + "learning_rate": 0.0002, + "loss": 0.9057, + "step": 3923 + }, + { + "epoch": 3.134265734265734, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 3924 + }, + { + "epoch": 3.135064935064935, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 3925 + }, + { + "epoch": 3.1358641358641357, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 3926 + }, + { + "epoch": 3.1366633366633367, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9057, + "step": 3927 + }, + { + "epoch": 3.1374625374625373, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 3928 + }, + { + "epoch": 3.1382617382617384, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 3929 + }, + { + "epoch": 3.139060939060939, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 3930 + }, + { + "epoch": 3.13986013986014, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 3931 + }, + { + "epoch": 3.1406593406593406, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 3932 + }, + { + "epoch": 3.1414585414585416, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 3933 + }, + { + "epoch": 3.1422577422577422, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 3934 + }, + { + "epoch": 3.1430569430569433, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 3935 + }, + { + "epoch": 3.143856143856144, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 3936 + }, + { + "epoch": 3.1446553446553445, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 3937 + }, + { + "epoch": 3.1454545454545455, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 3938 + }, + { + "epoch": 3.146253746253746, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 3939 + }, + { + "epoch": 3.147052947052947, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 3940 + }, + { + "epoch": 3.1478521478521477, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 3941 + }, + { + "epoch": 3.1486513486513488, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 3942 + }, + { + "epoch": 
3.1494505494505494, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 3943 + }, + { + "epoch": 3.1502497502497504, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9092, + "step": 3944 + }, + { + "epoch": 3.151048951048951, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9075, + "step": 3945 + }, + { + "epoch": 3.151848151848152, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 3946 + }, + { + "epoch": 3.1526473526473526, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 3947 + }, + { + "epoch": 3.1534465534465532, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 3948 + }, + { + "epoch": 3.1542457542457543, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 3949 + }, + { + "epoch": 3.155044955044955, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 3950 + }, + { + "epoch": 3.155844155844156, + "grad_norm": 1.265625, + "learning_rate": 0.0002, + "loss": 0.9416, + "step": 3951 + }, + { + "epoch": 3.1566433566433565, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 3952 + }, + { + "epoch": 3.1574425574425575, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 3953 + }, + { + "epoch": 3.158241758241758, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 3954 + }, + { + "epoch": 3.159040959040959, + "grad_norm": 1.453125, + "learning_rate": 0.0002, + "loss": 0.9249, + "step": 3955 + }, + { + "epoch": 3.1598401598401598, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9126, + "step": 3956 + }, + { + "epoch": 3.160639360639361, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 3957 + }, + { + "epoch": 3.1614385614385614, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 3958 + }, + { + "epoch": 3.1622377622377624, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 3959 + }, + { + "epoch": 3.163036963036963, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 3960 + }, + { + "epoch": 3.163836163836164, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 3961 + }, + { + "epoch": 3.1646353646353647, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9075, + "step": 3962 + }, + { + "epoch": 3.1654345654345653, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9101, + "step": 3963 + }, + { + "epoch": 3.1662337662337663, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 3964 + }, + { + "epoch": 3.167032967032967, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 3965 + }, + { + "epoch": 3.167832167832168, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 3966 + }, + { + "epoch": 3.1686313686313685, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 3967 + }, + { + "epoch": 3.1694305694305696, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 3968 + }, + { + "epoch": 3.17022977022977, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9126, + "step": 3969 + }, + { + "epoch": 3.171028971028971, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 3970 + }, + { + "epoch": 
3.171828171828172, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 3971 + }, + { + "epoch": 3.172627372627373, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 3972 + }, + { + "epoch": 3.1734265734265734, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 3973 + }, + { + "epoch": 3.174225774225774, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 3974 + }, + { + "epoch": 3.175024975024975, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 3975 + }, + { + "epoch": 3.1758241758241756, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 3976 + }, + { + "epoch": 3.1766233766233767, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 3977 + }, + { + "epoch": 3.1774225774225773, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 3978 + }, + { + "epoch": 3.1782217782217783, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 3979 + }, + { + "epoch": 3.179020979020979, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 3980 + }, + { + "epoch": 3.17982017982018, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9121, + "step": 3981 + }, + { + "epoch": 3.1806193806193805, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 3982 + }, + { + "epoch": 3.1814185814185816, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 3983 + }, + { + "epoch": 3.182217782217782, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 3984 + }, + { + "epoch": 3.183016983016983, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 3985 + }, + { + "epoch": 3.183816183816184, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 3986 + }, + { + "epoch": 3.184615384615385, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9101, + "step": 3987 + }, + { + "epoch": 3.1854145854145854, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 3988 + }, + { + "epoch": 3.186213786213786, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 3989 + }, + { + "epoch": 3.187012987012987, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 3990 + }, + { + "epoch": 3.1878121878121877, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 3991 + }, + { + "epoch": 3.1886113886113887, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 3992 + }, + { + "epoch": 3.1894105894105893, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 3993 + }, + { + "epoch": 3.1902097902097903, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.9067, + "step": 3994 + }, + { + "epoch": 3.191008991008991, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 3995 + }, + { + "epoch": 3.191808191808192, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 3996 + }, + { + "epoch": 3.1926073926073926, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 3997 + }, + { + "epoch": 3.1934065934065936, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 3998 + }, + { + "epoch": 
3.194205794205794, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.9068, + "step": 3999 + }, + { + "epoch": 3.195004995004995, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 4000 + }, + { + "epoch": 3.195804195804196, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 4001 + }, + { + "epoch": 3.1966033966033964, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 4002 + }, + { + "epoch": 3.1974025974025975, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 4003 + }, + { + "epoch": 3.198201798201798, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.9105, + "step": 4004 + }, + { + "epoch": 3.199000999000999, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 4005 + }, + { + "epoch": 3.1998001998001997, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9085, + "step": 4006 + }, + { + "epoch": 3.2005994005994007, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.913, + "step": 4007 + }, + { + "epoch": 3.2013986013986013, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 4008 + }, + { + "epoch": 3.2021978021978024, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 4009 + }, + { + "epoch": 3.202997002997003, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 4010 + }, + { + "epoch": 3.203796203796204, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 4011 + }, + { + "epoch": 3.2045954045954046, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 4012 + }, + { + "epoch": 3.205394605394605, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 4013 + }, + { + "epoch": 3.2061938061938062, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 4014 + }, + { + "epoch": 3.206993006993007, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 4015 + }, + { + "epoch": 3.207792207792208, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 4016 + }, + { + "epoch": 3.2085914085914085, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9045, + "step": 4017 + }, + { + "epoch": 3.2093906093906095, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9122, + "step": 4018 + }, + { + "epoch": 3.21018981018981, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 4019 + }, + { + "epoch": 3.210989010989011, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 4020 + }, + { + "epoch": 3.2117882117882117, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.9136, + "step": 4021 + }, + { + "epoch": 3.2125874125874128, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 4022 + }, + { + "epoch": 3.2133866133866134, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 4023 + }, + { + "epoch": 3.2141858141858144, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9099, + "step": 4024 + }, + { + "epoch": 3.214985014985015, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 4025 + }, + { + "epoch": 3.2157842157842156, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 4026 + }, + { + "epoch": 
3.2165834165834166, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9045, + "step": 4027 + }, + { + "epoch": 3.217382617382617, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 4028 + }, + { + "epoch": 3.2181818181818183, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 4029 + }, + { + "epoch": 3.218981018981019, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9075, + "step": 4030 + }, + { + "epoch": 3.21978021978022, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 4031 + }, + { + "epoch": 3.2205794205794205, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 4032 + }, + { + "epoch": 3.2213786213786215, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 4033 + }, + { + "epoch": 3.222177822177822, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 4034 + }, + { + "epoch": 3.222977022977023, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 4035 + }, + { + "epoch": 3.2237762237762237, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9128, + "step": 4036 + }, + { + "epoch": 3.2245754245754243, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 4037 + }, + { + "epoch": 3.2253746253746254, + "grad_norm": 0.8203125, + "learning_rate": 0.0002, + "loss": 0.9145, + "step": 4038 + }, + { + "epoch": 3.226173826173826, + "grad_norm": 1.046875, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 4039 + }, + { + "epoch": 3.226973026973027, + "grad_norm": 1.40625, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 4040 + }, + { + "epoch": 3.2277722277722276, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9116, + "step": 4041 + }, + { + "epoch": 3.2285714285714286, + "grad_norm": 1.4921875, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 4042 + }, + { + "epoch": 3.2293706293706292, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 4043 + }, + { + "epoch": 3.2301698301698303, + "grad_norm": 1.2109375, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 4044 + }, + { + "epoch": 3.230969030969031, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 4045 + }, + { + "epoch": 3.231768231768232, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 4046 + }, + { + "epoch": 3.2325674325674325, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 4047 + }, + { + "epoch": 3.2333666333666335, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 4048 + }, + { + "epoch": 3.234165834165834, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9066, + "step": 4049 + }, + { + "epoch": 3.234965034965035, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 4050 + }, + { + "epoch": 3.2357642357642358, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 4051 + }, + { + "epoch": 3.2365634365634364, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 4052 + }, + { + "epoch": 3.2373626373626374, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 4053 + }, + { + "epoch": 3.238161838161838, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 4054 + }, + { + "epoch": 3.238961038961039, + "grad_norm": 
0.31640625, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 4055 + }, + { + "epoch": 3.2397602397602396, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 4056 + }, + { + "epoch": 3.2405594405594407, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 4057 + }, + { + "epoch": 3.2413586413586413, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 4058 + }, + { + "epoch": 3.2421578421578423, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 4059 + }, + { + "epoch": 3.242957042957043, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 4060 + }, + { + "epoch": 3.243756243756244, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9072, + "step": 4061 + }, + { + "epoch": 3.2445554445554445, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 4062 + }, + { + "epoch": 3.245354645354645, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 4063 + }, + { + "epoch": 3.246153846153846, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 4064 + }, + { + "epoch": 3.2469530469530468, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 4065 + }, + { + "epoch": 3.247752247752248, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 4066 + }, + { + "epoch": 3.2485514485514484, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9091, + "step": 4067 + }, + { + "epoch": 3.2493506493506494, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 4068 + }, + { + "epoch": 3.25014985014985, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 4069 + }, + { + "epoch": 3.250949050949051, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9067, + "step": 4070 + }, + { + "epoch": 3.2517482517482517, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 4071 + }, + { + "epoch": 3.2525474525474527, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 4072 + }, + { + "epoch": 3.2533466533466533, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 4073 + }, + { + "epoch": 3.2541458541458543, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 4074 + }, + { + "epoch": 3.254945054945055, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 4075 + }, + { + "epoch": 3.255744255744256, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 4076 + }, + { + "epoch": 3.2565434565434566, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 4077 + }, + { + "epoch": 3.257342657342657, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 4078 + }, + { + "epoch": 3.258141858141858, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 4079 + }, + { + "epoch": 3.258941058941059, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 4080 + }, + { + "epoch": 3.25974025974026, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 4081 + }, + { + "epoch": 3.2605394605394604, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 4082 + }, + { + "epoch": 3.2613386613386615, + "grad_norm": 0.306640625, + 
"learning_rate": 0.0002, + "loss": 0.8989, + "step": 4083 + }, + { + "epoch": 3.262137862137862, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 4084 + }, + { + "epoch": 3.262937062937063, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 4085 + }, + { + "epoch": 3.2637362637362637, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 4086 + }, + { + "epoch": 3.2645354645354647, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 4087 + }, + { + "epoch": 3.2653346653346653, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 4088 + }, + { + "epoch": 3.266133866133866, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.905, + "step": 4089 + }, + { + "epoch": 3.266933066933067, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 4090 + }, + { + "epoch": 3.2677322677322675, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 4091 + }, + { + "epoch": 3.2685314685314686, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 4092 + }, + { + "epoch": 3.269330669330669, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 4093 + }, + { + "epoch": 3.27012987012987, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 4094 + }, + { + "epoch": 3.270929070929071, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 4095 + }, + { + "epoch": 3.271728271728272, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 4096 + }, + { + "epoch": 3.2725274725274724, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.9064, + "step": 4097 + }, + { + "epoch": 3.2733266733266735, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 4098 + }, + { + "epoch": 3.274125874125874, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 4099 + }, + { + "epoch": 3.274925074925075, + "grad_norm": 1.5078125, + "learning_rate": 0.0002, + "loss": 0.9053, + "step": 4100 + }, + { + "epoch": 3.2757242757242757, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 4101 + }, + { + "epoch": 3.2765234765234768, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 4102 + }, + { + "epoch": 3.2773226773226773, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 4103 + }, + { + "epoch": 3.278121878121878, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.9068, + "step": 4104 + }, + { + "epoch": 3.278921078921079, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 4105 + }, + { + "epoch": 3.2797202797202796, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 4106 + }, + { + "epoch": 3.2805194805194806, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 4107 + }, + { + "epoch": 3.281318681318681, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 4108 + }, + { + "epoch": 3.2821178821178822, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 4109 + }, + { + "epoch": 3.282917082917083, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 4110 + }, + { + "epoch": 3.283716283716284, + "grad_norm": 0.26171875, + "learning_rate": 
0.0002, + "loss": 0.9035, + "step": 4111 + }, + { + "epoch": 3.2845154845154845, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 4112 + }, + { + "epoch": 3.2853146853146855, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 4113 + }, + { + "epoch": 3.286113886113886, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 4114 + }, + { + "epoch": 3.2869130869130867, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 4115 + }, + { + "epoch": 3.2877122877122877, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 4116 + }, + { + "epoch": 3.2885114885114883, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 4117 + }, + { + "epoch": 3.2893106893106894, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 4118 + }, + { + "epoch": 3.29010989010989, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 4119 + }, + { + "epoch": 3.290909090909091, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 4120 + }, + { + "epoch": 3.2917082917082916, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 4121 + }, + { + "epoch": 3.2925074925074926, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 4122 + }, + { + "epoch": 3.2933066933066932, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.9154, + "step": 4123 + }, + { + "epoch": 3.2941058941058943, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 4124 + }, + { + "epoch": 3.294905094905095, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 4125 + }, + { + "epoch": 3.2957042957042955, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 4126 + }, + { + "epoch": 3.2965034965034965, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 4127 + }, + { + "epoch": 3.2973026973026975, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 4128 + }, + { + "epoch": 3.298101898101898, + "grad_norm": 0.6875, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 4129 + }, + { + "epoch": 3.2989010989010987, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9101, + "step": 4130 + }, + { + "epoch": 3.2997002997002998, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 4131 + }, + { + "epoch": 3.3004995004995004, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9122, + "step": 4132 + }, + { + "epoch": 3.3012987012987014, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 4133 + }, + { + "epoch": 3.302097902097902, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.903, + "step": 4134 + }, + { + "epoch": 3.302897102897103, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 4135 + }, + { + "epoch": 3.3036963036963036, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 4136 + }, + { + "epoch": 3.3044955044955047, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9108, + "step": 4137 + }, + { + "epoch": 3.3052947052947053, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 4138 + }, + { + "epoch": 3.3060939060939063, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 
0.949, + "step": 4139 + }, + { + "epoch": 3.306893106893107, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 4140 + }, + { + "epoch": 3.3076923076923075, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9122, + "step": 4141 + }, + { + "epoch": 3.3084915084915085, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 4142 + }, + { + "epoch": 3.309290709290709, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 4143 + }, + { + "epoch": 3.31008991008991, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 4144 + }, + { + "epoch": 3.3108891108891108, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 4145 + }, + { + "epoch": 3.311688311688312, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 4146 + }, + { + "epoch": 3.3124875124875124, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 4147 + }, + { + "epoch": 3.3132867132867134, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 4148 + }, + { + "epoch": 3.314085914085914, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 4149 + }, + { + "epoch": 3.314885114885115, + "grad_norm": 0.9921875, + "learning_rate": 0.0002, + "loss": 0.9363, + "step": 4150 + }, + { + "epoch": 3.3156843156843157, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 4151 + }, + { + "epoch": 3.3164835164835162, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 4152 + }, + { + "epoch": 3.3172827172827173, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 4153 + }, + { + "epoch": 3.3180819180819183, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 4154 + }, + { + "epoch": 3.318881118881119, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 4155 + }, + { + "epoch": 3.3196803196803195, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9088, + "step": 4156 + }, + { + "epoch": 3.3204795204795206, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 4157 + }, + { + "epoch": 3.321278721278721, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 4158 + }, + { + "epoch": 3.322077922077922, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 4159 + }, + { + "epoch": 3.322877122877123, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 4160 + }, + { + "epoch": 3.323676323676324, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 4161 + }, + { + "epoch": 3.3244755244755244, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9056, + "step": 4162 + }, + { + "epoch": 3.3252747252747255, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 4163 + }, + { + "epoch": 3.326073926073926, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 4164 + }, + { + "epoch": 3.326873126873127, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 4165 + }, + { + "epoch": 3.3276723276723277, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 4166 + }, + { + "epoch": 3.3284715284715283, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 
4167 + }, + { + "epoch": 3.3292707292707293, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 4168 + }, + { + "epoch": 3.33006993006993, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 4169 + }, + { + "epoch": 3.330869130869131, + "grad_norm": 0.8515625, + "learning_rate": 0.0002, + "loss": 0.9158, + "step": 4170 + }, + { + "epoch": 3.3316683316683315, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 4171 + }, + { + "epoch": 3.3324675324675326, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 4172 + }, + { + "epoch": 3.333266733266733, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 4173 + }, + { + "epoch": 3.334065934065934, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 4174 + }, + { + "epoch": 3.334865134865135, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 4175 + }, + { + "epoch": 3.335664335664336, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 4176 + }, + { + "epoch": 3.3364635364635364, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 4177 + }, + { + "epoch": 3.337262737262737, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 4178 + }, + { + "epoch": 3.338061938061938, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 4179 + }, + { + "epoch": 3.338861138861139, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 4180 + }, + { + "epoch": 3.3396603396603397, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 4181 + }, + { + "epoch": 3.3404595404595403, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 4182 + }, + { + "epoch": 3.3412587412587413, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 4183 + }, + { + "epoch": 3.342057942057942, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 4184 + }, + { + "epoch": 3.342857142857143, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 4185 + }, + { + "epoch": 3.3436563436563436, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9088, + "step": 4186 + }, + { + "epoch": 3.3444555444555446, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 4187 + }, + { + "epoch": 3.345254745254745, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 4188 + }, + { + "epoch": 3.3460539460539462, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9075, + "step": 4189 + }, + { + "epoch": 3.346853146853147, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 4190 + }, + { + "epoch": 3.347652347652348, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 4191 + }, + { + "epoch": 3.3484515484515485, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 4192 + }, + { + "epoch": 3.349250749250749, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 4193 + }, + { + "epoch": 3.35004995004995, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.9133, + "step": 4194 + }, + { + "epoch": 3.3508491508491507, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 4195 + }, + { + 
"epoch": 3.3516483516483517, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 4196 + }, + { + "epoch": 3.3524475524475523, + "grad_norm": 0.65625, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 4197 + }, + { + "epoch": 3.3532467532467534, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 4198 + }, + { + "epoch": 3.354045954045954, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 4199 + }, + { + "epoch": 3.354845154845155, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 4200 + }, + { + "epoch": 3.3556443556443556, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9178, + "step": 4201 + }, + { + "epoch": 3.3564435564435566, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 4202 + }, + { + "epoch": 3.3572427572427572, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 4203 + }, + { + "epoch": 3.358041958041958, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 4204 + }, + { + "epoch": 3.358841158841159, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 4205 + }, + { + "epoch": 3.3596403596403595, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 4206 + }, + { + "epoch": 3.3604395604395605, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 4207 + }, + { + "epoch": 3.361238761238761, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 4208 + }, + { + "epoch": 3.362037962037962, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 4209 + }, + { + "epoch": 3.3628371628371627, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 4210 + }, + { + "epoch": 3.3636363636363638, + "grad_norm": 0.6640625, + "learning_rate": 0.0002, + "loss": 0.9185, + "step": 4211 + }, + { + "epoch": 3.3644355644355644, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 4212 + }, + { + "epoch": 3.3652347652347654, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 4213 + }, + { + "epoch": 3.366033966033966, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 4214 + }, + { + "epoch": 3.366833166833167, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 4215 + }, + { + "epoch": 3.3676323676323676, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 4216 + }, + { + "epoch": 3.3684315684315687, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 4217 + }, + { + "epoch": 3.3692307692307693, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 4218 + }, + { + "epoch": 3.37002997002997, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 4219 + }, + { + "epoch": 3.370829170829171, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 4220 + }, + { + "epoch": 3.3716283716283715, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 4221 + }, + { + "epoch": 3.3724275724275725, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 4222 + }, + { + "epoch": 3.373226773226773, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 4223 + }, + { + "epoch": 
3.374025974025974, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 4224 + }, + { + "epoch": 3.3748251748251747, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 4225 + }, + { + "epoch": 3.375624375624376, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 4226 + }, + { + "epoch": 3.3764235764235764, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 4227 + }, + { + "epoch": 3.3772227772227774, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 4228 + }, + { + "epoch": 3.378021978021978, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 4229 + }, + { + "epoch": 3.3788211788211786, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 4230 + }, + { + "epoch": 3.3796203796203796, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 4231 + }, + { + "epoch": 3.3804195804195802, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 4232 + }, + { + "epoch": 3.3812187812187813, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 4233 + }, + { + "epoch": 3.382017982017982, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 4234 + }, + { + "epoch": 3.382817182817183, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 4235 + }, + { + "epoch": 3.3836163836163835, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 4236 + }, + { + "epoch": 3.3844155844155845, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 4237 + }, + { + "epoch": 3.385214785214785, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 4238 + }, + { + "epoch": 3.386013986013986, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 4239 + }, + { + "epoch": 3.3868131868131868, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 4240 + }, + { + "epoch": 3.3876123876123874, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9075, + "step": 4241 + }, + { + "epoch": 3.3884115884115884, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 4242 + }, + { + "epoch": 3.3892107892107894, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 4243 + }, + { + "epoch": 3.39000999000999, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 4244 + }, + { + "epoch": 3.3908091908091906, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 4245 + }, + { + "epoch": 3.3916083916083917, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 4246 + }, + { + "epoch": 3.3924075924075923, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9094, + "step": 4247 + }, + { + "epoch": 3.3932067932067933, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9061, + "step": 4248 + }, + { + "epoch": 3.394005994005994, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.9072, + "step": 4249 + }, + { + "epoch": 3.394805194805195, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 4250 + }, + { + "epoch": 3.3956043956043955, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 4251 + }, + { + "epoch": 
3.3964035964035966, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 4252 + }, + { + "epoch": 3.397202797202797, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 4253 + }, + { + "epoch": 3.398001998001998, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 4254 + }, + { + "epoch": 3.398801198801199, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 4255 + }, + { + "epoch": 3.3996003996003994, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 4256 + }, + { + "epoch": 3.4003996003996004, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 4257 + }, + { + "epoch": 3.401198801198801, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 4258 + }, + { + "epoch": 3.401998001998002, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 4259 + }, + { + "epoch": 3.4027972027972027, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 4260 + }, + { + "epoch": 3.4035964035964037, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 4261 + }, + { + "epoch": 3.4043956043956043, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 4262 + }, + { + "epoch": 3.4051948051948053, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 4263 + }, + { + "epoch": 3.405994005994006, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 4264 + }, + { + "epoch": 3.406793206793207, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 4265 + }, + { + "epoch": 3.4075924075924076, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 4266 + }, + { + "epoch": 3.408391608391608, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9053, + "step": 4267 + }, + { + "epoch": 3.409190809190809, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 4268 + }, + { + "epoch": 3.4099900099900102, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 4269 + }, + { + "epoch": 3.410789210789211, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 4270 + }, + { + "epoch": 3.4115884115884114, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9131, + "step": 4271 + }, + { + "epoch": 3.4123876123876125, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 4272 + }, + { + "epoch": 3.413186813186813, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 4273 + }, + { + "epoch": 3.413986013986014, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 4274 + }, + { + "epoch": 3.4147852147852147, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.9045, + "step": 4275 + }, + { + "epoch": 3.4155844155844157, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 4276 + }, + { + "epoch": 3.4163836163836163, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 4277 + }, + { + "epoch": 3.4171828171828174, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 4278 + }, + { + "epoch": 3.417982017982018, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 4279 + }, + { + "epoch": 
3.418781218781219, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 4280 + }, + { + "epoch": 3.4195804195804196, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 4281 + }, + { + "epoch": 3.42037962037962, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 4282 + }, + { + "epoch": 3.421178821178821, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 4283 + }, + { + "epoch": 3.421978021978022, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 4284 + }, + { + "epoch": 3.422777222777223, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 4285 + }, + { + "epoch": 3.4235764235764234, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 4286 + }, + { + "epoch": 3.4243756243756245, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 4287 + }, + { + "epoch": 3.425174825174825, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 4288 + }, + { + "epoch": 3.425974025974026, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 4289 + }, + { + "epoch": 3.4267732267732267, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 4290 + }, + { + "epoch": 3.4275724275724277, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 4291 + }, + { + "epoch": 3.4283716283716283, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 4292 + }, + { + "epoch": 3.429170829170829, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 4293 + }, + { + "epoch": 3.42997002997003, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 4294 + }, + { + "epoch": 3.430769230769231, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.9045, + "step": 4295 + }, + { + "epoch": 3.4315684315684316, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 4296 + }, + { + "epoch": 3.432367632367632, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 4297 + }, + { + "epoch": 3.4331668331668332, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.9522, + "step": 4298 + }, + { + "epoch": 3.433966033966034, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 4299 + }, + { + "epoch": 3.434765234765235, + "grad_norm": 0.70703125, + "learning_rate": 0.0002, + "loss": 0.9427, + "step": 4300 + }, + { + "epoch": 3.4355644355644355, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 4301 + }, + { + "epoch": 3.4363636363636365, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 4302 + }, + { + "epoch": 3.437162837162837, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9058, + "step": 4303 + }, + { + "epoch": 3.437962037962038, + "grad_norm": 1.7265625, + "learning_rate": 0.0002, + "loss": 0.939, + "step": 4304 + }, + { + "epoch": 3.4387612387612387, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 4305 + }, + { + "epoch": 3.4395604395604398, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 4306 + }, + { + "epoch": 3.4403596403596404, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9099, + "step": 4307 + }, + { + "epoch": 3.441158841158841, + 
"grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 4308 + }, + { + "epoch": 3.441958041958042, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 4309 + }, + { + "epoch": 3.4427572427572426, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 4310 + }, + { + "epoch": 3.4435564435564436, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 4311 + }, + { + "epoch": 3.4443556443556442, + "grad_norm": 0.74609375, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 4312 + }, + { + "epoch": 3.4451548451548453, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 4313 + }, + { + "epoch": 3.445954045954046, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 4314 + }, + { + "epoch": 3.446753246753247, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 4315 + }, + { + "epoch": 3.4475524475524475, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 4316 + }, + { + "epoch": 3.4483516483516485, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 4317 + }, + { + "epoch": 3.449150849150849, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9089, + "step": 4318 + }, + { + "epoch": 3.4499500499500497, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 4319 + }, + { + "epoch": 3.4507492507492508, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 4320 + }, + { + "epoch": 3.4515484515484514, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 4321 + }, + { + "epoch": 3.4523476523476524, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 4322 + }, + { + "epoch": 3.453146853146853, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 4323 + }, + { + "epoch": 3.453946053946054, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 4324 + }, + { + "epoch": 3.4547452547452546, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 4325 + }, + { + "epoch": 3.4555444555444557, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 4326 + }, + { + "epoch": 3.4563436563436563, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 4327 + }, + { + "epoch": 3.4571428571428573, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 4328 + }, + { + "epoch": 3.457942057942058, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 4329 + }, + { + "epoch": 3.458741258741259, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 4330 + }, + { + "epoch": 3.4595404595404595, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 4331 + }, + { + "epoch": 3.4603396603396606, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 4332 + }, + { + "epoch": 3.461138861138861, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 4333 + }, + { + "epoch": 3.4619380619380618, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 4334 + }, + { + "epoch": 3.462737262737263, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 4335 + }, + { + "epoch": 3.4635364635364634, + "grad_norm": 0.419921875, + 
"learning_rate": 0.0002, + "loss": 0.9084, + "step": 4336 + }, + { + "epoch": 3.4643356643356644, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 4337 + }, + { + "epoch": 3.465134865134865, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.903, + "step": 4338 + }, + { + "epoch": 3.465934065934066, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 4339 + }, + { + "epoch": 3.4667332667332666, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 4340 + }, + { + "epoch": 3.4675324675324677, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 4341 + }, + { + "epoch": 3.4683316683316683, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 4342 + }, + { + "epoch": 3.4691308691308693, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 4343 + }, + { + "epoch": 3.46993006993007, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 4344 + }, + { + "epoch": 3.4707292707292705, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9053, + "step": 4345 + }, + { + "epoch": 3.4715284715284715, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 4346 + }, + { + "epoch": 3.472327672327672, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 4347 + }, + { + "epoch": 3.473126873126873, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 4348 + }, + { + "epoch": 3.4739260739260738, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 4349 + }, + { + "epoch": 3.474725274725275, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 4350 + }, + { + "epoch": 3.4755244755244754, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 4351 + }, + { + "epoch": 3.4763236763236764, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 4352 + }, + { + "epoch": 3.477122877122877, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 4353 + }, + { + "epoch": 3.477922077922078, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 4354 + }, + { + "epoch": 3.4787212787212787, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 4355 + }, + { + "epoch": 3.4795204795204797, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 4356 + }, + { + "epoch": 3.4803196803196803, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 4357 + }, + { + "epoch": 3.4811188811188813, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 4358 + }, + { + "epoch": 3.481918081918082, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 4359 + }, + { + "epoch": 3.4827172827172825, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 4360 + }, + { + "epoch": 3.4835164835164836, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 4361 + }, + { + "epoch": 3.484315684315684, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 4362 + }, + { + "epoch": 3.485114885114885, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 4363 + }, + { + "epoch": 3.485914085914086, + "grad_norm": 0.53515625, + "learning_rate": 
0.0002, + "loss": 0.8901, + "step": 4364 + }, + { + "epoch": 3.486713286713287, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9074, + "step": 4365 + }, + { + "epoch": 3.4875124875124874, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 4366 + }, + { + "epoch": 3.4883116883116885, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 4367 + }, + { + "epoch": 3.489110889110889, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 4368 + }, + { + "epoch": 3.48991008991009, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 4369 + }, + { + "epoch": 3.4907092907092907, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 4370 + }, + { + "epoch": 3.4915084915084913, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 4371 + }, + { + "epoch": 3.4923076923076923, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 4372 + }, + { + "epoch": 3.493106893106893, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 4373 + }, + { + "epoch": 3.493906093906094, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9115, + "step": 4374 + }, + { + "epoch": 3.4947052947052946, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 4375 + }, + { + "epoch": 3.4955044955044956, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 4376 + }, + { + "epoch": 3.496303696303696, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 4377 + }, + { + "epoch": 3.4971028971028972, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 4378 + }, + { + "epoch": 3.497902097902098, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 4379 + }, + { + "epoch": 3.498701298701299, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 4380 + }, + { + "epoch": 3.4995004995004995, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 4381 + }, + { + "epoch": 3.5002997002997, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 4382 + }, + { + "epoch": 3.501098901098901, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 4383 + }, + { + "epoch": 3.501898101898102, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9136, + "step": 4384 + }, + { + "epoch": 3.5026973026973027, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9085, + "step": 4385 + }, + { + "epoch": 3.5034965034965033, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9067, + "step": 4386 + }, + { + "epoch": 3.5042957042957044, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9103, + "step": 4387 + }, + { + "epoch": 3.505094905094905, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 4388 + }, + { + "epoch": 3.505894105894106, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 4389 + }, + { + "epoch": 3.5066933066933066, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 4390 + }, + { + "epoch": 3.5074925074925076, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 4391 + }, + { + "epoch": 3.508291708291708, + "grad_norm": 1.09375, + "learning_rate": 0.0002, + 
"loss": 0.9212, + "step": 4392 + }, + { + "epoch": 3.509090909090909, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 4393 + }, + { + "epoch": 3.50989010989011, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9118, + "step": 4394 + }, + { + "epoch": 3.510689310689311, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 4395 + }, + { + "epoch": 3.5114885114885115, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 4396 + }, + { + "epoch": 3.512287712287712, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 4397 + }, + { + "epoch": 3.513086913086913, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 4398 + }, + { + "epoch": 3.513886113886114, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 4399 + }, + { + "epoch": 3.5146853146853148, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 4400 + }, + { + "epoch": 3.5154845154845153, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 4401 + }, + { + "epoch": 3.5162837162837164, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 4402 + }, + { + "epoch": 3.517082917082917, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 4403 + }, + { + "epoch": 3.517882117882118, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 4404 + }, + { + "epoch": 3.5186813186813186, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 4405 + }, + { + "epoch": 3.5194805194805197, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 4406 + }, + { + "epoch": 3.5202797202797202, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 4407 + }, + { + "epoch": 3.521078921078921, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 4408 + }, + { + "epoch": 3.521878121878122, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 4409 + }, + { + "epoch": 3.522677322677323, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 4410 + }, + { + "epoch": 3.5234765234765235, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 4411 + }, + { + "epoch": 3.524275724275724, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 4412 + }, + { + "epoch": 3.525074925074925, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 4413 + }, + { + "epoch": 3.5258741258741257, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9058, + "step": 4414 + }, + { + "epoch": 3.526673326673327, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 4415 + }, + { + "epoch": 3.5274725274725274, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 4416 + }, + { + "epoch": 3.5282717282717284, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 4417 + }, + { + "epoch": 3.529070929070929, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 4418 + }, + { + "epoch": 3.5298701298701296, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 4419 + }, + { + "epoch": 3.5306693306693306, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 
4420 + }, + { + "epoch": 3.5314685314685317, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 4421 + }, + { + "epoch": 3.5322677322677323, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 4422 + }, + { + "epoch": 3.533066933066933, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 4423 + }, + { + "epoch": 3.533866133866134, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 4424 + }, + { + "epoch": 3.5346653346653345, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 4425 + }, + { + "epoch": 3.5354645354645355, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 4426 + }, + { + "epoch": 3.536263736263736, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 4427 + }, + { + "epoch": 3.537062937062937, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 4428 + }, + { + "epoch": 3.5378621378621378, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 4429 + }, + { + "epoch": 3.538661338661339, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 4430 + }, + { + "epoch": 3.5394605394605394, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 4431 + }, + { + "epoch": 3.5402597402597404, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 4432 + }, + { + "epoch": 3.541058941058941, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 4433 + }, + { + "epoch": 3.5418581418581416, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 4434 + }, + { + "epoch": 3.5426573426573427, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 4435 + }, + { + "epoch": 3.5434565434565437, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 4436 + }, + { + "epoch": 3.5442557442557443, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 4437 + }, + { + "epoch": 3.545054945054945, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 4438 + }, + { + "epoch": 3.545854145854146, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 4439 + }, + { + "epoch": 3.5466533466533465, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 4440 + }, + { + "epoch": 3.5474525474525476, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 4441 + }, + { + "epoch": 3.548251748251748, + "grad_norm": 0.2451171875, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 4442 + }, + { + "epoch": 3.549050949050949, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.9092, + "step": 4443 + }, + { + "epoch": 3.54985014985015, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 4444 + }, + { + "epoch": 3.5506493506493504, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 4445 + }, + { + "epoch": 3.5514485514485514, + "grad_norm": 0.2490234375, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 4446 + }, + { + "epoch": 3.5522477522477525, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 4447 + }, + { + "epoch": 3.553046953046953, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 4448 + }, + { + 
"epoch": 3.5538461538461537, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 4449 + }, + { + "epoch": 3.5546453546453547, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 4450 + }, + { + "epoch": 3.5554445554445553, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 4451 + }, + { + "epoch": 3.5562437562437563, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 4452 + }, + { + "epoch": 3.557042957042957, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 4453 + }, + { + "epoch": 3.557842157842158, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.906, + "step": 4454 + }, + { + "epoch": 3.5586413586413586, + "grad_norm": 0.8828125, + "learning_rate": 0.0002, + "loss": 0.9071, + "step": 4455 + }, + { + "epoch": 3.5594405594405596, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 4456 + }, + { + "epoch": 3.56023976023976, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 4457 + }, + { + "epoch": 3.5610389610389612, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 4458 + }, + { + "epoch": 3.561838161838162, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9049, + "step": 4459 + }, + { + "epoch": 3.5626373626373624, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 4460 + }, + { + "epoch": 3.5634365634365635, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 4461 + }, + { + "epoch": 3.5642357642357645, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 4462 + }, + { + "epoch": 3.565034965034965, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 4463 + }, + { + "epoch": 3.5658341658341657, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 4464 + }, + { + "epoch": 3.5666333666333667, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 4465 + }, + { + "epoch": 3.5674325674325673, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 4466 + }, + { + "epoch": 3.5682317682317684, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 4467 + }, + { + "epoch": 3.569030969030969, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9067, + "step": 4468 + }, + { + "epoch": 3.56983016983017, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 4469 + }, + { + "epoch": 3.5706293706293706, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 4470 + }, + { + "epoch": 3.571428571428571, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 4471 + }, + { + "epoch": 3.572227772227772, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 4472 + }, + { + "epoch": 3.5730269730269733, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 4473 + }, + { + "epoch": 3.573826173826174, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 4474 + }, + { + "epoch": 3.5746253746253744, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 4475 + }, + { + "epoch": 3.5754245754245755, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 4476 + }, + { + "epoch": 
3.576223776223776, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 4477 + }, + { + "epoch": 3.577022977022977, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 4478 + }, + { + "epoch": 3.5778221778221777, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 4479 + }, + { + "epoch": 3.5786213786213787, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 4480 + }, + { + "epoch": 3.5794205794205793, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 4481 + }, + { + "epoch": 3.5802197802197804, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 4482 + }, + { + "epoch": 3.581018981018981, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 4483 + }, + { + "epoch": 3.581818181818182, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9074, + "step": 4484 + }, + { + "epoch": 3.5826173826173826, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 4485 + }, + { + "epoch": 3.583416583416583, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 4486 + }, + { + "epoch": 3.5842157842157842, + "grad_norm": 0.76953125, + "learning_rate": 0.0002, + "loss": 0.9152, + "step": 4487 + }, + { + "epoch": 3.5850149850149853, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 4488 + }, + { + "epoch": 3.585814185814186, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 4489 + }, + { + "epoch": 3.5866133866133865, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9088, + "step": 4490 + }, + { + "epoch": 3.5874125874125875, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 4491 + }, + { + "epoch": 3.588211788211788, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 4492 + }, + { + "epoch": 3.589010989010989, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 4493 + }, + { + "epoch": 3.5898101898101897, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 4494 + }, + { + "epoch": 3.5906093906093908, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 4495 + }, + { + "epoch": 3.5914085914085914, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 4496 + }, + { + "epoch": 3.592207792207792, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 4497 + }, + { + "epoch": 3.593006993006993, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 4498 + }, + { + "epoch": 3.593806193806194, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 4499 + }, + { + "epoch": 3.5946053946053946, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 4500 + }, + { + "epoch": 3.5954045954045952, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 4501 + }, + { + "epoch": 3.5962037962037963, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9049, + "step": 4502 + }, + { + "epoch": 3.597002997002997, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 4503 + }, + { + "epoch": 3.597802197802198, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 4504 + }, + { + "epoch": 3.5986013986013985, 
+ "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 4505 + }, + { + "epoch": 3.5994005994005995, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 4506 + }, + { + "epoch": 3.6001998001998, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 4507 + }, + { + "epoch": 3.600999000999001, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 4508 + }, + { + "epoch": 3.6017982017982018, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 4509 + }, + { + "epoch": 3.602597402597403, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 4510 + }, + { + "epoch": 3.6033966033966034, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.9033, + "step": 4511 + }, + { + "epoch": 3.604195804195804, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 4512 + }, + { + "epoch": 3.604995004995005, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9096, + "step": 4513 + }, + { + "epoch": 3.605794205794206, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 4514 + }, + { + "epoch": 3.6065934065934067, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 4515 + }, + { + "epoch": 3.6073926073926073, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 4516 + }, + { + "epoch": 3.6081918081918083, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 4517 + }, + { + "epoch": 3.608991008991009, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 4518 + }, + { + "epoch": 3.60979020979021, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 4519 + }, + { + "epoch": 3.6105894105894105, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 4520 + }, + { + "epoch": 3.6113886113886116, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.9056, + "step": 4521 + }, + { + "epoch": 3.612187812187812, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 4522 + }, + { + "epoch": 3.6129870129870127, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 4523 + }, + { + "epoch": 3.613786213786214, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 4524 + }, + { + "epoch": 3.614585414585415, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 4525 + }, + { + "epoch": 3.6153846153846154, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 4526 + }, + { + "epoch": 3.616183816183816, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 4527 + }, + { + "epoch": 3.616983016983017, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 4528 + }, + { + "epoch": 3.6177822177822176, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 4529 + }, + { + "epoch": 3.6185814185814187, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 4530 + }, + { + "epoch": 3.6193806193806193, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 4531 + }, + { + "epoch": 3.6201798201798203, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 4532 + }, + { + "epoch": 3.620979020979021, + "grad_norm": 
0.55859375, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 4533 + }, + { + "epoch": 3.6217782217782215, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 4534 + }, + { + "epoch": 3.6225774225774225, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 4535 + }, + { + "epoch": 3.6233766233766236, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 4536 + }, + { + "epoch": 3.624175824175824, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 4537 + }, + { + "epoch": 3.6249750249750248, + "grad_norm": 0.64453125, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 4538 + }, + { + "epoch": 3.625774225774226, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 4539 + }, + { + "epoch": 3.626573426573427, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 4540 + }, + { + "epoch": 3.6273726273726274, + "grad_norm": 0.7109375, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 4541 + }, + { + "epoch": 3.628171828171828, + "grad_norm": 0.77734375, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 4542 + }, + { + "epoch": 3.628971028971029, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 4543 + }, + { + "epoch": 3.6297702297702297, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 4544 + }, + { + "epoch": 3.6305694305694307, + "grad_norm": 0.92578125, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 4545 + }, + { + "epoch": 3.6313686313686313, + "grad_norm": 0.92578125, + "learning_rate": 0.0002, + "loss": 0.9159, + "step": 4546 + }, + { + "epoch": 3.6321678321678323, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 4547 + }, + { + "epoch": 3.632967032967033, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9066, + "step": 4548 + }, + { + "epoch": 3.6337662337662335, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9138, + "step": 4549 + }, + { + "epoch": 3.6345654345654346, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 4550 + }, + { + "epoch": 3.6353646353646356, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 4551 + }, + { + "epoch": 3.636163836163836, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 4552 + }, + { + "epoch": 3.636963036963037, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 4553 + }, + { + "epoch": 3.637762237762238, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 4554 + }, + { + "epoch": 3.6385614385614384, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 4555 + }, + { + "epoch": 3.6393606393606395, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 4556 + }, + { + "epoch": 3.64015984015984, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 4557 + }, + { + "epoch": 3.640959040959041, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 4558 + }, + { + "epoch": 3.6417582417582417, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 4559 + }, + { + "epoch": 3.6425574425574423, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 4560 + }, + { + "epoch": 3.6433566433566433, + "grad_norm": 0.302734375, + 
"learning_rate": 0.0002, + "loss": 0.8905, + "step": 4561 + }, + { + "epoch": 3.6441558441558444, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 4562 + }, + { + "epoch": 3.644955044955045, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 4563 + }, + { + "epoch": 3.6457542457542456, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 4564 + }, + { + "epoch": 3.6465534465534466, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 4565 + }, + { + "epoch": 3.647352647352647, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 4566 + }, + { + "epoch": 3.6481518481518482, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 4567 + }, + { + "epoch": 3.648951048951049, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 4568 + }, + { + "epoch": 3.64975024975025, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 4569 + }, + { + "epoch": 3.6505494505494505, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 4570 + }, + { + "epoch": 3.6513486513486515, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 4571 + }, + { + "epoch": 3.652147852147852, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8808, + "step": 4572 + }, + { + "epoch": 3.652947052947053, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 4573 + }, + { + "epoch": 3.6537462537462537, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 4574 + }, + { + "epoch": 3.6545454545454543, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 4575 + }, + { + "epoch": 3.6553446553446554, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 4576 + }, + { + "epoch": 3.6561438561438564, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 4577 + }, + { + "epoch": 3.656943056943057, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.9426, + "step": 4578 + }, + { + "epoch": 3.6577422577422576, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 4579 + }, + { + "epoch": 3.6585414585414586, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 4580 + }, + { + "epoch": 3.659340659340659, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 4581 + }, + { + "epoch": 3.6601398601398603, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 4582 + }, + { + "epoch": 3.660939060939061, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 4583 + }, + { + "epoch": 3.661738261738262, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 4584 + }, + { + "epoch": 3.6625374625374625, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 4585 + }, + { + "epoch": 3.663336663336663, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 4586 + }, + { + "epoch": 3.664135864135864, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 4587 + }, + { + "epoch": 3.664935064935065, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 4588 + }, + { + "epoch": 3.6657342657342658, + "grad_norm": 0.353515625, + 
"learning_rate": 0.0002, + "loss": 0.8938, + "step": 4589 + }, + { + "epoch": 3.6665334665334663, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 4590 + }, + { + "epoch": 3.6673326673326674, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 4591 + }, + { + "epoch": 3.668131868131868, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 4592 + }, + { + "epoch": 3.668931068931069, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 4593 + }, + { + "epoch": 3.6697302697302696, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 4594 + }, + { + "epoch": 3.6705294705294707, + "grad_norm": 0.25, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 4595 + }, + { + "epoch": 3.6713286713286712, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 4596 + }, + { + "epoch": 3.6721278721278723, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 4597 + }, + { + "epoch": 3.672927072927073, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 4598 + }, + { + "epoch": 3.673726273726274, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 4599 + }, + { + "epoch": 3.6745254745254745, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 4600 + }, + { + "epoch": 3.675324675324675, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 4601 + }, + { + "epoch": 3.676123876123876, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 4602 + }, + { + "epoch": 3.676923076923077, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8822, + "step": 4603 + }, + { + "epoch": 3.6777222777222778, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 4604 + }, + { + "epoch": 3.6785214785214784, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 4605 + }, + { + "epoch": 3.6793206793206794, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 4606 + }, + { + "epoch": 3.68011988011988, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.9049, + "step": 4607 + }, + { + "epoch": 3.680919080919081, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 4608 + }, + { + "epoch": 3.6817182817182816, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 4609 + }, + { + "epoch": 3.6825174825174827, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 4610 + }, + { + "epoch": 3.6833166833166833, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 4611 + }, + { + "epoch": 3.684115884115884, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 4612 + }, + { + "epoch": 3.684915084915085, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 4613 + }, + { + "epoch": 3.685714285714286, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 4614 + }, + { + "epoch": 3.6865134865134865, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 4615 + }, + { + "epoch": 3.687312687312687, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 4616 + }, + { + "epoch": 3.688111888111888, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + 
"loss": 0.8958, + "step": 4617 + }, + { + "epoch": 3.6889110889110888, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 4618 + }, + { + "epoch": 3.68971028971029, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 4619 + }, + { + "epoch": 3.6905094905094904, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 4620 + }, + { + "epoch": 3.6913086913086914, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9106, + "step": 4621 + }, + { + "epoch": 3.692107892107892, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 4622 + }, + { + "epoch": 3.692907092907093, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 4623 + }, + { + "epoch": 3.6937062937062937, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 4624 + }, + { + "epoch": 3.6945054945054947, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.903, + "step": 4625 + }, + { + "epoch": 3.6953046953046953, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 4626 + }, + { + "epoch": 3.696103896103896, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 4627 + }, + { + "epoch": 3.696903096903097, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 4628 + }, + { + "epoch": 3.697702297702298, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 4629 + }, + { + "epoch": 3.6985014985014986, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 4630 + }, + { + "epoch": 3.699300699300699, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 4631 + }, + { + "epoch": 3.7000999000999, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 4632 + }, + { + "epoch": 3.700899100899101, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 4633 + }, + { + "epoch": 3.701698301698302, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 4634 + }, + { + "epoch": 3.7024975024975024, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 4635 + }, + { + "epoch": 3.7032967032967035, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 4636 + }, + { + "epoch": 3.704095904095904, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 4637 + }, + { + "epoch": 3.7048951048951047, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 4638 + }, + { + "epoch": 3.7056943056943057, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 4639 + }, + { + "epoch": 3.7064935064935067, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 4640 + }, + { + "epoch": 3.7072927072927073, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 4641 + }, + { + "epoch": 3.708091908091908, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 4642 + }, + { + "epoch": 3.708891108891109, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 4643 + }, + { + "epoch": 3.7096903096903096, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 4644 + }, + { + "epoch": 3.7104895104895106, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9054, + 
"step": 4645 + }, + { + "epoch": 3.711288711288711, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 4646 + }, + { + "epoch": 3.7120879120879122, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 4647 + }, + { + "epoch": 3.712887112887113, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 4648 + }, + { + "epoch": 3.7136863136863134, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 4649 + }, + { + "epoch": 3.7144855144855145, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 4650 + }, + { + "epoch": 3.7152847152847155, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 4651 + }, + { + "epoch": 3.716083916083916, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 4652 + }, + { + "epoch": 3.7168831168831167, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 4653 + }, + { + "epoch": 3.7176823176823177, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 4654 + }, + { + "epoch": 3.7184815184815188, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 4655 + }, + { + "epoch": 3.7192807192807193, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 4656 + }, + { + "epoch": 3.72007992007992, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 4657 + }, + { + "epoch": 3.720879120879121, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 4658 + }, + { + "epoch": 3.7216783216783216, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 4659 + }, + { + "epoch": 3.7224775224775226, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 4660 + }, + { + "epoch": 3.723276723276723, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 4661 + }, + { + "epoch": 3.7240759240759242, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 4662 + }, + { + "epoch": 3.724875124875125, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9097, + "step": 4663 + }, + { + "epoch": 3.7256743256743254, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 4664 + }, + { + "epoch": 3.7264735264735265, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 4665 + }, + { + "epoch": 3.7272727272727275, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 4666 + }, + { + "epoch": 3.728071928071928, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.883, + "step": 4667 + }, + { + "epoch": 3.7288711288711287, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 4668 + }, + { + "epoch": 3.7296703296703297, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 4669 + }, + { + "epoch": 3.7304695304695303, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 4670 + }, + { + "epoch": 3.7312687312687314, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 4671 + }, + { + "epoch": 3.732067932067932, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 4672 + }, + { + "epoch": 3.732867132867133, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.937, + "step": 4673 + 
}, + { + "epoch": 3.7336663336663336, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 4674 + }, + { + "epoch": 3.734465534465534, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 4675 + }, + { + "epoch": 3.7352647352647352, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 4676 + }, + { + "epoch": 3.7360639360639363, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 4677 + }, + { + "epoch": 3.736863136863137, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 4678 + }, + { + "epoch": 3.7376623376623375, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 4679 + }, + { + "epoch": 3.7384615384615385, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9403, + "step": 4680 + }, + { + "epoch": 3.739260739260739, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.9033, + "step": 4681 + }, + { + "epoch": 3.74005994005994, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 4682 + }, + { + "epoch": 3.7408591408591407, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 4683 + }, + { + "epoch": 3.7416583416583418, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 4684 + }, + { + "epoch": 3.7424575424575424, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 4685 + }, + { + "epoch": 3.7432567432567434, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 4686 + }, + { + "epoch": 3.744055944055944, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 4687 + }, + { + "epoch": 3.744855144855145, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.911, + "step": 4688 + }, + { + "epoch": 3.7456543456543456, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 4689 + }, + { + "epoch": 3.7464535464535462, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 4690 + }, + { + "epoch": 3.7472527472527473, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 4691 + }, + { + "epoch": 3.7480519480519483, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 4692 + }, + { + "epoch": 3.748851148851149, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 4693 + }, + { + "epoch": 3.7496503496503495, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 4694 + }, + { + "epoch": 3.7504495504495505, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 4695 + }, + { + "epoch": 3.751248751248751, + "grad_norm": 1.875, + "learning_rate": 0.0002, + "loss": 0.9421, + "step": 4696 + }, + { + "epoch": 3.752047952047952, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 4697 + }, + { + "epoch": 3.7528471528471528, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 4698 + }, + { + "epoch": 3.753646353646354, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 4699 + }, + { + "epoch": 3.7544455544455544, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 4700 + }, + { + "epoch": 3.755244755244755, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 4701 + }, + { + "epoch": 
3.756043956043956, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 4702 + }, + { + "epoch": 3.756843156843157, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 4703 + }, + { + "epoch": 3.7576423576423577, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 4704 + }, + { + "epoch": 3.7584415584415583, + "grad_norm": 1.015625, + "learning_rate": 0.0002, + "loss": 0.9157, + "step": 4705 + }, + { + "epoch": 3.7592407592407593, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 4706 + }, + { + "epoch": 3.76003996003996, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 4707 + }, + { + "epoch": 3.760839160839161, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 4708 + }, + { + "epoch": 3.7616383616383615, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 4709 + }, + { + "epoch": 3.7624375624375626, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 4710 + }, + { + "epoch": 3.763236763236763, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 4711 + }, + { + "epoch": 3.764035964035964, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 4712 + }, + { + "epoch": 3.764835164835165, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 4713 + }, + { + "epoch": 3.765634365634366, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 4714 + }, + { + "epoch": 3.7664335664335664, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 4715 + }, + { + "epoch": 3.767232767232767, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 4716 + }, + { + "epoch": 3.768031968031968, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9061, + "step": 4717 + }, + { + "epoch": 3.768831168831169, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9058, + "step": 4718 + }, + { + "epoch": 3.7696303696303697, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 4719 + }, + { + "epoch": 3.7704295704295703, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 4720 + }, + { + "epoch": 3.7712287712287713, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9076, + "step": 4721 + }, + { + "epoch": 3.772027972027972, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 4722 + }, + { + "epoch": 3.772827172827173, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8825, + "step": 4723 + }, + { + "epoch": 3.7736263736263735, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 4724 + }, + { + "epoch": 3.7744255744255746, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9075, + "step": 4725 + }, + { + "epoch": 3.775224775224775, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 4726 + }, + { + "epoch": 3.7760239760239758, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 4727 + }, + { + "epoch": 3.776823176823177, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 4728 + }, + { + "epoch": 3.777622377622378, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 4729 + }, + { + "epoch": 
3.7784215784215784, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 4730 + }, + { + "epoch": 3.779220779220779, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 4731 + }, + { + "epoch": 3.78001998001998, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 4732 + }, + { + "epoch": 3.7808191808191807, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 4733 + }, + { + "epoch": 3.7816183816183817, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 4734 + }, + { + "epoch": 3.7824175824175823, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 4735 + }, + { + "epoch": 3.7832167832167833, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 4736 + }, + { + "epoch": 3.784015984015984, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 4737 + }, + { + "epoch": 3.784815184815185, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 4738 + }, + { + "epoch": 3.7856143856143856, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 4739 + }, + { + "epoch": 3.7864135864135866, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 4740 + }, + { + "epoch": 3.787212787212787, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.905, + "step": 4741 + }, + { + "epoch": 3.788011988011988, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 4742 + }, + { + "epoch": 3.788811188811189, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8841, + "step": 4743 + }, + { + "epoch": 3.78961038961039, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 4744 + }, + { + "epoch": 3.7904095904095905, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.912, + "step": 4745 + }, + { + "epoch": 3.791208791208791, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 4746 + }, + { + "epoch": 3.792007992007992, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.906, + "step": 4747 + }, + { + "epoch": 3.7928071928071927, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 4748 + }, + { + "epoch": 3.7936063936063937, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 4749 + }, + { + "epoch": 3.7944055944055943, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 4750 + }, + { + "epoch": 3.7952047952047954, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 4751 + }, + { + "epoch": 3.796003996003996, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 4752 + }, + { + "epoch": 3.7968031968031966, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 4753 + }, + { + "epoch": 3.7976023976023976, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9057, + "step": 4754 + }, + { + "epoch": 3.7984015984015986, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 4755 + }, + { + "epoch": 3.7992007992007992, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 4756 + }, + { + "epoch": 3.8, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 4757 + }, + { + "epoch": 3.800799200799201, + 
"grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 4758 + }, + { + "epoch": 3.8015984015984015, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 4759 + }, + { + "epoch": 3.8023976023976025, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 4760 + }, + { + "epoch": 3.803196803196803, + "grad_norm": 0.92578125, + "learning_rate": 0.0002, + "loss": 0.9148, + "step": 4761 + }, + { + "epoch": 3.803996003996004, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 4762 + }, + { + "epoch": 3.8047952047952047, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9075, + "step": 4763 + }, + { + "epoch": 3.8055944055944058, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9386, + "step": 4764 + }, + { + "epoch": 3.8063936063936064, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 4765 + }, + { + "epoch": 3.8071928071928074, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 4766 + }, + { + "epoch": 3.807992007992008, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 4767 + }, + { + "epoch": 3.8087912087912086, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 4768 + }, + { + "epoch": 3.8095904095904096, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9109, + "step": 4769 + }, + { + "epoch": 3.8103896103896107, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 4770 + }, + { + "epoch": 3.8111888111888113, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 4771 + }, + { + "epoch": 3.811988011988012, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 4772 + }, + { + "epoch": 3.812787212787213, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 4773 + }, + { + "epoch": 3.8135864135864135, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 4774 + }, + { + "epoch": 3.8143856143856145, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 4775 + }, + { + "epoch": 3.815184815184815, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 4776 + }, + { + "epoch": 3.815984015984016, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 4777 + }, + { + "epoch": 3.8167832167832167, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 4778 + }, + { + "epoch": 3.8175824175824173, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 4779 + }, + { + "epoch": 3.8183816183816184, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 4780 + }, + { + "epoch": 3.8191808191808194, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 4781 + }, + { + "epoch": 3.81998001998002, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 4782 + }, + { + "epoch": 3.8207792207792206, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 4783 + }, + { + "epoch": 3.8215784215784216, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 4784 + }, + { + "epoch": 3.8223776223776222, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 4785 + }, + { + "epoch": 3.8231768231768233, + "grad_norm": 
0.2890625, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 4786 + }, + { + "epoch": 3.823976023976024, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 4787 + }, + { + "epoch": 3.824775224775225, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 4788 + }, + { + "epoch": 3.8255744255744255, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 4789 + }, + { + "epoch": 3.826373626373626, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 4790 + }, + { + "epoch": 3.827172827172827, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 4791 + }, + { + "epoch": 3.827972027972028, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 4792 + }, + { + "epoch": 3.8287712287712288, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 4793 + }, + { + "epoch": 3.8295704295704294, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 4794 + }, + { + "epoch": 3.8303696303696304, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 4795 + }, + { + "epoch": 3.8311688311688314, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 4796 + }, + { + "epoch": 3.831968031968032, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 4797 + }, + { + "epoch": 3.8327672327672326, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 4798 + }, + { + "epoch": 3.8335664335664337, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 4799 + }, + { + "epoch": 3.8343656343656343, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 4800 + }, + { + "epoch": 3.8351648351648353, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 4801 + }, + { + "epoch": 3.835964035964036, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 4802 + }, + { + "epoch": 3.836763236763237, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 4803 + }, + { + "epoch": 3.8375624375624375, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 4804 + }, + { + "epoch": 3.838361638361638, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 4805 + }, + { + "epoch": 3.839160839160839, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 4806 + }, + { + "epoch": 3.83996003996004, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 4807 + }, + { + "epoch": 3.840759240759241, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 4808 + }, + { + "epoch": 3.8415584415584414, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 4809 + }, + { + "epoch": 3.8423576423576424, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 4810 + }, + { + "epoch": 3.843156843156843, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 4811 + }, + { + "epoch": 3.843956043956044, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 4812 + }, + { + "epoch": 3.8447552447552447, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 4813 + }, + { + "epoch": 3.8455544455544457, + "grad_norm": 0.4453125, + 
"learning_rate": 0.0002, + "loss": 0.9005, + "step": 4814 + }, + { + "epoch": 3.8463536463536463, + "grad_norm": 0.71484375, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 4815 + }, + { + "epoch": 3.847152847152847, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 4816 + }, + { + "epoch": 3.847952047952048, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 4817 + }, + { + "epoch": 3.848751248751249, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 4818 + }, + { + "epoch": 3.8495504495504496, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 4819 + }, + { + "epoch": 3.85034965034965, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 4820 + }, + { + "epoch": 3.851148851148851, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 4821 + }, + { + "epoch": 3.851948051948052, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 4822 + }, + { + "epoch": 3.852747252747253, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 4823 + }, + { + "epoch": 3.8535464535464534, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 4824 + }, + { + "epoch": 3.8543456543456545, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 4825 + }, + { + "epoch": 3.855144855144855, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 4826 + }, + { + "epoch": 3.855944055944056, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 4827 + }, + { + "epoch": 3.8567432567432567, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 4828 + }, + { + "epoch": 3.8575424575424577, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 4829 + }, + { + "epoch": 3.8583416583416583, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 4830 + }, + { + "epoch": 3.859140859140859, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 4831 + }, + { + "epoch": 3.85994005994006, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 4832 + }, + { + "epoch": 3.860739260739261, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 4833 + }, + { + "epoch": 3.8615384615384616, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 4834 + }, + { + "epoch": 3.862337662337662, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 4835 + }, + { + "epoch": 3.863136863136863, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 4836 + }, + { + "epoch": 3.863936063936064, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 4837 + }, + { + "epoch": 3.864735264735265, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 4838 + }, + { + "epoch": 3.8655344655344654, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 4839 + }, + { + "epoch": 3.8663336663336665, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 4840 + }, + { + "epoch": 3.867132867132867, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9033, + "step": 4841 + }, + { + "epoch": 3.8679320679320677, + "grad_norm": 0.494140625, + "learning_rate": 
0.0002, + "loss": 0.9026, + "step": 4842 + }, + { + "epoch": 3.8687312687312687, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 4843 + }, + { + "epoch": 3.8695304695304698, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9073, + "step": 4844 + }, + { + "epoch": 3.8703296703296703, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 4845 + }, + { + "epoch": 3.871128871128871, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 4846 + }, + { + "epoch": 3.871928071928072, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 4847 + }, + { + "epoch": 3.8727272727272726, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 4848 + }, + { + "epoch": 3.8735264735264736, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 4849 + }, + { + "epoch": 3.874325674325674, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 4850 + }, + { + "epoch": 3.8751248751248752, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 4851 + }, + { + "epoch": 3.875924075924076, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 4852 + }, + { + "epoch": 3.876723276723277, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 4853 + }, + { + "epoch": 3.8775224775224775, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 4854 + }, + { + "epoch": 3.8783216783216785, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 4855 + }, + { + "epoch": 3.879120879120879, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 4856 + }, + { + "epoch": 3.8799200799200797, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 4857 + }, + { + "epoch": 3.8807192807192807, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 4858 + }, + { + "epoch": 3.8815184815184818, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 4859 + }, + { + "epoch": 3.8823176823176824, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 4860 + }, + { + "epoch": 3.883116883116883, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 4861 + }, + { + "epoch": 3.883916083916084, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 4862 + }, + { + "epoch": 3.8847152847152846, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 4863 + }, + { + "epoch": 3.8855144855144856, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 4864 + }, + { + "epoch": 3.8863136863136862, + "grad_norm": 0.77734375, + "learning_rate": 0.0002, + "loss": 0.9462, + "step": 4865 + }, + { + "epoch": 3.8871128871128873, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 4866 + }, + { + "epoch": 3.887912087912088, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 4867 + }, + { + "epoch": 3.8887112887112885, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 4868 + }, + { + "epoch": 3.8895104895104895, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 4869 + }, + { + "epoch": 3.8903096903096905, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + 
"loss": 0.9047, + "step": 4870 + }, + { + "epoch": 3.891108891108891, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 4871 + }, + { + "epoch": 3.8919080919080917, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 4872 + }, + { + "epoch": 3.8927072927072928, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 4873 + }, + { + "epoch": 3.8935064935064934, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 4874 + }, + { + "epoch": 3.8943056943056944, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 4875 + }, + { + "epoch": 3.895104895104895, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 4876 + }, + { + "epoch": 3.895904095904096, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 4877 + }, + { + "epoch": 3.8967032967032966, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 4878 + }, + { + "epoch": 3.8975024975024977, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 4879 + }, + { + "epoch": 3.8983016983016983, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 4880 + }, + { + "epoch": 3.8991008991008993, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9045, + "step": 4881 + }, + { + "epoch": 3.8999000999001, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 4882 + }, + { + "epoch": 3.9006993006993005, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 4883 + }, + { + "epoch": 3.9014985014985015, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 4884 + }, + { + "epoch": 3.9022977022977026, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9061, + "step": 4885 + }, + { + "epoch": 3.903096903096903, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 4886 + }, + { + "epoch": 3.9038961038961038, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 4887 + }, + { + "epoch": 3.904695304695305, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 4888 + }, + { + "epoch": 3.9054945054945054, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 4889 + }, + { + "epoch": 3.9062937062937064, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 4890 + }, + { + "epoch": 3.907092907092907, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 4891 + }, + { + "epoch": 3.907892107892108, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 4892 + }, + { + "epoch": 3.9086913086913087, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 4893 + }, + { + "epoch": 3.9094905094905092, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 4894 + }, + { + "epoch": 3.9102897102897103, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 4895 + }, + { + "epoch": 3.9110889110889113, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 4896 + }, + { + "epoch": 3.911888111888112, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 4897 + }, + { + "epoch": 3.9126873126873125, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 
4898 + }, + { + "epoch": 3.9134865134865136, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 4899 + }, + { + "epoch": 3.914285714285714, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 4900 + }, + { + "epoch": 3.915084915084915, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 4901 + }, + { + "epoch": 3.915884115884116, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 4902 + }, + { + "epoch": 3.916683316683317, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 4903 + }, + { + "epoch": 3.9174825174825174, + "grad_norm": 2.28125, + "learning_rate": 0.0002, + "loss": 0.9258, + "step": 4904 + }, + { + "epoch": 3.918281718281718, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 4905 + }, + { + "epoch": 3.919080919080919, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.9103, + "step": 4906 + }, + { + "epoch": 3.91988011988012, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 4907 + }, + { + "epoch": 3.9206793206793207, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 4908 + }, + { + "epoch": 3.9214785214785213, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 4909 + }, + { + "epoch": 3.9222777222777223, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 4910 + }, + { + "epoch": 3.9230769230769234, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9066, + "step": 4911 + }, + { + "epoch": 3.923876123876124, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 4912 + }, + { + "epoch": 3.9246753246753245, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 4913 + }, + { + "epoch": 3.9254745254745256, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 4914 + }, + { + "epoch": 3.926273726273726, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 4915 + }, + { + "epoch": 3.927072927072927, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 4916 + }, + { + "epoch": 3.927872127872128, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 4917 + }, + { + "epoch": 3.928671328671329, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 4918 + }, + { + "epoch": 3.9294705294705294, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 4919 + }, + { + "epoch": 3.93026973026973, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 4920 + }, + { + "epoch": 3.931068931068931, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 4921 + }, + { + "epoch": 3.931868131868132, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 4922 + }, + { + "epoch": 3.9326673326673327, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 4923 + }, + { + "epoch": 3.9334665334665333, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 4924 + }, + { + "epoch": 3.9342657342657343, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 4925 + }, + { + "epoch": 3.935064935064935, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 4926 + }, + { + "epoch": 
3.935864135864136, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 4927 + }, + { + "epoch": 3.9366633366633366, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 4928 + }, + { + "epoch": 3.9374625374625376, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 4929 + }, + { + "epoch": 3.938261738261738, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 4930 + }, + { + "epoch": 3.939060939060939, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 4931 + }, + { + "epoch": 3.93986013986014, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 4932 + }, + { + "epoch": 3.940659340659341, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 4933 + }, + { + "epoch": 3.9414585414585415, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 4934 + }, + { + "epoch": 3.942257742257742, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 4935 + }, + { + "epoch": 3.943056943056943, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 4936 + }, + { + "epoch": 3.9438561438561437, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 4937 + }, + { + "epoch": 3.9446553446553447, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 4938 + }, + { + "epoch": 3.9454545454545453, + "grad_norm": 0.64453125, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 4939 + }, + { + "epoch": 3.9462537462537464, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 4940 + }, + { + "epoch": 3.947052947052947, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 4941 + }, + { + "epoch": 3.947852147852148, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 4942 + }, + { + "epoch": 3.9486513486513486, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 4943 + }, + { + "epoch": 3.9494505494505496, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 4944 + }, + { + "epoch": 3.9502497502497502, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 4945 + }, + { + "epoch": 3.951048951048951, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 4946 + }, + { + "epoch": 3.951848151848152, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 4947 + }, + { + "epoch": 3.952647352647353, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 4948 + }, + { + "epoch": 3.9534465534465535, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 4949 + }, + { + "epoch": 3.954245754245754, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 4950 + }, + { + "epoch": 3.955044955044955, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 4951 + }, + { + "epoch": 3.9558441558441557, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9065, + "step": 4952 + }, + { + "epoch": 3.9566433566433568, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 4953 + }, + { + "epoch": 3.9574425574425574, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 4954 + }, + { + "epoch": 3.9582417582417584, + 
"grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 4955 + }, + { + "epoch": 3.959040959040959, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 4956 + }, + { + "epoch": 3.9598401598401596, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 4957 + }, + { + "epoch": 3.9606393606393606, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 4958 + }, + { + "epoch": 3.9614385614385617, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 4959 + }, + { + "epoch": 3.9622377622377623, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 4960 + }, + { + "epoch": 3.963036963036963, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 4961 + }, + { + "epoch": 3.963836163836164, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 4962 + }, + { + "epoch": 3.9646353646353645, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 4963 + }, + { + "epoch": 3.9654345654345655, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 4964 + }, + { + "epoch": 3.966233766233766, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 4965 + }, + { + "epoch": 3.967032967032967, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 4966 + }, + { + "epoch": 3.9678321678321677, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 4967 + }, + { + "epoch": 3.968631368631369, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 4968 + }, + { + "epoch": 3.9694305694305694, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9146, + "step": 4969 + }, + { + "epoch": 3.9702297702297704, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 4970 + }, + { + "epoch": 3.971028971028971, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 4971 + }, + { + "epoch": 3.9718281718281716, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9053, + "step": 4972 + }, + { + "epoch": 3.9726273726273726, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 4973 + }, + { + "epoch": 3.9734265734265737, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 4974 + }, + { + "epoch": 3.9742257742257743, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 4975 + }, + { + "epoch": 3.975024975024975, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 4976 + }, + { + "epoch": 3.975824175824176, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 4977 + }, + { + "epoch": 3.9766233766233765, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 4978 + }, + { + "epoch": 3.9774225774225775, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9068, + "step": 4979 + }, + { + "epoch": 3.978221778221778, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 4980 + }, + { + "epoch": 3.979020979020979, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 4981 + }, + { + "epoch": 3.9798201798201798, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 4982 + }, + { + "epoch": 3.9806193806193804, + "grad_norm": 
0.51953125, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 4983 + }, + { + "epoch": 3.9814185814185814, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 4984 + }, + { + "epoch": 3.9822177822177824, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 4985 + }, + { + "epoch": 3.983016983016983, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 4986 + }, + { + "epoch": 3.9838161838161836, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 4987 + }, + { + "epoch": 3.9846153846153847, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 4988 + }, + { + "epoch": 3.9854145854145853, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 4989 + }, + { + "epoch": 3.9862137862137863, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 4990 + }, + { + "epoch": 3.987012987012987, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 4991 + }, + { + "epoch": 3.987812187812188, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 4992 + }, + { + "epoch": 3.9886113886113885, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 4993 + }, + { + "epoch": 3.9894105894105896, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 4994 + }, + { + "epoch": 3.99020979020979, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 4995 + }, + { + "epoch": 3.991008991008991, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 4996 + }, + { + "epoch": 3.991808191808192, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9105, + "step": 4997 + }, + { + "epoch": 3.9926073926073924, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 4998 + }, + { + "epoch": 3.9934065934065934, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 4999 + }, + { + "epoch": 3.9942057942057945, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 5000 + }, + { + "epoch": 3.995004995004995, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 5001 + }, + { + "epoch": 3.9958041958041957, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 5002 + }, + { + "epoch": 3.9966033966033967, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 5003 + }, + { + "epoch": 3.9974025974025973, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 5004 + }, + { + "epoch": 3.9982017982017983, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 5005 + }, + { + "epoch": 3.999000999000999, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 5006 + }, + { + "epoch": 3.9998001998002, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 5007 + }, + { + "epoch": 4.0, + "grad_norm": 0.11865234375, + "learning_rate": 0.0002, + "loss": 0.2225, + "step": 5008 + }, + { + "epoch": 4.000799200799201, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 5009 + }, + { + "epoch": 4.001598401598401, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 5010 + }, + { + "epoch": 4.002397602397602, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + 
"loss": 0.8947, + "step": 5011 + }, + { + "epoch": 4.003196803196803, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 5012 + }, + { + "epoch": 4.003996003996004, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 5013 + }, + { + "epoch": 4.0047952047952045, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 5014 + }, + { + "epoch": 4.0055944055944055, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 5015 + }, + { + "epoch": 4.0063936063936065, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 5016 + }, + { + "epoch": 4.007192807192808, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 5017 + }, + { + "epoch": 4.007992007992008, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 5018 + }, + { + "epoch": 4.008791208791209, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 5019 + }, + { + "epoch": 4.00959040959041, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 5020 + }, + { + "epoch": 4.01038961038961, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 5021 + }, + { + "epoch": 4.011188811188811, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 5022 + }, + { + "epoch": 4.011988011988012, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 5023 + }, + { + "epoch": 4.012787212787213, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 5024 + }, + { + "epoch": 4.013586413586413, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.907, + "step": 5025 + }, + { + "epoch": 4.014385614385614, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 5026 + }, + { + "epoch": 4.015184815184815, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 5027 + }, + { + "epoch": 4.015984015984016, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 5028 + }, + { + "epoch": 4.0167832167832165, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5029 + }, + { + "epoch": 4.0175824175824175, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 5030 + }, + { + "epoch": 4.018381618381619, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 5031 + }, + { + "epoch": 4.01918081918082, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 5032 + }, + { + "epoch": 4.01998001998002, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 5033 + }, + { + "epoch": 4.020779220779221, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 5034 + }, + { + "epoch": 4.021578421578422, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 5035 + }, + { + "epoch": 4.022377622377622, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 5036 + }, + { + "epoch": 4.023176823176823, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 5037 + }, + { + "epoch": 4.023976023976024, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 5038 + }, + { + "epoch": 4.024775224775225, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8938, + 
"step": 5039 + }, + { + "epoch": 4.025574425574425, + "grad_norm": 0.2392578125, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 5040 + }, + { + "epoch": 4.026373626373626, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 5041 + }, + { + "epoch": 4.027172827172827, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 5042 + }, + { + "epoch": 4.027972027972028, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 5043 + }, + { + "epoch": 4.0287712287712285, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 5044 + }, + { + "epoch": 4.0295704295704295, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 5045 + }, + { + "epoch": 4.030369630369631, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5046 + }, + { + "epoch": 4.031168831168831, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 5047 + }, + { + "epoch": 4.031968031968032, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9351, + "step": 5048 + }, + { + "epoch": 4.032767232767233, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 5049 + }, + { + "epoch": 4.033566433566434, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 5050 + }, + { + "epoch": 4.034365634365634, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 5051 + }, + { + "epoch": 4.035164835164835, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 5052 + }, + { + "epoch": 4.035964035964036, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 5053 + }, + { + "epoch": 4.036763236763237, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 5054 + }, + { + "epoch": 4.037562437562437, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 5055 + }, + { + "epoch": 4.038361638361638, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 5056 + }, + { + "epoch": 4.039160839160839, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 5057 + }, + { + "epoch": 4.03996003996004, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 5058 + }, + { + "epoch": 4.0407592407592405, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 5059 + }, + { + "epoch": 4.041558441558442, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 5060 + }, + { + "epoch": 4.042357642357643, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 5061 + }, + { + "epoch": 4.043156843156843, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 5062 + }, + { + "epoch": 4.043956043956044, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 5063 + }, + { + "epoch": 4.044755244755245, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 5064 + }, + { + "epoch": 4.045554445554446, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 5065 + }, + { + "epoch": 4.046353646353646, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 5066 + }, + { + "epoch": 4.047152847152847, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 5067 + }, + { + 
"epoch": 4.047952047952048, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 5068 + }, + { + "epoch": 4.048751248751249, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 5069 + }, + { + "epoch": 4.049550449550449, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 5070 + }, + { + "epoch": 4.05034965034965, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 5071 + }, + { + "epoch": 4.051148851148851, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 5072 + }, + { + "epoch": 4.0519480519480515, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 5073 + }, + { + "epoch": 4.052747252747253, + "grad_norm": 1.1875, + "learning_rate": 0.0002, + "loss": 0.9141, + "step": 5074 + }, + { + "epoch": 4.053546453546454, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 5075 + }, + { + "epoch": 4.054345654345655, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 5076 + }, + { + "epoch": 4.055144855144855, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 5077 + }, + { + "epoch": 4.055944055944056, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5078 + }, + { + "epoch": 4.056743256743257, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8813, + "step": 5079 + }, + { + "epoch": 4.057542457542458, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 5080 + }, + { + "epoch": 4.058341658341658, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 5081 + }, + { + "epoch": 4.059140859140859, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 5082 + }, + { + "epoch": 4.05994005994006, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 5083 + }, + { + "epoch": 4.060739260739261, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 5084 + }, + { + "epoch": 4.061538461538461, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 5085 + }, + { + "epoch": 4.062337662337662, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 5086 + }, + { + "epoch": 4.063136863136863, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 5087 + }, + { + "epoch": 4.0639360639360635, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 5088 + }, + { + "epoch": 4.064735264735265, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 5089 + }, + { + "epoch": 4.065534465534466, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 5090 + }, + { + "epoch": 4.066333666333667, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 5091 + }, + { + "epoch": 4.067132867132867, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 5092 + }, + { + "epoch": 4.067932067932068, + "grad_norm": 0.91796875, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 5093 + }, + { + "epoch": 4.068731268731269, + "grad_norm": 1.1875, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 5094 + }, + { + "epoch": 4.06953046953047, + "grad_norm": 0.921875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 5095 + }, + { + "epoch": 4.07032967032967, + "grad_norm": 0.57421875, + 
"learning_rate": 0.0002, + "loss": 0.9081, + "step": 5096 + }, + { + "epoch": 4.071128871128871, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 5097 + }, + { + "epoch": 4.071928071928072, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 5098 + }, + { + "epoch": 4.072727272727272, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 5099 + }, + { + "epoch": 4.073526473526473, + "grad_norm": 0.6953125, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 5100 + }, + { + "epoch": 4.074325674325674, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 5101 + }, + { + "epoch": 4.075124875124875, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 5102 + }, + { + "epoch": 4.075924075924076, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 5103 + }, + { + "epoch": 4.076723276723277, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 5104 + }, + { + "epoch": 4.077522477522478, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 5105 + }, + { + "epoch": 4.078321678321679, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 5106 + }, + { + "epoch": 4.079120879120879, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 5107 + }, + { + "epoch": 4.07992007992008, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 5108 + }, + { + "epoch": 4.080719280719281, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 5109 + }, + { + "epoch": 4.081518481518482, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 5110 + }, + { + "epoch": 4.082317682317682, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5111 + }, + { + "epoch": 4.083116883116883, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 5112 + }, + { + "epoch": 4.083916083916084, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 5113 + }, + { + "epoch": 4.084715284715284, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 5114 + }, + { + "epoch": 4.085514485514485, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 5115 + }, + { + "epoch": 4.086313686313686, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 5116 + }, + { + "epoch": 4.0871128871128874, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 5117 + }, + { + "epoch": 4.087912087912088, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 5118 + }, + { + "epoch": 4.088711288711289, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 5119 + }, + { + "epoch": 4.08951048951049, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 5120 + }, + { + "epoch": 4.090309690309691, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 5121 + }, + { + "epoch": 4.091108891108891, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 5122 + }, + { + "epoch": 4.091908091908092, + "grad_norm": 0.98046875, + "learning_rate": 0.0002, + "loss": 0.9368, + "step": 5123 + }, + { + "epoch": 4.092707292707293, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + 
"loss": 0.902, + "step": 5124 + }, + { + "epoch": 4.093506493506493, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 5125 + }, + { + "epoch": 4.094305694305694, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 5126 + }, + { + "epoch": 4.095104895104895, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 5127 + }, + { + "epoch": 4.095904095904096, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 5128 + }, + { + "epoch": 4.096703296703296, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 5129 + }, + { + "epoch": 4.097502497502497, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 5130 + }, + { + "epoch": 4.098301698301698, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 5131 + }, + { + "epoch": 4.0991008991008995, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 5132 + }, + { + "epoch": 4.0999000999001, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 5133 + }, + { + "epoch": 4.100699300699301, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 5134 + }, + { + "epoch": 4.101498501498502, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 5135 + }, + { + "epoch": 4.102297702297703, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 5136 + }, + { + "epoch": 4.103096903096903, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 5137 + }, + { + "epoch": 4.103896103896104, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 5138 + }, + { + "epoch": 4.104695304695305, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 5139 + }, + { + "epoch": 4.105494505494505, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 5140 + }, + { + "epoch": 4.106293706293706, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 5141 + }, + { + "epoch": 4.107092907092907, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 5142 + }, + { + "epoch": 4.107892107892108, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 5143 + }, + { + "epoch": 4.108691308691308, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 5144 + }, + { + "epoch": 4.109490509490509, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 5145 + }, + { + "epoch": 4.1102897102897105, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 5146 + }, + { + "epoch": 4.1110889110889115, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 5147 + }, + { + "epoch": 4.111888111888112, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 5148 + }, + { + "epoch": 4.112687312687313, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 5149 + }, + { + "epoch": 4.113486513486514, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 5150 + }, + { + "epoch": 4.114285714285714, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 5151 + }, + { + "epoch": 4.115084915084915, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 
5152 + }, + { + "epoch": 4.115884115884116, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 5153 + }, + { + "epoch": 4.116683316683317, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 5154 + }, + { + "epoch": 4.117482517482517, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 5155 + }, + { + "epoch": 4.118281718281718, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 5156 + }, + { + "epoch": 4.119080919080919, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 5157 + }, + { + "epoch": 4.11988011988012, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 5158 + }, + { + "epoch": 4.12067932067932, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 5159 + }, + { + "epoch": 4.1214785214785215, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 5160 + }, + { + "epoch": 4.1222777222777225, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 5161 + }, + { + "epoch": 4.123076923076923, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 5162 + }, + { + "epoch": 4.123876123876124, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 5163 + }, + { + "epoch": 4.124675324675325, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 5164 + }, + { + "epoch": 4.125474525474526, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 5165 + }, + { + "epoch": 4.126273726273726, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 5166 + }, + { + "epoch": 4.127072927072927, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 5167 + }, + { + "epoch": 4.127872127872128, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 5168 + }, + { + "epoch": 4.128671328671329, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 5169 + }, + { + "epoch": 4.129470529470529, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 5170 + }, + { + "epoch": 4.13026973026973, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 5171 + }, + { + "epoch": 4.131068931068931, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 5172 + }, + { + "epoch": 4.131868131868132, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 5173 + }, + { + "epoch": 4.132667332667332, + "grad_norm": 1.015625, + "learning_rate": 0.0002, + "loss": 0.9135, + "step": 5174 + }, + { + "epoch": 4.1334665334665335, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 5175 + }, + { + "epoch": 4.1342657342657345, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 5176 + }, + { + "epoch": 4.135064935064935, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 5177 + }, + { + "epoch": 4.135864135864136, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 5178 + }, + { + "epoch": 4.136663336663337, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 5179 + }, + { + "epoch": 4.137462537462538, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 5180 + }, + { + "epoch": 
4.138261738261738, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8822, + "step": 5181 + }, + { + "epoch": 4.139060939060939, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 5182 + }, + { + "epoch": 4.13986013986014, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 5183 + }, + { + "epoch": 4.140659340659341, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 5184 + }, + { + "epoch": 4.141458541458541, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 5185 + }, + { + "epoch": 4.142257742257742, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 5186 + }, + { + "epoch": 4.143056943056943, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 5187 + }, + { + "epoch": 4.143856143856144, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 5188 + }, + { + "epoch": 4.1446553446553445, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 5189 + }, + { + "epoch": 4.1454545454545455, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 5190 + }, + { + "epoch": 4.1462537462537465, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 5191 + }, + { + "epoch": 4.147052947052947, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 5192 + }, + { + "epoch": 4.147852147852148, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 5193 + }, + { + "epoch": 4.148651348651349, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 5194 + }, + { + "epoch": 4.14945054945055, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 5195 + }, + { + "epoch": 4.15024975024975, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 5196 + }, + { + "epoch": 4.151048951048951, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 5197 + }, + { + "epoch": 4.151848151848152, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 5198 + }, + { + "epoch": 4.152647352647353, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8862, + "step": 5199 + }, + { + "epoch": 4.153446553446553, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 5200 + }, + { + "epoch": 4.154245754245754, + "grad_norm": 0.25, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 5201 + }, + { + "epoch": 4.155044955044955, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 5202 + }, + { + "epoch": 4.1558441558441555, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 5203 + }, + { + "epoch": 4.1566433566433565, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 5204 + }, + { + "epoch": 4.1574425574425575, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 5205 + }, + { + "epoch": 4.158241758241759, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 5206 + }, + { + "epoch": 4.159040959040959, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 5207 + }, + { + "epoch": 4.15984015984016, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 5208 + }, + { + "epoch": 4.160639360639361, + "grad_norm": 
0.375, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 5209 + }, + { + "epoch": 4.161438561438562, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 5210 + }, + { + "epoch": 4.162237762237762, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 5211 + }, + { + "epoch": 4.163036963036963, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 5212 + }, + { + "epoch": 4.163836163836164, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 5213 + }, + { + "epoch": 4.164635364635364, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 5214 + }, + { + "epoch": 4.165434565434565, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 5215 + }, + { + "epoch": 4.166233766233766, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 5216 + }, + { + "epoch": 4.167032967032967, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 5217 + }, + { + "epoch": 4.1678321678321675, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 5218 + }, + { + "epoch": 4.1686313686313685, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 5219 + }, + { + "epoch": 4.1694305694305696, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 5220 + }, + { + "epoch": 4.170229770229771, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 5221 + }, + { + "epoch": 4.171028971028971, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 5222 + }, + { + "epoch": 4.171828171828172, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 5223 + }, + { + "epoch": 4.172627372627373, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 5224 + }, + { + "epoch": 4.173426573426573, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 5225 + }, + { + "epoch": 4.174225774225774, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 5226 + }, + { + "epoch": 4.175024975024975, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 5227 + }, + { + "epoch": 4.175824175824176, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 5228 + }, + { + "epoch": 4.176623376623376, + "grad_norm": 1.1953125, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 5229 + }, + { + "epoch": 4.177422577422577, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 5230 + }, + { + "epoch": 4.178221778221778, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 5231 + }, + { + "epoch": 4.179020979020979, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.885, + "step": 5232 + }, + { + "epoch": 4.1798201798201795, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 5233 + }, + { + "epoch": 4.1806193806193805, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 5234 + }, + { + "epoch": 4.181418581418582, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 5235 + }, + { + "epoch": 4.182217782217783, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 5236 + }, + { + "epoch": 4.183016983016983, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 
0.8983, + "step": 5237 + }, + { + "epoch": 4.183816183816184, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 5238 + }, + { + "epoch": 4.184615384615385, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 5239 + }, + { + "epoch": 4.185414585414585, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 5240 + }, + { + "epoch": 4.186213786213786, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 5241 + }, + { + "epoch": 4.187012987012987, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 5242 + }, + { + "epoch": 4.187812187812188, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 5243 + }, + { + "epoch": 4.188611388611388, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 5244 + }, + { + "epoch": 4.189410589410589, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9054, + "step": 5245 + }, + { + "epoch": 4.19020979020979, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 5246 + }, + { + "epoch": 4.191008991008991, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 5247 + }, + { + "epoch": 4.1918081918081915, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 5248 + }, + { + "epoch": 4.192607392607393, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 5249 + }, + { + "epoch": 4.193406593406594, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 5250 + }, + { + "epoch": 4.194205794205795, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 5251 + }, + { + "epoch": 4.195004995004995, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 5252 + }, + { + "epoch": 4.195804195804196, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 5253 + }, + { + "epoch": 4.196603396603397, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 5254 + }, + { + "epoch": 4.197402597402597, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 5255 + }, + { + "epoch": 4.198201798201798, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 5256 + }, + { + "epoch": 4.199000999000999, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 5257 + }, + { + "epoch": 4.1998001998002, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 5258 + }, + { + "epoch": 4.2005994005994, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 5259 + }, + { + "epoch": 4.201398601398601, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 5260 + }, + { + "epoch": 4.202197802197802, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.9115, + "step": 5261 + }, + { + "epoch": 4.202997002997003, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 5262 + }, + { + "epoch": 4.203796203796204, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 5263 + }, + { + "epoch": 4.204595404595405, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 5264 + }, + { + "epoch": 4.205394605394606, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 5265 + }, + 
{ + "epoch": 4.206193806193806, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 5266 + }, + { + "epoch": 4.206993006993007, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 5267 + }, + { + "epoch": 4.207792207792208, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 5268 + }, + { + "epoch": 4.208591408591409, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 5269 + }, + { + "epoch": 4.209390609390609, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 5270 + }, + { + "epoch": 4.21018981018981, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 5271 + }, + { + "epoch": 4.210989010989011, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 5272 + }, + { + "epoch": 4.211788211788212, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 5273 + }, + { + "epoch": 4.212587412587412, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 5274 + }, + { + "epoch": 4.213386613386613, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 5275 + }, + { + "epoch": 4.214185814185814, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 5276 + }, + { + "epoch": 4.2149850149850145, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 5277 + }, + { + "epoch": 4.215784215784216, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 5278 + }, + { + "epoch": 4.216583416583417, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 5279 + }, + { + "epoch": 4.217382617382618, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 5280 + }, + { + "epoch": 4.218181818181818, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 5281 + }, + { + "epoch": 4.218981018981019, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 5282 + }, + { + "epoch": 4.21978021978022, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 5283 + }, + { + "epoch": 4.220579420579421, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 5284 + }, + { + "epoch": 4.221378621378621, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 5285 + }, + { + "epoch": 4.222177822177822, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 5286 + }, + { + "epoch": 4.222977022977023, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 5287 + }, + { + "epoch": 4.223776223776224, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 5288 + }, + { + "epoch": 4.224575424575424, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 5289 + }, + { + "epoch": 4.225374625374625, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 5290 + }, + { + "epoch": 4.226173826173826, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.9188, + "step": 5291 + }, + { + "epoch": 4.226973026973027, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 5292 + }, + { + "epoch": 4.227772227772228, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 5293 + }, + { + "epoch": 4.228571428571429, + 
"grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 5294 + }, + { + "epoch": 4.22937062937063, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 5295 + }, + { + "epoch": 4.23016983016983, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 5296 + }, + { + "epoch": 4.230969030969031, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9525, + "step": 5297 + }, + { + "epoch": 4.231768231768232, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 5298 + }, + { + "epoch": 4.232567432567433, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 5299 + }, + { + "epoch": 4.233366633366633, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 5300 + }, + { + "epoch": 4.234165834165834, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 5301 + }, + { + "epoch": 4.234965034965035, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 5302 + }, + { + "epoch": 4.235764235764236, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 5303 + }, + { + "epoch": 4.236563436563436, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 5304 + }, + { + "epoch": 4.237362637362637, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9058, + "step": 5305 + }, + { + "epoch": 4.2381618381618384, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 5306 + }, + { + "epoch": 4.238961038961039, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 5307 + }, + { + "epoch": 4.23976023976024, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 5308 + }, + { + "epoch": 4.240559440559441, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 5309 + }, + { + "epoch": 4.241358641358642, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 5310 + }, + { + "epoch": 4.242157842157842, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 5311 + }, + { + "epoch": 4.242957042957043, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 5312 + }, + { + "epoch": 4.243756243756244, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 5313 + }, + { + "epoch": 4.244555444555445, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.904, + "step": 5314 + }, + { + "epoch": 4.245354645354645, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 5315 + }, + { + "epoch": 4.246153846153846, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9056, + "step": 5316 + }, + { + "epoch": 4.246953046953047, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 5317 + }, + { + "epoch": 4.247752247752247, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 5318 + }, + { + "epoch": 4.248551448551448, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8816, + "step": 5319 + }, + { + "epoch": 4.249350649350649, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 5320 + }, + { + "epoch": 4.2501498501498505, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 5321 + }, + { + "epoch": 4.250949050949051, + "grad_norm": 
0.337890625, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 5322 + }, + { + "epoch": 4.251748251748252, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 5323 + }, + { + "epoch": 4.252547452547453, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5324 + }, + { + "epoch": 4.253346653346654, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 5325 + }, + { + "epoch": 4.254145854145854, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 5326 + }, + { + "epoch": 4.254945054945055, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 5327 + }, + { + "epoch": 4.255744255744256, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 5328 + }, + { + "epoch": 4.256543456543456, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 5329 + }, + { + "epoch": 4.257342657342657, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 5330 + }, + { + "epoch": 4.258141858141858, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 5331 + }, + { + "epoch": 4.258941058941059, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 5332 + }, + { + "epoch": 4.259740259740259, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 5333 + }, + { + "epoch": 4.26053946053946, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 5334 + }, + { + "epoch": 4.2613386613386615, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 5335 + }, + { + "epoch": 4.2621378621378625, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 5336 + }, + { + "epoch": 4.262937062937063, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 5337 + }, + { + "epoch": 4.263736263736264, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 5338 + }, + { + "epoch": 4.264535464535465, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 5339 + }, + { + "epoch": 4.265334665334665, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 5340 + }, + { + "epoch": 4.266133866133866, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 5341 + }, + { + "epoch": 4.266933066933067, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 5342 + }, + { + "epoch": 4.267732267732268, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 5343 + }, + { + "epoch": 4.268531468531468, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 5344 + }, + { + "epoch": 4.269330669330669, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 5345 + }, + { + "epoch": 4.27012987012987, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 5346 + }, + { + "epoch": 4.270929070929071, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 5347 + }, + { + "epoch": 4.271728271728271, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 5348 + }, + { + "epoch": 4.2725274725274724, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 5349 + }, + { + "epoch": 4.2733266733266735, + "grad_norm": 0.419921875, + 
"learning_rate": 0.0002, + "loss": 0.8922, + "step": 5350 + }, + { + "epoch": 4.2741258741258745, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 5351 + }, + { + "epoch": 4.274925074925075, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 5352 + }, + { + "epoch": 4.275724275724276, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 5353 + }, + { + "epoch": 4.276523476523477, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 5354 + }, + { + "epoch": 4.277322677322678, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 5355 + }, + { + "epoch": 4.278121878121878, + "grad_norm": 0.65234375, + "learning_rate": 0.0002, + "loss": 0.9391, + "step": 5356 + }, + { + "epoch": 4.278921078921079, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 5357 + }, + { + "epoch": 4.27972027972028, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 5358 + }, + { + "epoch": 4.28051948051948, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 5359 + }, + { + "epoch": 4.281318681318681, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 5360 + }, + { + "epoch": 4.282117882117882, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 5361 + }, + { + "epoch": 4.282917082917083, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 5362 + }, + { + "epoch": 4.283716283716283, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 5363 + }, + { + "epoch": 4.2845154845154845, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 5364 + }, + { + "epoch": 4.2853146853146855, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 5365 + }, + { + "epoch": 4.2861138861138866, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 5366 + }, + { + "epoch": 4.286913086913087, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 5367 + }, + { + "epoch": 4.287712287712288, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 5368 + }, + { + "epoch": 4.288511488511489, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 5369 + }, + { + "epoch": 4.289310689310689, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.905, + "step": 5370 + }, + { + "epoch": 4.29010989010989, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 5371 + }, + { + "epoch": 4.290909090909091, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5372 + }, + { + "epoch": 4.291708291708292, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 5373 + }, + { + "epoch": 4.292507492507492, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 5374 + }, + { + "epoch": 4.293306693306693, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 5375 + }, + { + "epoch": 4.294105894105894, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 5376 + }, + { + "epoch": 4.294905094905095, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 5377 + }, + { + "epoch": 4.2957042957042955, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + 
"loss": 0.9041, + "step": 5378 + }, + { + "epoch": 4.2965034965034965, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 5379 + }, + { + "epoch": 4.2973026973026975, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 5380 + }, + { + "epoch": 4.298101898101898, + "grad_norm": 2.40625, + "learning_rate": 0.0002, + "loss": 0.9346, + "step": 5381 + }, + { + "epoch": 4.298901098901099, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.9056, + "step": 5382 + }, + { + "epoch": 4.2997002997003, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 5383 + }, + { + "epoch": 4.300499500499501, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 5384 + }, + { + "epoch": 4.301298701298701, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 5385 + }, + { + "epoch": 4.302097902097902, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 5386 + }, + { + "epoch": 4.302897102897103, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 5387 + }, + { + "epoch": 4.303696303696304, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 5388 + }, + { + "epoch": 4.304495504495504, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 5389 + }, + { + "epoch": 4.305294705294705, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 5390 + }, + { + "epoch": 4.306093906093906, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 5391 + }, + { + "epoch": 4.3068931068931064, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 5392 + }, + { + "epoch": 4.3076923076923075, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 5393 + }, + { + "epoch": 4.3084915084915085, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 5394 + }, + { + "epoch": 4.30929070929071, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 5395 + }, + { + "epoch": 4.31008991008991, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 5396 + }, + { + "epoch": 4.310889110889111, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 5397 + }, + { + "epoch": 4.311688311688312, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 5398 + }, + { + "epoch": 4.312487512487513, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 5399 + }, + { + "epoch": 4.313286713286713, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 5400 + }, + { + "epoch": 4.314085914085914, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 5401 + }, + { + "epoch": 4.314885114885115, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 5402 + }, + { + "epoch": 4.315684315684316, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 5403 + }, + { + "epoch": 4.316483516483516, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 5404 + }, + { + "epoch": 4.317282717282717, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 5405 + }, + { + "epoch": 4.318081918081918, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 5406 + }, + { 
+ "epoch": 4.3188811188811185, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 5407 + }, + { + "epoch": 4.3196803196803195, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 5408 + }, + { + "epoch": 4.3204795204795206, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 5409 + }, + { + "epoch": 4.321278721278722, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.9053, + "step": 5410 + }, + { + "epoch": 4.322077922077922, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 5411 + }, + { + "epoch": 4.322877122877123, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 5412 + }, + { + "epoch": 4.323676323676324, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 5413 + }, + { + "epoch": 4.324475524475525, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 5414 + }, + { + "epoch": 4.325274725274725, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 5415 + }, + { + "epoch": 4.326073926073926, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 5416 + }, + { + "epoch": 4.326873126873127, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 5417 + }, + { + "epoch": 4.327672327672328, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 5418 + }, + { + "epoch": 4.328471528471528, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 5419 + }, + { + "epoch": 4.329270729270729, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 5420 + }, + { + "epoch": 4.33006993006993, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 5421 + }, + { + "epoch": 4.3308691308691305, + "grad_norm": 0.65625, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 5422 + }, + { + "epoch": 4.3316683316683315, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 5423 + }, + { + "epoch": 4.332467532467533, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 5424 + }, + { + "epoch": 4.333266733266734, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 5425 + }, + { + "epoch": 4.334065934065934, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 5426 + }, + { + "epoch": 4.334865134865135, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 5427 + }, + { + "epoch": 4.335664335664336, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 5428 + }, + { + "epoch": 4.336463536463537, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 5429 + }, + { + "epoch": 4.337262737262737, + "grad_norm": 0.6640625, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 5430 + }, + { + "epoch": 4.338061938061938, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 5431 + }, + { + "epoch": 4.338861138861139, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 5432 + }, + { + "epoch": 4.339660339660339, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 5433 + }, + { + "epoch": 4.34045954045954, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 5434 + }, + { + "epoch": 4.341258741258741, + 
"grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 5435 + }, + { + "epoch": 4.342057942057942, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 5436 + }, + { + "epoch": 4.3428571428571425, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 5437 + }, + { + "epoch": 4.343656343656344, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 5438 + }, + { + "epoch": 4.344455544455545, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 5439 + }, + { + "epoch": 4.345254745254746, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 5440 + }, + { + "epoch": 4.346053946053946, + "grad_norm": 1.2890625, + "learning_rate": 0.0002, + "loss": 0.9077, + "step": 5441 + }, + { + "epoch": 4.346853146853147, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 5442 + }, + { + "epoch": 4.347652347652348, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 5443 + }, + { + "epoch": 4.348451548451548, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 5444 + }, + { + "epoch": 4.349250749250749, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 5445 + }, + { + "epoch": 4.35004995004995, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 5446 + }, + { + "epoch": 4.350849150849151, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 5447 + }, + { + "epoch": 4.351648351648351, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 5448 + }, + { + "epoch": 4.352447552447552, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 5449 + }, + { + "epoch": 4.353246753246753, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 5450 + }, + { + "epoch": 4.354045954045954, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 5451 + }, + { + "epoch": 4.3548451548451546, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 5452 + }, + { + "epoch": 4.355644355644356, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 5453 + }, + { + "epoch": 4.356443556443557, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 5454 + }, + { + "epoch": 4.357242757242757, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 5455 + }, + { + "epoch": 4.358041958041958, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 5456 + }, + { + "epoch": 4.358841158841159, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 5457 + }, + { + "epoch": 4.35964035964036, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 5458 + }, + { + "epoch": 4.36043956043956, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 5459 + }, + { + "epoch": 4.361238761238761, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.9044, + "step": 5460 + }, + { + "epoch": 4.362037962037962, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 5461 + }, + { + "epoch": 4.362837162837163, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 5462 + }, + { + "epoch": 4.363636363636363, + "grad_norm": 0.65234375, + 
"learning_rate": 0.0002, + "loss": 0.8961, + "step": 5463 + }, + { + "epoch": 4.364435564435564, + "grad_norm": 0.76171875, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 5464 + }, + { + "epoch": 4.365234765234765, + "grad_norm": 0.95703125, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 5465 + }, + { + "epoch": 4.366033966033966, + "grad_norm": 0.65234375, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 5466 + }, + { + "epoch": 4.366833166833167, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 5467 + }, + { + "epoch": 4.367632367632368, + "grad_norm": 1.0234375, + "learning_rate": 0.0002, + "loss": 0.905, + "step": 5468 + }, + { + "epoch": 4.368431568431569, + "grad_norm": 1.1015625, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 5469 + }, + { + "epoch": 4.36923076923077, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 5470 + }, + { + "epoch": 4.37002997002997, + "grad_norm": 0.98828125, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 5471 + }, + { + "epoch": 4.370829170829171, + "grad_norm": 0.90234375, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 5472 + }, + { + "epoch": 4.371628371628372, + "grad_norm": 0.9765625, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 5473 + }, + { + "epoch": 4.372427572427572, + "grad_norm": 0.8984375, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 5474 + }, + { + "epoch": 4.373226773226773, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 5475 + }, + { + "epoch": 4.374025974025974, + "grad_norm": 0.8125, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 5476 + }, + { + "epoch": 4.374825174825175, + "grad_norm": 0.83203125, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 5477 + }, + { + "epoch": 4.375624375624375, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 5478 + }, + { + "epoch": 4.376423576423576, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.9053, + "step": 5479 + }, + { + "epoch": 4.377222777222777, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.9059, + "step": 5480 + }, + { + "epoch": 4.3780219780219785, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.9036, + "step": 5481 + }, + { + "epoch": 4.378821178821179, + "grad_norm": 0.70703125, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 5482 + }, + { + "epoch": 4.37962037962038, + "grad_norm": 0.81640625, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 5483 + }, + { + "epoch": 4.380419580419581, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 5484 + }, + { + "epoch": 4.381218781218781, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 5485 + }, + { + "epoch": 4.382017982017982, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 5486 + }, + { + "epoch": 4.382817182817183, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 5487 + }, + { + "epoch": 4.383616383616384, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 5488 + }, + { + "epoch": 4.384415584415584, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 5489 + }, + { + "epoch": 4.385214785214785, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 5490 + }, + { + "epoch": 4.386013986013986, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9026, 
+ "step": 5491 + }, + { + "epoch": 4.386813186813187, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 5492 + }, + { + "epoch": 4.387612387612387, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 5493 + }, + { + "epoch": 4.388411588411588, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 5494 + }, + { + "epoch": 4.389210789210789, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 5495 + }, + { + "epoch": 4.39000999000999, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 5496 + }, + { + "epoch": 4.390809190809191, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 5497 + }, + { + "epoch": 4.391608391608392, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 5498 + }, + { + "epoch": 4.392407592407593, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 5499 + }, + { + "epoch": 4.393206793206793, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 5500 + }, + { + "epoch": 4.394005994005994, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 5501 + }, + { + "epoch": 4.394805194805195, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 5502 + }, + { + "epoch": 4.395604395604396, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 5503 + }, + { + "epoch": 4.396403596403596, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 5504 + }, + { + "epoch": 4.397202797202797, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 5505 + }, + { + "epoch": 4.398001998001998, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 5506 + }, + { + "epoch": 4.398801198801198, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 5507 + }, + { + "epoch": 4.399600399600399, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 5508 + }, + { + "epoch": 4.4003996003996, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 5509 + }, + { + "epoch": 4.4011988011988015, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 5510 + }, + { + "epoch": 4.401998001998002, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 5511 + }, + { + "epoch": 4.402797202797203, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 5512 + }, + { + "epoch": 4.403596403596404, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 5513 + }, + { + "epoch": 4.404395604395605, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 5514 + }, + { + "epoch": 4.405194805194805, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 5515 + }, + { + "epoch": 4.405994005994006, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 5516 + }, + { + "epoch": 4.406793206793207, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 5517 + }, + { + "epoch": 4.407592407592408, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 5518 + }, + { + "epoch": 4.408391608391608, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 5519 + }, + { + "epoch": 
4.409190809190809, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 5520 + }, + { + "epoch": 4.40999000999001, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 5521 + }, + { + "epoch": 4.41078921078921, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 5522 + }, + { + "epoch": 4.411588411588411, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 5523 + }, + { + "epoch": 4.4123876123876125, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 5524 + }, + { + "epoch": 4.4131868131868135, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 5525 + }, + { + "epoch": 4.413986013986014, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 5526 + }, + { + "epoch": 4.414785214785215, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 5527 + }, + { + "epoch": 4.415584415584416, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 5528 + }, + { + "epoch": 4.416383616383617, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 5529 + }, + { + "epoch": 4.417182817182817, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 5530 + }, + { + "epoch": 4.417982017982018, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 5531 + }, + { + "epoch": 4.418781218781219, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 5532 + }, + { + "epoch": 4.41958041958042, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 5533 + }, + { + "epoch": 4.42037962037962, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 5534 + }, + { + "epoch": 4.421178821178821, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 5535 + }, + { + "epoch": 4.421978021978022, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 5536 + }, + { + "epoch": 4.422777222777222, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 5537 + }, + { + "epoch": 4.4235764235764234, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 5538 + }, + { + "epoch": 4.4243756243756245, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9034, + "step": 5539 + }, + { + "epoch": 4.4251748251748255, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 5540 + }, + { + "epoch": 4.425974025974026, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 5541 + }, + { + "epoch": 4.426773226773227, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 5542 + }, + { + "epoch": 4.427572427572428, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 5543 + }, + { + "epoch": 4.428371628371629, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 5544 + }, + { + "epoch": 4.429170829170829, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.9064, + "step": 5545 + }, + { + "epoch": 4.42997002997003, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 5546 + }, + { + "epoch": 4.430769230769231, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 5547 + }, + { + "epoch": 4.431568431568431, + "grad_norm": 
0.259765625, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 5548 + }, + { + "epoch": 4.432367632367632, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 5549 + }, + { + "epoch": 4.433166833166833, + "grad_norm": 0.76171875, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 5550 + }, + { + "epoch": 4.433966033966034, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.903, + "step": 5551 + }, + { + "epoch": 4.434765234765234, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9081, + "step": 5552 + }, + { + "epoch": 4.4355644355644355, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 5553 + }, + { + "epoch": 4.4363636363636365, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 5554 + }, + { + "epoch": 4.4371628371628375, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 5555 + }, + { + "epoch": 4.437962037962038, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 5556 + }, + { + "epoch": 4.438761238761239, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 5557 + }, + { + "epoch": 4.43956043956044, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 5558 + }, + { + "epoch": 4.44035964035964, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5559 + }, + { + "epoch": 4.441158841158841, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 5560 + }, + { + "epoch": 4.441958041958042, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 5561 + }, + { + "epoch": 4.442757242757243, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 5562 + }, + { + "epoch": 4.443556443556443, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 5563 + }, + { + "epoch": 4.444355644355644, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 5564 + }, + { + "epoch": 4.445154845154845, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 5565 + }, + { + "epoch": 4.445954045954046, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5566 + }, + { + "epoch": 4.4467532467532465, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 5567 + }, + { + "epoch": 4.4475524475524475, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 5568 + }, + { + "epoch": 4.4483516483516485, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 5569 + }, + { + "epoch": 4.449150849150849, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 5570 + }, + { + "epoch": 4.44995004995005, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.9069, + "step": 5571 + }, + { + "epoch": 4.450749250749251, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 5572 + }, + { + "epoch": 4.451548451548452, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 5573 + }, + { + "epoch": 4.452347652347652, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 5574 + }, + { + "epoch": 4.453146853146853, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 5575 + }, + { + "epoch": 4.453946053946054, + "grad_norm": 0.453125, + 
"learning_rate": 0.0002, + "loss": 0.9061, + "step": 5576 + }, + { + "epoch": 4.454745254745255, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 5577 + }, + { + "epoch": 4.455544455544455, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 5578 + }, + { + "epoch": 4.456343656343656, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.881, + "step": 5579 + }, + { + "epoch": 4.457142857142857, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 5580 + }, + { + "epoch": 4.457942057942058, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 5581 + }, + { + "epoch": 4.4587412587412585, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 5582 + }, + { + "epoch": 4.4595404595404595, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 5583 + }, + { + "epoch": 4.460339660339661, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 5584 + }, + { + "epoch": 4.461138861138862, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 5585 + }, + { + "epoch": 4.461938061938062, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 5586 + }, + { + "epoch": 4.462737262737263, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 5587 + }, + { + "epoch": 4.463536463536464, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 5588 + }, + { + "epoch": 4.464335664335664, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 5589 + }, + { + "epoch": 4.465134865134865, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 5590 + }, + { + "epoch": 4.465934065934066, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 5591 + }, + { + "epoch": 4.466733266733267, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 5592 + }, + { + "epoch": 4.467532467532467, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 5593 + }, + { + "epoch": 4.468331668331668, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 5594 + }, + { + "epoch": 4.469130869130869, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.881, + "step": 5595 + }, + { + "epoch": 4.46993006993007, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 5596 + }, + { + "epoch": 4.4707292707292705, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 5597 + }, + { + "epoch": 4.4715284715284715, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 5598 + }, + { + "epoch": 4.472327672327673, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 5599 + }, + { + "epoch": 4.473126873126873, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 5600 + }, + { + "epoch": 4.473926073926074, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 5601 + }, + { + "epoch": 4.474725274725275, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 5602 + }, + { + "epoch": 4.475524475524476, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 5603 + }, + { + "epoch": 4.476323676323676, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, 
+ "loss": 0.8931, + "step": 5604 + }, + { + "epoch": 4.477122877122877, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 5605 + }, + { + "epoch": 4.477922077922078, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 5606 + }, + { + "epoch": 4.478721278721279, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 5607 + }, + { + "epoch": 4.479520479520479, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 5608 + }, + { + "epoch": 4.48031968031968, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 5609 + }, + { + "epoch": 4.481118881118881, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 5610 + }, + { + "epoch": 4.4819180819180815, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 5611 + }, + { + "epoch": 4.4827172827172825, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 5612 + }, + { + "epoch": 4.483516483516484, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 5613 + }, + { + "epoch": 4.484315684315685, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 5614 + }, + { + "epoch": 4.485114885114885, + "grad_norm": 0.25, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 5615 + }, + { + "epoch": 4.485914085914086, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 5616 + }, + { + "epoch": 4.486713286713287, + "grad_norm": 0.2421875, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 5617 + }, + { + "epoch": 4.487512487512488, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 5618 + }, + { + "epoch": 4.488311688311688, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 5619 + }, + { + "epoch": 4.489110889110889, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 5620 + }, + { + "epoch": 4.48991008991009, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 5621 + }, + { + "epoch": 4.49070929070929, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9057, + "step": 5622 + }, + { + "epoch": 4.491508491508491, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 5623 + }, + { + "epoch": 4.492307692307692, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 5624 + }, + { + "epoch": 4.493106893106893, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 5625 + }, + { + "epoch": 4.4939060939060935, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 5626 + }, + { + "epoch": 4.494705294705295, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 5627 + }, + { + "epoch": 4.495504495504496, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 5628 + }, + { + "epoch": 4.496303696303697, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 5629 + }, + { + "epoch": 4.497102897102897, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 5630 + }, + { + "epoch": 4.497902097902098, + "grad_norm": 0.2490234375, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 5631 + }, + { + "epoch": 4.498701298701299, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 5632 
+ }, + { + "epoch": 4.4995004995005, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 5633 + }, + { + "epoch": 4.5002997002997, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 5634 + }, + { + "epoch": 4.501098901098901, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 5635 + }, + { + "epoch": 4.501898101898102, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 5636 + }, + { + "epoch": 4.502697302697303, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 5637 + }, + { + "epoch": 4.503496503496503, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 5638 + }, + { + "epoch": 4.504295704295704, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 5639 + }, + { + "epoch": 4.505094905094905, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 5640 + }, + { + "epoch": 4.5058941058941056, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 5641 + }, + { + "epoch": 4.506693306693307, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 5642 + }, + { + "epoch": 4.507492507492508, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 5643 + }, + { + "epoch": 4.508291708291709, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 5644 + }, + { + "epoch": 4.509090909090909, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 5645 + }, + { + "epoch": 4.50989010989011, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 5646 + }, + { + "epoch": 4.510689310689311, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 5647 + }, + { + "epoch": 4.511488511488512, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 5648 + }, + { + "epoch": 4.512287712287712, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5649 + }, + { + "epoch": 4.513086913086913, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 5650 + }, + { + "epoch": 4.513886113886114, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 5651 + }, + { + "epoch": 4.514685314685314, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9442, + "step": 5652 + }, + { + "epoch": 4.515484515484515, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.884, + "step": 5653 + }, + { + "epoch": 4.516283716283716, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 5654 + }, + { + "epoch": 4.517082917082917, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5655 + }, + { + "epoch": 4.517882117882118, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 5656 + }, + { + "epoch": 4.518681318681319, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 5657 + }, + { + "epoch": 4.51948051948052, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 5658 + }, + { + "epoch": 4.520279720279721, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 5659 + }, + { + "epoch": 4.521078921078921, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 5660 + }, + { + "epoch": 
4.521878121878122, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 5661 + }, + { + "epoch": 4.522677322677323, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 5662 + }, + { + "epoch": 4.523476523476523, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 5663 + }, + { + "epoch": 4.524275724275724, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 5664 + }, + { + "epoch": 4.525074925074925, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 5665 + }, + { + "epoch": 4.525874125874126, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 5666 + }, + { + "epoch": 4.526673326673326, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 5667 + }, + { + "epoch": 4.527472527472527, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 5668 + }, + { + "epoch": 4.528271728271728, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 5669 + }, + { + "epoch": 4.5290709290709295, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.9049, + "step": 5670 + }, + { + "epoch": 4.52987012987013, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 5671 + }, + { + "epoch": 4.530669330669331, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 5672 + }, + { + "epoch": 4.531468531468532, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 5673 + }, + { + "epoch": 4.532267732267732, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 5674 + }, + { + "epoch": 4.533066933066933, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 5675 + }, + { + "epoch": 4.533866133866134, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 5676 + }, + { + "epoch": 4.534665334665335, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 5677 + }, + { + "epoch": 4.535464535464535, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 5678 + }, + { + "epoch": 4.536263736263736, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 5679 + }, + { + "epoch": 4.537062937062937, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 5680 + }, + { + "epoch": 4.537862137862138, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 5681 + }, + { + "epoch": 4.538661338661338, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 5682 + }, + { + "epoch": 4.539460539460539, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 5683 + }, + { + "epoch": 4.54025974025974, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 5684 + }, + { + "epoch": 4.541058941058941, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5685 + }, + { + "epoch": 4.541858141858142, + "grad_norm": 0.94140625, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 5686 + }, + { + "epoch": 4.542657342657343, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 5687 + }, + { + "epoch": 4.543456543456544, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 5688 + }, + { + "epoch": 4.544255744255745, + 
"grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 5689 + }, + { + "epoch": 4.545054945054945, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 5690 + }, + { + "epoch": 4.545854145854146, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 5691 + }, + { + "epoch": 4.546653346653347, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 5692 + }, + { + "epoch": 4.547452547452547, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 5693 + }, + { + "epoch": 4.548251748251748, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 5694 + }, + { + "epoch": 4.549050949050949, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 5695 + }, + { + "epoch": 4.54985014985015, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 5696 + }, + { + "epoch": 4.55064935064935, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 5697 + }, + { + "epoch": 4.551448551448551, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 5698 + }, + { + "epoch": 4.5522477522477525, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 5699 + }, + { + "epoch": 4.5530469530469535, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 5700 + }, + { + "epoch": 4.553846153846154, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 5701 + }, + { + "epoch": 4.554645354645355, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 5702 + }, + { + "epoch": 4.555444555444556, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 5703 + }, + { + "epoch": 4.556243756243756, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 5704 + }, + { + "epoch": 4.557042957042957, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 5705 + }, + { + "epoch": 4.557842157842158, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 5706 + }, + { + "epoch": 4.558641358641359, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 5707 + }, + { + "epoch": 4.559440559440559, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 5708 + }, + { + "epoch": 4.56023976023976, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 5709 + }, + { + "epoch": 4.561038961038961, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 5710 + }, + { + "epoch": 4.561838161838162, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 5711 + }, + { + "epoch": 4.562637362637362, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 5712 + }, + { + "epoch": 4.5634365634365635, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 5713 + }, + { + "epoch": 4.5642357642357645, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 5714 + }, + { + "epoch": 4.565034965034965, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 5715 + }, + { + "epoch": 4.565834165834166, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 5716 + }, + { + "epoch": 4.566633366633367, + "grad_norm": 0.3515625, + 
"learning_rate": 0.0002, + "loss": 0.8939, + "step": 5717 + }, + { + "epoch": 4.567432567432568, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 5718 + }, + { + "epoch": 4.568231768231768, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 5719 + }, + { + "epoch": 4.569030969030969, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 5720 + }, + { + "epoch": 4.56983016983017, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 5721 + }, + { + "epoch": 4.570629370629371, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 5722 + }, + { + "epoch": 4.571428571428571, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 5723 + }, + { + "epoch": 4.572227772227772, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 5724 + }, + { + "epoch": 4.573026973026973, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 5725 + }, + { + "epoch": 4.573826173826173, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 5726 + }, + { + "epoch": 4.574625374625374, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 5727 + }, + { + "epoch": 4.5754245754245755, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 5728 + }, + { + "epoch": 4.5762237762237765, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 5729 + }, + { + "epoch": 4.577022977022977, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 5730 + }, + { + "epoch": 4.577822177822178, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 5731 + }, + { + "epoch": 4.578621378621379, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 5732 + }, + { + "epoch": 4.57942057942058, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 5733 + }, + { + "epoch": 4.58021978021978, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 5734 + }, + { + "epoch": 4.581018981018981, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 5735 + }, + { + "epoch": 4.581818181818182, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 5736 + }, + { + "epoch": 4.582617382617382, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 5737 + }, + { + "epoch": 4.583416583416583, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 5738 + }, + { + "epoch": 4.584215784215784, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 5739 + }, + { + "epoch": 4.585014985014985, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 5740 + }, + { + "epoch": 4.585814185814185, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 5741 + }, + { + "epoch": 4.5866133866133865, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 5742 + }, + { + "epoch": 4.5874125874125875, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 5743 + }, + { + "epoch": 4.5882117882117885, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 5744 + }, + { + "epoch": 4.589010989010989, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + 
"loss": 0.9048, + "step": 5745 + }, + { + "epoch": 4.58981018981019, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 5746 + }, + { + "epoch": 4.590609390609391, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 5747 + }, + { + "epoch": 4.591408591408591, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 5748 + }, + { + "epoch": 4.592207792207792, + "grad_norm": 0.25, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 5749 + }, + { + "epoch": 4.593006993006993, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 5750 + }, + { + "epoch": 4.593806193806194, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 5751 + }, + { + "epoch": 4.594605394605395, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 5752 + }, + { + "epoch": 4.595404595404595, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 5753 + }, + { + "epoch": 4.596203796203796, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8883, + "step": 5754 + }, + { + "epoch": 4.597002997002997, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 5755 + }, + { + "epoch": 4.5978021978021975, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 5756 + }, + { + "epoch": 4.5986013986013985, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 5757 + }, + { + "epoch": 4.5994005994005995, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 5758 + }, + { + "epoch": 4.600199800199801, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 5759 + }, + { + "epoch": 4.600999000999001, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 5760 + }, + { + "epoch": 4.601798201798202, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 5761 + }, + { + "epoch": 4.602597402597403, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 5762 + }, + { + "epoch": 4.603396603396604, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 5763 + }, + { + "epoch": 4.604195804195804, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 5764 + }, + { + "epoch": 4.604995004995005, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 5765 + }, + { + "epoch": 4.605794205794206, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 5766 + }, + { + "epoch": 4.606593406593406, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 5767 + }, + { + "epoch": 4.607392607392607, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9051, + "step": 5768 + }, + { + "epoch": 4.608191808191808, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 5769 + }, + { + "epoch": 4.608991008991009, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 5770 + }, + { + "epoch": 4.6097902097902095, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 5771 + }, + { + "epoch": 4.6105894105894105, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 5772 + }, + { + "epoch": 4.611388611388612, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8829, + "step": 
5773 + }, + { + "epoch": 4.612187812187813, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 5774 + }, + { + "epoch": 4.612987012987013, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 5775 + }, + { + "epoch": 4.613786213786214, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 5776 + }, + { + "epoch": 4.614585414585415, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 5777 + }, + { + "epoch": 4.615384615384615, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 5778 + }, + { + "epoch": 4.616183816183816, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 5779 + }, + { + "epoch": 4.616983016983017, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 5780 + }, + { + "epoch": 4.617782217782218, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 5781 + }, + { + "epoch": 4.618581418581418, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 5782 + }, + { + "epoch": 4.619380619380619, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 5783 + }, + { + "epoch": 4.62017982017982, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 5784 + }, + { + "epoch": 4.620979020979021, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 5785 + }, + { + "epoch": 4.6217782217782215, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 5786 + }, + { + "epoch": 4.6225774225774225, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 5787 + }, + { + "epoch": 4.623376623376624, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 5788 + }, + { + "epoch": 4.624175824175824, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 5789 + }, + { + "epoch": 4.624975024975025, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 5790 + }, + { + "epoch": 4.625774225774226, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 5791 + }, + { + "epoch": 4.626573426573427, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 5792 + }, + { + "epoch": 4.627372627372627, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 5793 + }, + { + "epoch": 4.628171828171828, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 5794 + }, + { + "epoch": 4.628971028971029, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 5795 + }, + { + "epoch": 4.62977022977023, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 5796 + }, + { + "epoch": 4.63056943056943, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 5797 + }, + { + "epoch": 4.631368631368631, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 5798 + }, + { + "epoch": 4.632167832167832, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 5799 + }, + { + "epoch": 4.6329670329670325, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 5800 + }, + { + "epoch": 4.6337662337662335, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 5801 + }, + { + "epoch": 
4.634565434565435, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 5802 + }, + { + "epoch": 4.635364635364636, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 5803 + }, + { + "epoch": 4.636163836163837, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 5804 + }, + { + "epoch": 4.636963036963037, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 5805 + }, + { + "epoch": 4.637762237762238, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 5806 + }, + { + "epoch": 4.638561438561439, + "grad_norm": 0.9609375, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 5807 + }, + { + "epoch": 4.639360639360639, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 5808 + }, + { + "epoch": 4.64015984015984, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 5809 + }, + { + "epoch": 4.640959040959041, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 5810 + }, + { + "epoch": 4.641758241758242, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 5811 + }, + { + "epoch": 4.642557442557442, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 5812 + }, + { + "epoch": 4.643356643356643, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 5813 + }, + { + "epoch": 4.644155844155844, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 5814 + }, + { + "epoch": 4.644955044955045, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 5815 + }, + { + "epoch": 4.645754245754246, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 5816 + }, + { + "epoch": 4.646553446553447, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 5817 + }, + { + "epoch": 4.647352647352648, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 5818 + }, + { + "epoch": 4.648151848151848, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 5819 + }, + { + "epoch": 4.648951048951049, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 5820 + }, + { + "epoch": 4.64975024975025, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 5821 + }, + { + "epoch": 4.650549450549451, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 5822 + }, + { + "epoch": 4.651348651348651, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 5823 + }, + { + "epoch": 4.652147852147852, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8815, + "step": 5824 + }, + { + "epoch": 4.652947052947053, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 5825 + }, + { + "epoch": 4.653746253746254, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 5826 + }, + { + "epoch": 4.654545454545454, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 5827 + }, + { + "epoch": 4.655344655344655, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 5828 + }, + { + "epoch": 4.656143856143856, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 5829 + }, + { + "epoch": 4.6569430569430565, + "grad_norm": 
0.291015625, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 5830 + }, + { + "epoch": 4.657742257742258, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 5831 + }, + { + "epoch": 4.658541458541459, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 5832 + }, + { + "epoch": 4.65934065934066, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 5833 + }, + { + "epoch": 4.66013986013986, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 5834 + }, + { + "epoch": 4.660939060939061, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 5835 + }, + { + "epoch": 4.661738261738262, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 5836 + }, + { + "epoch": 4.662537462537463, + "grad_norm": 1.0234375, + "learning_rate": 0.0002, + "loss": 0.9316, + "step": 5837 + }, + { + "epoch": 4.663336663336663, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 5838 + }, + { + "epoch": 4.664135864135864, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 5839 + }, + { + "epoch": 4.664935064935065, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 5840 + }, + { + "epoch": 4.665734265734265, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 5841 + }, + { + "epoch": 4.666533466533466, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 5842 + }, + { + "epoch": 4.667332667332667, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 5843 + }, + { + "epoch": 4.668131868131868, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 5844 + }, + { + "epoch": 4.668931068931069, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 5845 + }, + { + "epoch": 4.66973026973027, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 5846 + }, + { + "epoch": 4.670529470529471, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 5847 + }, + { + "epoch": 4.671328671328672, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 5848 + }, + { + "epoch": 4.672127872127872, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 5849 + }, + { + "epoch": 4.672927072927073, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 5850 + }, + { + "epoch": 4.673726273726274, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 5851 + }, + { + "epoch": 4.674525474525474, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 5852 + }, + { + "epoch": 4.675324675324675, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 5853 + }, + { + "epoch": 4.676123876123876, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 5854 + }, + { + "epoch": 4.676923076923077, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 5855 + }, + { + "epoch": 4.677722277722278, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 5856 + }, + { + "epoch": 4.678521478521478, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 5857 + }, + { + "epoch": 4.679320679320679, + "grad_norm": 0.33984375, + "learning_rate": 
0.0002, + "loss": 0.8927, + "step": 5858 + }, + { + "epoch": 4.6801198801198804, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 5859 + }, + { + "epoch": 4.680919080919081, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 5860 + }, + { + "epoch": 4.681718281718282, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5861 + }, + { + "epoch": 4.682517482517483, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 5862 + }, + { + "epoch": 4.683316683316683, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 5863 + }, + { + "epoch": 4.684115884115884, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 5864 + }, + { + "epoch": 4.684915084915085, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 5865 + }, + { + "epoch": 4.685714285714286, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 5866 + }, + { + "epoch": 4.686513486513487, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 5867 + }, + { + "epoch": 4.687312687312687, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 5868 + }, + { + "epoch": 4.688111888111888, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 5869 + }, + { + "epoch": 4.688911088911089, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 5870 + }, + { + "epoch": 4.689710289710289, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 5871 + }, + { + "epoch": 4.69050949050949, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 5872 + }, + { + "epoch": 4.691308691308691, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 5873 + }, + { + "epoch": 4.6921078921078925, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 5874 + }, + { + "epoch": 4.692907092907093, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 5875 + }, + { + "epoch": 4.693706293706294, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 5876 + }, + { + "epoch": 4.694505494505495, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 5877 + }, + { + "epoch": 4.695304695304696, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 5878 + }, + { + "epoch": 4.696103896103896, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 5879 + }, + { + "epoch": 4.696903096903097, + "grad_norm": 0.671875, + "learning_rate": 0.0002, + "loss": 0.9137, + "step": 5880 + }, + { + "epoch": 4.697702297702298, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.8841, + "step": 5881 + }, + { + "epoch": 4.698501498501498, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 5882 + }, + { + "epoch": 4.699300699300699, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 5883 + }, + { + "epoch": 4.7000999000999, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 5884 + }, + { + "epoch": 4.700899100899101, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 5885 + }, + { + "epoch": 4.701698301698301, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.9009, + 
"step": 5886 + }, + { + "epoch": 4.702497502497502, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 5887 + }, + { + "epoch": 4.7032967032967035, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 5888 + }, + { + "epoch": 4.7040959040959045, + "grad_norm": 0.76171875, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 5889 + }, + { + "epoch": 4.704895104895105, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8883, + "step": 5890 + }, + { + "epoch": 4.705694305694306, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.9046, + "step": 5891 + }, + { + "epoch": 4.706493506493507, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 5892 + }, + { + "epoch": 4.707292707292707, + "grad_norm": 0.73046875, + "learning_rate": 0.0002, + "loss": 0.909, + "step": 5893 + }, + { + "epoch": 4.708091908091908, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9089, + "step": 5894 + }, + { + "epoch": 4.708891108891109, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 5895 + }, + { + "epoch": 4.70969030969031, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 5896 + }, + { + "epoch": 4.71048951048951, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 5897 + }, + { + "epoch": 4.711288711288711, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 5898 + }, + { + "epoch": 4.712087912087912, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 5899 + }, + { + "epoch": 4.712887112887113, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 5900 + }, + { + "epoch": 4.713686313686313, + "grad_norm": 0.76171875, + "learning_rate": 0.0002, + "loss": 0.9077, + "step": 5901 + }, + { + "epoch": 4.7144855144855145, + "grad_norm": 0.70703125, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 5902 + }, + { + "epoch": 4.7152847152847155, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 5903 + }, + { + "epoch": 4.716083916083916, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 5904 + }, + { + "epoch": 4.716883116883117, + "grad_norm": 0.6953125, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 5905 + }, + { + "epoch": 4.717682317682318, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 5906 + }, + { + "epoch": 4.718481518481519, + "grad_norm": 0.7265625, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 5907 + }, + { + "epoch": 4.719280719280719, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 5908 + }, + { + "epoch": 4.72007992007992, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 5909 + }, + { + "epoch": 4.720879120879121, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.881, + "step": 5910 + }, + { + "epoch": 4.721678321678322, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 5911 + }, + { + "epoch": 4.722477522477522, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 5912 + }, + { + "epoch": 4.723276723276723, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 5913 + }, + { + "epoch": 4.724075924075924, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 5914 + }, + { + "epoch": 
4.724875124875124, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 5915 + }, + { + "epoch": 4.725674325674325, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 5916 + }, + { + "epoch": 4.7264735264735265, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 5917 + }, + { + "epoch": 4.7272727272727275, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 5918 + }, + { + "epoch": 4.7280719280719286, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 5919 + }, + { + "epoch": 4.728871128871129, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 5920 + }, + { + "epoch": 4.72967032967033, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 5921 + }, + { + "epoch": 4.730469530469531, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 5922 + }, + { + "epoch": 4.731268731268731, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 5923 + }, + { + "epoch": 4.732067932067932, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 5924 + }, + { + "epoch": 4.732867132867133, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 5925 + }, + { + "epoch": 4.733666333666334, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 5926 + }, + { + "epoch": 4.734465534465534, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5927 + }, + { + "epoch": 4.735264735264735, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 5928 + }, + { + "epoch": 4.736063936063936, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 5929 + }, + { + "epoch": 4.736863136863137, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 5930 + }, + { + "epoch": 4.7376623376623375, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 5931 + }, + { + "epoch": 4.7384615384615385, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 5932 + }, + { + "epoch": 4.7392607392607395, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 5933 + }, + { + "epoch": 4.74005994005994, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 5934 + }, + { + "epoch": 4.740859140859141, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 5935 + }, + { + "epoch": 4.741658341658342, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 5936 + }, + { + "epoch": 4.742457542457543, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 5937 + }, + { + "epoch": 4.743256743256743, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 5938 + }, + { + "epoch": 4.744055944055944, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 5939 + }, + { + "epoch": 4.744855144855145, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 5940 + }, + { + "epoch": 4.745654345654346, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 5941 + }, + { + "epoch": 4.746453546453546, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 5942 + }, + { + "epoch": 4.747252747252747, + 
"grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 5943 + }, + { + "epoch": 4.748051948051948, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 5944 + }, + { + "epoch": 4.7488511488511485, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 5945 + }, + { + "epoch": 4.7496503496503495, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 5946 + }, + { + "epoch": 4.7504495504495505, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 5947 + }, + { + "epoch": 4.751248751248752, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 5948 + }, + { + "epoch": 4.752047952047952, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 5949 + }, + { + "epoch": 4.752847152847153, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 5950 + }, + { + "epoch": 4.753646353646354, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 5951 + }, + { + "epoch": 4.754445554445555, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 5952 + }, + { + "epoch": 4.755244755244755, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 5953 + }, + { + "epoch": 4.756043956043956, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 5954 + }, + { + "epoch": 4.756843156843157, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 5955 + }, + { + "epoch": 4.757642357642357, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 5956 + }, + { + "epoch": 4.758441558441558, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 5957 + }, + { + "epoch": 4.759240759240759, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 5958 + }, + { + "epoch": 4.76003996003996, + "grad_norm": 0.7265625, + "learning_rate": 0.0002, + "loss": 0.9167, + "step": 5959 + }, + { + "epoch": 4.7608391608391605, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 5960 + }, + { + "epoch": 4.7616383616383615, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9032, + "step": 5961 + }, + { + "epoch": 4.7624375624375626, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 5962 + }, + { + "epoch": 4.763236763236764, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 5963 + }, + { + "epoch": 4.764035964035964, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5964 + }, + { + "epoch": 4.764835164835165, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 5965 + }, + { + "epoch": 4.765634365634366, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 5966 + }, + { + "epoch": 4.766433566433566, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 5967 + }, + { + "epoch": 4.767232767232767, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.9023, + "step": 5968 + }, + { + "epoch": 4.768031968031968, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 5969 + }, + { + "epoch": 4.768831168831169, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 5970 + }, + { + "epoch": 4.76963036963037, + "grad_norm": 0.2734375, + 
"learning_rate": 0.0002, + "loss": 0.8946, + "step": 5971 + }, + { + "epoch": 4.77042957042957, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 5972 + }, + { + "epoch": 4.771228771228771, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 5973 + }, + { + "epoch": 4.772027972027972, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 5974 + }, + { + "epoch": 4.7728271728271725, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 5975 + }, + { + "epoch": 4.7736263736263735, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 5976 + }, + { + "epoch": 4.774425574425575, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 5977 + }, + { + "epoch": 4.775224775224775, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 5978 + }, + { + "epoch": 4.776023976023976, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 5979 + }, + { + "epoch": 4.776823176823177, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 5980 + }, + { + "epoch": 4.777622377622378, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 5981 + }, + { + "epoch": 4.778421578421579, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 5982 + }, + { + "epoch": 4.779220779220779, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 5983 + }, + { + "epoch": 4.78001998001998, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 5984 + }, + { + "epoch": 4.780819180819181, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 5985 + }, + { + "epoch": 4.781618381618381, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 5986 + }, + { + "epoch": 4.782417582417582, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 5987 + }, + { + "epoch": 4.783216783216783, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 5988 + }, + { + "epoch": 4.784015984015984, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 5989 + }, + { + "epoch": 4.7848151848151845, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 5990 + }, + { + "epoch": 4.785614385614386, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 5991 + }, + { + "epoch": 4.786413586413587, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 5992 + }, + { + "epoch": 4.787212787212788, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 5993 + }, + { + "epoch": 4.788011988011988, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 5994 + }, + { + "epoch": 4.788811188811189, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9167, + "step": 5995 + }, + { + "epoch": 4.78961038961039, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 5996 + }, + { + "epoch": 4.79040959040959, + "grad_norm": 1.0625, + "learning_rate": 0.0002, + "loss": 0.9225, + "step": 5997 + }, + { + "epoch": 4.791208791208791, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 5998 + }, + { + "epoch": 4.792007992007992, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 
0.8958, + "step": 5999 + }, + { + "epoch": 4.792807192807193, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 6000 + }, + { + "epoch": 4.793606393606393, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 6001 + }, + { + "epoch": 4.794405594405594, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 6002 + }, + { + "epoch": 4.795204795204795, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 6003 + }, + { + "epoch": 4.796003996003996, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 6004 + }, + { + "epoch": 4.796803196803197, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 6005 + }, + { + "epoch": 4.797602397602398, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 6006 + }, + { + "epoch": 4.798401598401599, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 6007 + }, + { + "epoch": 4.799200799200799, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 6008 + }, + { + "epoch": 4.8, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 6009 + }, + { + "epoch": 4.800799200799201, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 6010 + }, + { + "epoch": 4.801598401598402, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 6011 + }, + { + "epoch": 4.802397602397602, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 6012 + }, + { + "epoch": 4.803196803196803, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 6013 + }, + { + "epoch": 4.803996003996004, + "grad_norm": 0.6953125, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 6014 + }, + { + "epoch": 4.804795204795205, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 6015 + }, + { + "epoch": 4.805594405594405, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 6016 + }, + { + "epoch": 4.806393606393606, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 6017 + }, + { + "epoch": 4.807192807192807, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 6018 + }, + { + "epoch": 4.8079920079920075, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 6019 + }, + { + "epoch": 4.808791208791209, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 6020 + }, + { + "epoch": 4.80959040959041, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 6021 + }, + { + "epoch": 4.810389610389611, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 6022 + }, + { + "epoch": 4.811188811188811, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 6023 + }, + { + "epoch": 4.811988011988012, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 6024 + }, + { + "epoch": 4.812787212787213, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 6025 + }, + { + "epoch": 4.813586413586414, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 6026 + }, + { + "epoch": 4.814385614385614, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 6027 + }, + { + "epoch": 
4.815184815184815, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 6028 + }, + { + "epoch": 4.815984015984016, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 6029 + }, + { + "epoch": 4.816783216783216, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 6030 + }, + { + "epoch": 4.817582417582417, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6031 + }, + { + "epoch": 4.818381618381618, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 6032 + }, + { + "epoch": 4.819180819180819, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 6033 + }, + { + "epoch": 4.8199800199800205, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 6034 + }, + { + "epoch": 4.820779220779221, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 6035 + }, + { + "epoch": 4.821578421578422, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 6036 + }, + { + "epoch": 4.822377622377623, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 6037 + }, + { + "epoch": 4.823176823176823, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 6038 + }, + { + "epoch": 4.823976023976024, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 6039 + }, + { + "epoch": 4.824775224775225, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 6040 + }, + { + "epoch": 4.825574425574426, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 6041 + }, + { + "epoch": 4.826373626373626, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 6042 + }, + { + "epoch": 4.827172827172827, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 6043 + }, + { + "epoch": 4.827972027972028, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 6044 + }, + { + "epoch": 4.828771228771229, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 6045 + }, + { + "epoch": 4.829570429570429, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8883, + "step": 6046 + }, + { + "epoch": 4.83036963036963, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 6047 + }, + { + "epoch": 4.8311688311688314, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 6048 + }, + { + "epoch": 4.831968031968032, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 6049 + }, + { + "epoch": 4.832767232767233, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.9058, + "step": 6050 + }, + { + "epoch": 4.833566433566434, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 6051 + }, + { + "epoch": 4.834365634365635, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 6052 + }, + { + "epoch": 4.835164835164835, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 6053 + }, + { + "epoch": 4.835964035964036, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 6054 + }, + { + "epoch": 4.836763236763237, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 6055 + }, + { + "epoch": 4.837562437562438, + "grad_norm": 
0.33984375, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 6056 + }, + { + "epoch": 4.838361638361638, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 6057 + }, + { + "epoch": 4.839160839160839, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 6058 + }, + { + "epoch": 4.83996003996004, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 6059 + }, + { + "epoch": 4.84075924075924, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 6060 + }, + { + "epoch": 4.841558441558441, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 6061 + }, + { + "epoch": 4.842357642357642, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 6062 + }, + { + "epoch": 4.8431568431568435, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 6063 + }, + { + "epoch": 4.843956043956044, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 6064 + }, + { + "epoch": 4.844755244755245, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 6065 + }, + { + "epoch": 4.845554445554446, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 6066 + }, + { + "epoch": 4.846353646353647, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 6067 + }, + { + "epoch": 4.847152847152847, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 6068 + }, + { + "epoch": 4.847952047952048, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 6069 + }, + { + "epoch": 4.848751248751249, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 6070 + }, + { + "epoch": 4.849550449550449, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 6071 + }, + { + "epoch": 4.85034965034965, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 6072 + }, + { + "epoch": 4.851148851148851, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 6073 + }, + { + "epoch": 4.851948051948052, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 6074 + }, + { + "epoch": 4.852747252747252, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 6075 + }, + { + "epoch": 4.853546453546453, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 6076 + }, + { + "epoch": 4.8543456543456545, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 6077 + }, + { + "epoch": 4.8551448551448555, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 6078 + }, + { + "epoch": 4.855944055944056, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 6079 + }, + { + "epoch": 4.856743256743257, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 6080 + }, + { + "epoch": 4.857542457542458, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 6081 + }, + { + "epoch": 4.858341658341658, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 6082 + }, + { + "epoch": 4.859140859140859, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 6083 + }, + { + "epoch": 4.85994005994006, + "grad_norm": 0.375, + "learning_rate": 
0.0002, + "loss": 0.8884, + "step": 6084 + }, + { + "epoch": 4.860739260739261, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 6085 + }, + { + "epoch": 4.861538461538462, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 6086 + }, + { + "epoch": 4.862337662337662, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 6087 + }, + { + "epoch": 4.863136863136863, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6088 + }, + { + "epoch": 4.863936063936064, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8812, + "step": 6089 + }, + { + "epoch": 4.864735264735264, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 6090 + }, + { + "epoch": 4.8655344655344654, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 6091 + }, + { + "epoch": 4.8663336663336665, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 6092 + }, + { + "epoch": 4.867132867132867, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8846, + "step": 6093 + }, + { + "epoch": 4.867932067932068, + "grad_norm": 1.96875, + "learning_rate": 0.0002, + "loss": 0.9245, + "step": 6094 + }, + { + "epoch": 4.868731268731269, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 6095 + }, + { + "epoch": 4.86953046953047, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 6096 + }, + { + "epoch": 4.870329670329671, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 6097 + }, + { + "epoch": 4.871128871128871, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 6098 + }, + { + "epoch": 4.871928071928072, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 6099 + }, + { + "epoch": 4.872727272727273, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 6100 + }, + { + "epoch": 4.873526473526473, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 6101 + }, + { + "epoch": 4.874325674325674, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.9041, + "step": 6102 + }, + { + "epoch": 4.875124875124875, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 6103 + }, + { + "epoch": 4.875924075924076, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 6104 + }, + { + "epoch": 4.876723276723276, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 6105 + }, + { + "epoch": 4.8775224775224775, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 6106 + }, + { + "epoch": 4.8783216783216785, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 6107 + }, + { + "epoch": 4.8791208791208796, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 6108 + }, + { + "epoch": 4.87992007992008, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.9117, + "step": 6109 + }, + { + "epoch": 4.880719280719281, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 6110 + }, + { + "epoch": 4.881518481518482, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 6111 + }, + { + "epoch": 4.882317682317682, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8969, + 
"step": 6112 + }, + { + "epoch": 4.883116883116883, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 6113 + }, + { + "epoch": 4.883916083916084, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 6114 + }, + { + "epoch": 4.884715284715285, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 6115 + }, + { + "epoch": 4.885514485514485, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 6116 + }, + { + "epoch": 4.886313686313686, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 6117 + }, + { + "epoch": 4.887112887112887, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 6118 + }, + { + "epoch": 4.887912087912088, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 6119 + }, + { + "epoch": 4.8887112887112885, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 6120 + }, + { + "epoch": 4.8895104895104895, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 6121 + }, + { + "epoch": 4.8903096903096905, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6122 + }, + { + "epoch": 4.891108891108891, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 6123 + }, + { + "epoch": 4.891908091908092, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 6124 + }, + { + "epoch": 4.892707292707293, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 6125 + }, + { + "epoch": 4.893506493506494, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 6126 + }, + { + "epoch": 4.894305694305694, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 6127 + }, + { + "epoch": 4.895104895104895, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 6128 + }, + { + "epoch": 4.895904095904096, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 6129 + }, + { + "epoch": 4.896703296703297, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 6130 + }, + { + "epoch": 4.897502497502497, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 6131 + }, + { + "epoch": 4.898301698301698, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 6132 + }, + { + "epoch": 4.899100899100899, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 6133 + }, + { + "epoch": 4.8999000999000994, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 6134 + }, + { + "epoch": 4.9006993006993005, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 6135 + }, + { + "epoch": 4.9014985014985015, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 6136 + }, + { + "epoch": 4.902297702297703, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 6137 + }, + { + "epoch": 4.903096903096903, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 6138 + }, + { + "epoch": 4.903896103896104, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 6139 + }, + { + "epoch": 4.904695304695305, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8819, + "step": 6140 + }, + { 
+ "epoch": 4.905494505494506, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 6141 + }, + { + "epoch": 4.906293706293706, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.9024, + "step": 6142 + }, + { + "epoch": 4.907092907092907, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 6143 + }, + { + "epoch": 4.907892107892108, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 6144 + }, + { + "epoch": 4.908691308691308, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8838, + "step": 6145 + }, + { + "epoch": 4.909490509490509, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 6146 + }, + { + "epoch": 4.91028971028971, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 6147 + }, + { + "epoch": 4.911088911088911, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 6148 + }, + { + "epoch": 4.911888111888112, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 6149 + }, + { + "epoch": 4.9126873126873125, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 6150 + }, + { + "epoch": 4.9134865134865136, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 6151 + }, + { + "epoch": 4.914285714285715, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 6152 + }, + { + "epoch": 4.915084915084915, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 6153 + }, + { + "epoch": 4.915884115884116, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 6154 + }, + { + "epoch": 4.916683316683317, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 6155 + }, + { + "epoch": 4.917482517482518, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 6156 + }, + { + "epoch": 4.918281718281718, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 6157 + }, + { + "epoch": 4.919080919080919, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 6158 + }, + { + "epoch": 4.91988011988012, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 6159 + }, + { + "epoch": 4.920679320679321, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 6160 + }, + { + "epoch": 4.921478521478521, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 6161 + }, + { + "epoch": 4.922277722277722, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 6162 + }, + { + "epoch": 4.923076923076923, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 6163 + }, + { + "epoch": 4.9238761238761235, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 6164 + }, + { + "epoch": 4.9246753246753245, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 6165 + }, + { + "epoch": 4.925474525474526, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 6166 + }, + { + "epoch": 4.926273726273727, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 6167 + }, + { + "epoch": 4.927072927072927, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 6168 + }, + { + "epoch": 
4.927872127872128, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 6169 + }, + { + "epoch": 4.928671328671329, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 6170 + }, + { + "epoch": 4.92947052947053, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 6171 + }, + { + "epoch": 4.93026973026973, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 6172 + }, + { + "epoch": 4.931068931068931, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 6173 + }, + { + "epoch": 4.931868131868132, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 6174 + }, + { + "epoch": 4.932667332667332, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.9074, + "step": 6175 + }, + { + "epoch": 4.933466533466533, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 6176 + }, + { + "epoch": 4.934265734265734, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 6177 + }, + { + "epoch": 4.935064935064935, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 6178 + }, + { + "epoch": 4.9358641358641355, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 6179 + }, + { + "epoch": 4.936663336663337, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 6180 + }, + { + "epoch": 4.937462537462538, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6181 + }, + { + "epoch": 4.938261738261739, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 6182 + }, + { + "epoch": 4.939060939060939, + "grad_norm": 0.93359375, + "learning_rate": 0.0002, + "loss": 0.9087, + "step": 6183 + }, + { + "epoch": 4.93986013986014, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 6184 + }, + { + "epoch": 4.940659340659341, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 6185 + }, + { + "epoch": 4.941458541458541, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 6186 + }, + { + "epoch": 4.942257742257742, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 6187 + }, + { + "epoch": 4.943056943056943, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 6188 + }, + { + "epoch": 4.943856143856144, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 6189 + }, + { + "epoch": 4.944655344655344, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.8823, + "step": 6190 + }, + { + "epoch": 4.945454545454545, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 6191 + }, + { + "epoch": 4.946253746253746, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 6192 + }, + { + "epoch": 4.947052947052947, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 6193 + }, + { + "epoch": 4.9478521478521476, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 6194 + }, + { + "epoch": 4.948651348651349, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 6195 + }, + { + "epoch": 4.94945054945055, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 6196 + }, + { + "epoch": 4.95024975024975, + "grad_norm": 0.47265625, + 
"learning_rate": 0.0002, + "loss": 0.8837, + "step": 6197 + }, + { + "epoch": 4.951048951048951, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6198 + }, + { + "epoch": 4.951848151848152, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 6199 + }, + { + "epoch": 4.952647352647353, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 6200 + }, + { + "epoch": 4.953446553446554, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 6201 + }, + { + "epoch": 4.954245754245754, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 6202 + }, + { + "epoch": 4.955044955044955, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 6203 + }, + { + "epoch": 4.955844155844156, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8862, + "step": 6204 + }, + { + "epoch": 4.956643356643356, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6205 + }, + { + "epoch": 4.957442557442557, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 6206 + }, + { + "epoch": 4.958241758241758, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 6207 + }, + { + "epoch": 4.959040959040959, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 6208 + }, + { + "epoch": 4.95984015984016, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6209 + }, + { + "epoch": 4.960639360639361, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 6210 + }, + { + "epoch": 4.961438561438562, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 6211 + }, + { + "epoch": 4.962237762237763, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 6212 + }, + { + "epoch": 4.963036963036963, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 6213 + }, + { + "epoch": 4.963836163836164, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 6214 + }, + { + "epoch": 4.964635364635365, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 6215 + }, + { + "epoch": 4.965434565434565, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 6216 + }, + { + "epoch": 4.966233766233766, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.9104, + "step": 6217 + }, + { + "epoch": 4.967032967032967, + "grad_norm": 1.2421875, + "learning_rate": 0.0002, + "loss": 0.9109, + "step": 6218 + }, + { + "epoch": 4.967832167832168, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 6219 + }, + { + "epoch": 4.968631368631368, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 6220 + }, + { + "epoch": 4.969430569430569, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6221 + }, + { + "epoch": 4.97022977022977, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 6222 + }, + { + "epoch": 4.9710289710289715, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 6223 + }, + { + "epoch": 4.971828171828172, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 6224 + }, + { + "epoch": 4.972627372627373, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, 
+ "loss": 0.8952, + "step": 6225 + }, + { + "epoch": 4.973426573426574, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 6226 + }, + { + "epoch": 4.974225774225774, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 6227 + }, + { + "epoch": 4.975024975024975, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 6228 + }, + { + "epoch": 4.975824175824176, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 6229 + }, + { + "epoch": 4.976623376623377, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 6230 + }, + { + "epoch": 4.977422577422577, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 6231 + }, + { + "epoch": 4.978221778221778, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9069, + "step": 6232 + }, + { + "epoch": 4.979020979020979, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 6233 + }, + { + "epoch": 4.97982017982018, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 6234 + }, + { + "epoch": 4.98061938061938, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 6235 + }, + { + "epoch": 4.981418581418581, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 6236 + }, + { + "epoch": 4.982217782217782, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 6237 + }, + { + "epoch": 4.983016983016983, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 6238 + }, + { + "epoch": 4.983816183816184, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 6239 + }, + { + "epoch": 4.984615384615385, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 6240 + }, + { + "epoch": 4.985414585414586, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 6241 + }, + { + "epoch": 4.986213786213786, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6242 + }, + { + "epoch": 4.987012987012987, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 6243 + }, + { + "epoch": 4.987812187812188, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 6244 + }, + { + "epoch": 4.988611388611389, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 6245 + }, + { + "epoch": 4.989410589410589, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 6246 + }, + { + "epoch": 4.99020979020979, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 6247 + }, + { + "epoch": 4.991008991008991, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 6248 + }, + { + "epoch": 4.991808191808191, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 6249 + }, + { + "epoch": 4.992607392607392, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 6250 + }, + { + "epoch": 4.993406593406593, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 6251 + }, + { + "epoch": 4.9942057942057945, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 6252 + }, + { + "epoch": 4.995004995004995, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 6253 + }, + 
{ + "epoch": 4.995804195804196, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 6254 + }, + { + "epoch": 4.996603396603397, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 6255 + }, + { + "epoch": 4.997402597402598, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 6256 + }, + { + "epoch": 4.998201798201798, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6257 + }, + { + "epoch": 4.999000999000999, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 6258 + }, + { + "epoch": 4.9998001998002, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 6259 + }, + { + "epoch": 5.0, + "grad_norm": 0.107421875, + "learning_rate": 0.0002, + "loss": 0.226, + "step": 6260 + }, + { + "epoch": 5.000799200799201, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 6261 + }, + { + "epoch": 5.001598401598401, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 6262 + }, + { + "epoch": 5.002397602397602, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8836, + "step": 6263 + }, + { + "epoch": 5.003196803196803, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6264 + }, + { + "epoch": 5.003996003996004, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 6265 + }, + { + "epoch": 5.0047952047952045, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 6266 + }, + { + "epoch": 5.0055944055944055, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 6267 + }, + { + "epoch": 5.0063936063936065, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 6268 + }, + { + "epoch": 5.007192807192808, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 6269 + }, + { + "epoch": 5.007992007992008, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 6270 + }, + { + "epoch": 5.008791208791209, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6271 + }, + { + "epoch": 5.00959040959041, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 6272 + }, + { + "epoch": 5.01038961038961, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 6273 + }, + { + "epoch": 5.011188811188811, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 6274 + }, + { + "epoch": 5.011988011988012, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 6275 + }, + { + "epoch": 5.012787212787213, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 6276 + }, + { + "epoch": 5.013586413586413, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 6277 + }, + { + "epoch": 5.014385614385614, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 6278 + }, + { + "epoch": 5.015184815184815, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 6279 + }, + { + "epoch": 5.015984015984016, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 6280 + }, + { + "epoch": 5.0167832167832165, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6281 + }, + { + "epoch": 5.0175824175824175, + 
"grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 6282 + }, + { + "epoch": 5.018381618381619, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8791, + "step": 6283 + }, + { + "epoch": 5.01918081918082, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 6284 + }, + { + "epoch": 5.01998001998002, + "grad_norm": 0.248046875, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 6285 + }, + { + "epoch": 5.020779220779221, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6286 + }, + { + "epoch": 5.021578421578422, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 6287 + }, + { + "epoch": 5.022377622377622, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 6288 + }, + { + "epoch": 5.023176823176823, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 6289 + }, + { + "epoch": 5.023976023976024, + "grad_norm": 0.24609375, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 6290 + }, + { + "epoch": 5.024775224775225, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 6291 + }, + { + "epoch": 5.025574425574425, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 6292 + }, + { + "epoch": 5.026373626373626, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 6293 + }, + { + "epoch": 5.027172827172827, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.884, + "step": 6294 + }, + { + "epoch": 5.027972027972028, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 6295 + }, + { + "epoch": 5.0287712287712285, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 6296 + }, + { + "epoch": 5.0295704295704295, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 6297 + }, + { + "epoch": 5.030369630369631, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 6298 + }, + { + "epoch": 5.031168831168831, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 6299 + }, + { + "epoch": 5.031968031968032, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 6300 + }, + { + "epoch": 5.032767232767233, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 6301 + }, + { + "epoch": 5.033566433566434, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8803, + "step": 6302 + }, + { + "epoch": 5.034365634365634, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 6303 + }, + { + "epoch": 5.035164835164835, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 6304 + }, + { + "epoch": 5.035964035964036, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 6305 + }, + { + "epoch": 5.036763236763237, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 6306 + }, + { + "epoch": 5.037562437562437, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 6307 + }, + { + "epoch": 5.038361638361638, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 6308 + }, + { + "epoch": 5.039160839160839, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 6309 + }, + { + "epoch": 5.03996003996004, + "grad_norm": 0.353515625, + 
"learning_rate": 0.0002, + "loss": 0.9027, + "step": 6310 + }, + { + "epoch": 5.0407592407592405, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 6311 + }, + { + "epoch": 5.041558441558442, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.942, + "step": 6312 + }, + { + "epoch": 5.042357642357643, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 6313 + }, + { + "epoch": 5.043156843156843, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 6314 + }, + { + "epoch": 5.043956043956044, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 6315 + }, + { + "epoch": 5.044755244755245, + "grad_norm": 0.25, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 6316 + }, + { + "epoch": 5.045554445554446, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 6317 + }, + { + "epoch": 5.046353646353646, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 6318 + }, + { + "epoch": 5.047152847152847, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 6319 + }, + { + "epoch": 5.047952047952048, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 6320 + }, + { + "epoch": 5.048751248751249, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 6321 + }, + { + "epoch": 5.049550449550449, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6322 + }, + { + "epoch": 5.05034965034965, + "grad_norm": 0.24609375, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 6323 + }, + { + "epoch": 5.051148851148851, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 6324 + }, + { + "epoch": 5.0519480519480515, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 6325 + }, + { + "epoch": 5.052747252747253, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 6326 + }, + { + "epoch": 5.053546453546454, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 6327 + }, + { + "epoch": 5.054345654345655, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 6328 + }, + { + "epoch": 5.055144855144855, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 6329 + }, + { + "epoch": 5.055944055944056, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 6330 + }, + { + "epoch": 5.056743256743257, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 6331 + }, + { + "epoch": 5.057542457542458, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 6332 + }, + { + "epoch": 5.058341658341658, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 6333 + }, + { + "epoch": 5.059140859140859, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.883, + "step": 6334 + }, + { + "epoch": 5.05994005994006, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 6335 + }, + { + "epoch": 5.060739260739261, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 6336 + }, + { + "epoch": 5.061538461538461, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 6337 + }, + { + "epoch": 5.062337662337662, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8904, + 
"step": 6338 + }, + { + "epoch": 5.063136863136863, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 6339 + }, + { + "epoch": 5.0639360639360635, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 6340 + }, + { + "epoch": 5.064735264735265, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 6341 + }, + { + "epoch": 5.065534465534466, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6342 + }, + { + "epoch": 5.066333666333667, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 6343 + }, + { + "epoch": 5.067132867132867, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 6344 + }, + { + "epoch": 5.067932067932068, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.882, + "step": 6345 + }, + { + "epoch": 5.068731268731269, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 6346 + }, + { + "epoch": 5.06953046953047, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 6347 + }, + { + "epoch": 5.07032967032967, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 6348 + }, + { + "epoch": 5.071128871128871, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 6349 + }, + { + "epoch": 5.071928071928072, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 6350 + }, + { + "epoch": 5.072727272727272, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8781, + "step": 6351 + }, + { + "epoch": 5.073526473526473, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 6352 + }, + { + "epoch": 5.074325674325674, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 6353 + }, + { + "epoch": 5.075124875124875, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 6354 + }, + { + "epoch": 5.075924075924076, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 6355 + }, + { + "epoch": 5.076723276723277, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 6356 + }, + { + "epoch": 5.077522477522478, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 6357 + }, + { + "epoch": 5.078321678321679, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9048, + "step": 6358 + }, + { + "epoch": 5.079120879120879, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 6359 + }, + { + "epoch": 5.07992007992008, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 6360 + }, + { + "epoch": 5.080719280719281, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 6361 + }, + { + "epoch": 5.081518481518482, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8821, + "step": 6362 + }, + { + "epoch": 5.082317682317682, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 6363 + }, + { + "epoch": 5.083116883116883, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 6364 + }, + { + "epoch": 5.083916083916084, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 6365 + }, + { + "epoch": 5.084715284715284, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8795, + "step": 6366 + }, + { + "epoch": 
5.085514485514485, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 6367 + }, + { + "epoch": 5.086313686313686, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.9022, + "step": 6368 + }, + { + "epoch": 5.0871128871128874, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 6369 + }, + { + "epoch": 5.087912087912088, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8751, + "step": 6370 + }, + { + "epoch": 5.088711288711289, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.8817, + "step": 6371 + }, + { + "epoch": 5.08951048951049, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.9068, + "step": 6372 + }, + { + "epoch": 5.090309690309691, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 6373 + }, + { + "epoch": 5.091108891108891, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 6374 + }, + { + "epoch": 5.091908091908092, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 6375 + }, + { + "epoch": 5.092707292707293, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 6376 + }, + { + "epoch": 5.093506493506493, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.9136, + "step": 6377 + }, + { + "epoch": 5.094305694305694, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 6378 + }, + { + "epoch": 5.095104895104895, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 6379 + }, + { + "epoch": 5.095904095904096, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 6380 + }, + { + "epoch": 5.096703296703296, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 6381 + }, + { + "epoch": 5.097502497502497, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 6382 + }, + { + "epoch": 5.098301698301698, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 6383 + }, + { + "epoch": 5.0991008991008995, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 6384 + }, + { + "epoch": 5.0999000999001, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 6385 + }, + { + "epoch": 5.100699300699301, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8825, + "step": 6386 + }, + { + "epoch": 5.101498501498502, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 6387 + }, + { + "epoch": 5.102297702297703, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 6388 + }, + { + "epoch": 5.103096903096903, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 6389 + }, + { + "epoch": 5.103896103896104, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.884, + "step": 6390 + }, + { + "epoch": 5.104695304695305, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 6391 + }, + { + "epoch": 5.105494505494505, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 6392 + }, + { + "epoch": 5.106293706293706, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 6393 + }, + { + "epoch": 5.107092907092907, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 6394 + }, + { + "epoch": 5.107892107892108, + 
"grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6395 + }, + { + "epoch": 5.108691308691308, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 6396 + }, + { + "epoch": 5.109490509490509, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 6397 + }, + { + "epoch": 5.1102897102897105, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 6398 + }, + { + "epoch": 5.1110889110889115, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 6399 + }, + { + "epoch": 5.111888111888112, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 6400 + }, + { + "epoch": 5.112687312687313, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 6401 + }, + { + "epoch": 5.113486513486514, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 6402 + }, + { + "epoch": 5.114285714285714, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 6403 + }, + { + "epoch": 5.115084915084915, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 6404 + }, + { + "epoch": 5.115884115884116, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 6405 + }, + { + "epoch": 5.116683316683317, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 6406 + }, + { + "epoch": 5.117482517482517, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 6407 + }, + { + "epoch": 5.118281718281718, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.885, + "step": 6408 + }, + { + "epoch": 5.119080919080919, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.9019, + "step": 6409 + }, + { + "epoch": 5.11988011988012, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 6410 + }, + { + "epoch": 5.12067932067932, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 6411 + }, + { + "epoch": 5.1214785214785215, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 6412 + }, + { + "epoch": 5.1222777222777225, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 6413 + }, + { + "epoch": 5.123076923076923, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 6414 + }, + { + "epoch": 5.123876123876124, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 6415 + }, + { + "epoch": 5.124675324675325, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 6416 + }, + { + "epoch": 5.125474525474526, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 6417 + }, + { + "epoch": 5.126273726273726, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 6418 + }, + { + "epoch": 5.127072927072927, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 6419 + }, + { + "epoch": 5.127872127872128, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.8999, + "step": 6420 + }, + { + "epoch": 5.128671328671329, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 6421 + }, + { + "epoch": 5.129470529470529, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 6422 + }, + { + "epoch": 5.13026973026973, + "grad_norm": 0.439453125, + "learning_rate": 
0.0002, + "loss": 0.8949, + "step": 6423 + }, + { + "epoch": 5.131068931068931, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 6424 + }, + { + "epoch": 5.131868131868132, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 6425 + }, + { + "epoch": 5.132667332667332, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 6426 + }, + { + "epoch": 5.1334665334665335, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 6427 + }, + { + "epoch": 5.1342657342657345, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 6428 + }, + { + "epoch": 5.135064935064935, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 6429 + }, + { + "epoch": 5.135864135864136, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 6430 + }, + { + "epoch": 5.136663336663337, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 6431 + }, + { + "epoch": 5.137462537462538, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 6432 + }, + { + "epoch": 5.138261738261738, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 6433 + }, + { + "epoch": 5.139060939060939, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 6434 + }, + { + "epoch": 5.13986013986014, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.9033, + "step": 6435 + }, + { + "epoch": 5.140659340659341, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 6436 + }, + { + "epoch": 5.141458541458541, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 6437 + }, + { + "epoch": 5.142257742257742, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 6438 + }, + { + "epoch": 5.143056943056943, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 6439 + }, + { + "epoch": 5.143856143856144, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 6440 + }, + { + "epoch": 5.1446553446553445, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8838, + "step": 6441 + }, + { + "epoch": 5.1454545454545455, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 6442 + }, + { + "epoch": 5.1462537462537465, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 6443 + }, + { + "epoch": 5.147052947052947, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 6444 + }, + { + "epoch": 5.147852147852148, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 6445 + }, + { + "epoch": 5.148651348651349, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.9037, + "step": 6446 + }, + { + "epoch": 5.14945054945055, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 6447 + }, + { + "epoch": 5.15024975024975, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.9018, + "step": 6448 + }, + { + "epoch": 5.151048951048951, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 6449 + }, + { + "epoch": 5.151848151848152, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 6450 + }, + { + "epoch": 5.152647352647353, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8931, + 
"step": 6451 + }, + { + "epoch": 5.153446553446553, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 6452 + }, + { + "epoch": 5.154245754245754, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 6453 + }, + { + "epoch": 5.155044955044955, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 6454 + }, + { + "epoch": 5.1558441558441555, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 6455 + }, + { + "epoch": 5.1566433566433565, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 6456 + }, + { + "epoch": 5.1574425574425575, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 6457 + }, + { + "epoch": 5.158241758241759, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 6458 + }, + { + "epoch": 5.159040959040959, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 6459 + }, + { + "epoch": 5.15984015984016, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 6460 + }, + { + "epoch": 5.160639360639361, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 6461 + }, + { + "epoch": 5.161438561438562, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 6462 + }, + { + "epoch": 5.162237762237762, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 6463 + }, + { + "epoch": 5.163036963036963, + "grad_norm": 0.671875, + "learning_rate": 0.0002, + "loss": 0.9062, + "step": 6464 + }, + { + "epoch": 5.163836163836164, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 6465 + }, + { + "epoch": 5.164635364635364, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 6466 + }, + { + "epoch": 5.165434565434565, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 6467 + }, + { + "epoch": 5.166233766233766, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 6468 + }, + { + "epoch": 5.167032967032967, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 6469 + }, + { + "epoch": 5.1678321678321675, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 6470 + }, + { + "epoch": 5.1686313686313685, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 6471 + }, + { + "epoch": 5.1694305694305696, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 6472 + }, + { + "epoch": 5.170229770229771, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 6473 + }, + { + "epoch": 5.171028971028971, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8788, + "step": 6474 + }, + { + "epoch": 5.171828171828172, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 6475 + }, + { + "epoch": 5.172627372627373, + "grad_norm": 0.244140625, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 6476 + }, + { + "epoch": 5.173426573426573, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 6477 + }, + { + "epoch": 5.174225774225774, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 6478 + }, + { + "epoch": 5.175024975024975, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 6479 + }, + { + 
"epoch": 5.175824175824176, + "grad_norm": 1.546875, + "learning_rate": 0.0002, + "loss": 0.9192, + "step": 6480 + }, + { + "epoch": 5.176623376623376, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8997, + "step": 6481 + }, + { + "epoch": 5.177422577422577, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 6482 + }, + { + "epoch": 5.178221778221778, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 6483 + }, + { + "epoch": 5.179020979020979, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.9083, + "step": 6484 + }, + { + "epoch": 5.1798201798201795, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6485 + }, + { + "epoch": 5.1806193806193805, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 6486 + }, + { + "epoch": 5.181418581418582, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 6487 + }, + { + "epoch": 5.182217782217783, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 6488 + }, + { + "epoch": 5.183016983016983, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 6489 + }, + { + "epoch": 5.183816183816184, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 6490 + }, + { + "epoch": 5.184615384615385, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 6491 + }, + { + "epoch": 5.185414585414585, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 6492 + }, + { + "epoch": 5.186213786213786, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 6493 + }, + { + "epoch": 5.187012987012987, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 6494 + }, + { + "epoch": 5.187812187812188, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 6495 + }, + { + "epoch": 5.188611388611388, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 6496 + }, + { + "epoch": 5.189410589410589, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 6497 + }, + { + "epoch": 5.19020979020979, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 6498 + }, + { + "epoch": 5.191008991008991, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6499 + }, + { + "epoch": 5.1918081918081915, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 6500 + }, + { + "epoch": 5.192607392607393, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 6501 + }, + { + "epoch": 5.193406593406594, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 6502 + }, + { + "epoch": 5.194205794205795, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 6503 + }, + { + "epoch": 5.195004995004995, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 6504 + }, + { + "epoch": 5.195804195804196, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 6505 + }, + { + "epoch": 5.196603396603397, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.9006, + "step": 6506 + }, + { + "epoch": 5.197402597402597, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6507 + }, + { + "epoch": 5.198201798201798, + 
"grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 6508 + }, + { + "epoch": 5.199000999000999, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 6509 + }, + { + "epoch": 5.1998001998002, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 6510 + }, + { + "epoch": 5.2005994005994, + "grad_norm": 0.248046875, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 6511 + }, + { + "epoch": 5.201398601398601, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 6512 + }, + { + "epoch": 5.202197802197802, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 6513 + }, + { + "epoch": 5.202997002997003, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 6514 + }, + { + "epoch": 5.203796203796204, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 6515 + }, + { + "epoch": 5.204595404595405, + "grad_norm": 0.244140625, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 6516 + }, + { + "epoch": 5.205394605394606, + "grad_norm": 0.25, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 6517 + }, + { + "epoch": 5.206193806193806, + "grad_norm": 1.5, + "learning_rate": 0.0002, + "loss": 0.9111, + "step": 6518 + }, + { + "epoch": 5.206993006993007, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8841, + "step": 6519 + }, + { + "epoch": 5.207792207792208, + "grad_norm": 0.2373046875, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 6520 + }, + { + "epoch": 5.208591408591409, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 6521 + }, + { + "epoch": 5.209390609390609, + "grad_norm": 0.244140625, + "learning_rate": 0.0002, + "loss": 0.8789, + "step": 6522 + }, + { + "epoch": 5.21018981018981, + "grad_norm": 0.25, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 6523 + }, + { + "epoch": 5.210989010989011, + "grad_norm": 0.2431640625, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 6524 + }, + { + "epoch": 5.211788211788212, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 6525 + }, + { + "epoch": 5.212587412587412, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 6526 + }, + { + "epoch": 5.213386613386613, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 6527 + }, + { + "epoch": 5.214185814185814, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 6528 + }, + { + "epoch": 5.2149850149850145, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 6529 + }, + { + "epoch": 5.215784215784216, + "grad_norm": 0.248046875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 6530 + }, + { + "epoch": 5.216583416583417, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 6531 + }, + { + "epoch": 5.217382617382618, + "grad_norm": 0.2451171875, + "learning_rate": 0.0002, + "loss": 0.9027, + "step": 6532 + }, + { + "epoch": 5.218181818181818, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 6533 + }, + { + "epoch": 5.218981018981019, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 6534 + }, + { + "epoch": 5.21978021978022, + "grad_norm": 0.24609375, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 6535 + }, + { + "epoch": 5.220579420579421, + "grad_norm": 0.26171875, + 
"learning_rate": 0.0002, + "loss": 0.8904, + "step": 6536 + }, + { + "epoch": 5.221378621378621, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 6537 + }, + { + "epoch": 5.222177822177822, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 6538 + }, + { + "epoch": 5.222977022977023, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8804, + "step": 6539 + }, + { + "epoch": 5.223776223776224, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 6540 + }, + { + "epoch": 5.224575424575424, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 6541 + }, + { + "epoch": 5.225374625374625, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 6542 + }, + { + "epoch": 5.226173826173826, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 6543 + }, + { + "epoch": 5.226973026973027, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8862, + "step": 6544 + }, + { + "epoch": 5.227772227772228, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 6545 + }, + { + "epoch": 5.228571428571429, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 6546 + }, + { + "epoch": 5.22937062937063, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 6547 + }, + { + "epoch": 5.23016983016983, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 6548 + }, + { + "epoch": 5.230969030969031, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 6549 + }, + { + "epoch": 5.231768231768232, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 6550 + }, + { + "epoch": 5.232567432567433, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 6551 + }, + { + "epoch": 5.233366633366633, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 6552 + }, + { + "epoch": 5.234165834165834, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 6553 + }, + { + "epoch": 5.234965034965035, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 6554 + }, + { + "epoch": 5.235764235764236, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 6555 + }, + { + "epoch": 5.236563436563436, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 6556 + }, + { + "epoch": 5.237362637362637, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 6557 + }, + { + "epoch": 5.2381618381618384, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 6558 + }, + { + "epoch": 5.238961038961039, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 6559 + }, + { + "epoch": 5.23976023976024, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 6560 + }, + { + "epoch": 5.240559440559441, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 6561 + }, + { + "epoch": 5.241358641358642, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 6562 + }, + { + "epoch": 5.242157842157842, + "grad_norm": 1.515625, + "learning_rate": 0.0002, + "loss": 0.9095, + "step": 6563 + }, + { + "epoch": 5.242957042957043, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, 
+ "loss": 0.8935, + "step": 6564 + }, + { + "epoch": 5.243756243756244, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 6565 + }, + { + "epoch": 5.244555444555445, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 6566 + }, + { + "epoch": 5.245354645354645, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 6567 + }, + { + "epoch": 5.246153846153846, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8883, + "step": 6568 + }, + { + "epoch": 5.246953046953047, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 6569 + }, + { + "epoch": 5.247752247752247, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 6570 + }, + { + "epoch": 5.248551448551448, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 6571 + }, + { + "epoch": 5.249350649350649, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 6572 + }, + { + "epoch": 5.2501498501498505, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 6573 + }, + { + "epoch": 5.250949050949051, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 6574 + }, + { + "epoch": 5.251748251748252, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 6575 + }, + { + "epoch": 5.252547452547453, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 6576 + }, + { + "epoch": 5.253346653346654, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 6577 + }, + { + "epoch": 5.254145854145854, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 6578 + }, + { + "epoch": 5.254945054945055, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 6579 + }, + { + "epoch": 5.255744255744256, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8773, + "step": 6580 + }, + { + "epoch": 5.256543456543456, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 6581 + }, + { + "epoch": 5.257342657342657, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 6582 + }, + { + "epoch": 5.258141858141858, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 6583 + }, + { + "epoch": 5.258941058941059, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 6584 + }, + { + "epoch": 5.259740259740259, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 6585 + }, + { + "epoch": 5.26053946053946, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8788, + "step": 6586 + }, + { + "epoch": 5.2613386613386615, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 6587 + }, + { + "epoch": 5.2621378621378625, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 6588 + }, + { + "epoch": 5.262937062937063, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 6589 + }, + { + "epoch": 5.263736263736264, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 6590 + }, + { + "epoch": 5.264535464535465, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 6591 + }, + { + "epoch": 5.265334665334665, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 6592 + 
}, + { + "epoch": 5.266133866133866, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 6593 + }, + { + "epoch": 5.266933066933067, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 6594 + }, + { + "epoch": 5.267732267732268, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 6595 + }, + { + "epoch": 5.268531468531468, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 6596 + }, + { + "epoch": 5.269330669330669, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 6597 + }, + { + "epoch": 5.27012987012987, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 6598 + }, + { + "epoch": 5.270929070929071, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 6599 + }, + { + "epoch": 5.271728271728271, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8743, + "step": 6600 + }, + { + "epoch": 5.2725274725274724, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 6601 + }, + { + "epoch": 5.2733266733266735, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 6602 + }, + { + "epoch": 5.2741258741258745, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 6603 + }, + { + "epoch": 5.274925074925075, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 6604 + }, + { + "epoch": 5.275724275724276, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 6605 + }, + { + "epoch": 5.276523476523477, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 6606 + }, + { + "epoch": 5.277322677322678, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 6607 + }, + { + "epoch": 5.278121878121878, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 6608 + }, + { + "epoch": 5.278921078921079, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 6609 + }, + { + "epoch": 5.27972027972028, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 6610 + }, + { + "epoch": 5.28051948051948, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 6611 + }, + { + "epoch": 5.281318681318681, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 6612 + }, + { + "epoch": 5.282117882117882, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 6613 + }, + { + "epoch": 5.282917082917083, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 6614 + }, + { + "epoch": 5.283716283716283, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 6615 + }, + { + "epoch": 5.2845154845154845, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 6616 + }, + { + "epoch": 5.2853146853146855, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 6617 + }, + { + "epoch": 5.2861138861138866, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 6618 + }, + { + "epoch": 5.286913086913087, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.884, + "step": 6619 + }, + { + "epoch": 5.287712287712288, + "grad_norm": 0.734375, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 6620 + }, + { + "epoch": 
5.288511488511489, + "grad_norm": 0.765625, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 6621 + }, + { + "epoch": 5.289310689310689, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 6622 + }, + { + "epoch": 5.29010989010989, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 6623 + }, + { + "epoch": 5.290909090909091, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 6624 + }, + { + "epoch": 5.291708291708292, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 6625 + }, + { + "epoch": 5.292507492507492, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 6626 + }, + { + "epoch": 5.293306693306693, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 6627 + }, + { + "epoch": 5.294105894105894, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 6628 + }, + { + "epoch": 5.294905094905095, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 6629 + }, + { + "epoch": 5.2957042957042955, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 6630 + }, + { + "epoch": 5.2965034965034965, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 6631 + }, + { + "epoch": 5.2973026973026975, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 6632 + }, + { + "epoch": 5.298101898101898, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 6633 + }, + { + "epoch": 5.298901098901099, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8841, + "step": 6634 + }, + { + "epoch": 5.2997002997003, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 6635 + }, + { + "epoch": 5.300499500499501, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 6636 + }, + { + "epoch": 5.301298701298701, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 6637 + }, + { + "epoch": 5.302097902097902, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8811, + "step": 6638 + }, + { + "epoch": 5.302897102897103, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 6639 + }, + { + "epoch": 5.303696303696304, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 6640 + }, + { + "epoch": 5.304495504495504, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 6641 + }, + { + "epoch": 5.305294705294705, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 6642 + }, + { + "epoch": 5.306093906093906, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 6643 + }, + { + "epoch": 5.3068931068931064, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 6644 + }, + { + "epoch": 5.3076923076923075, + "grad_norm": 1.671875, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 6645 + }, + { + "epoch": 5.3084915084915085, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 6646 + }, + { + "epoch": 5.30929070929071, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 6647 + }, + { + "epoch": 5.31008991008991, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 6648 + }, + { + "epoch": 5.310889110889111, + "grad_norm": 
0.4296875, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 6649 + }, + { + "epoch": 5.311688311688312, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 6650 + }, + { + "epoch": 5.312487512487513, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 6651 + }, + { + "epoch": 5.313286713286713, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6652 + }, + { + "epoch": 5.314085914085914, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 6653 + }, + { + "epoch": 5.314885114885115, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 6654 + }, + { + "epoch": 5.315684315684316, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 6655 + }, + { + "epoch": 5.316483516483516, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8787, + "step": 6656 + }, + { + "epoch": 5.317282717282717, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 6657 + }, + { + "epoch": 5.318081918081918, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 6658 + }, + { + "epoch": 5.3188811188811185, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 6659 + }, + { + "epoch": 5.3196803196803195, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 6660 + }, + { + "epoch": 5.3204795204795206, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.884, + "step": 6661 + }, + { + "epoch": 5.321278721278722, + "grad_norm": 0.6875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 6662 + }, + { + "epoch": 5.322077922077922, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 6663 + }, + { + "epoch": 5.322877122877123, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 6664 + }, + { + "epoch": 5.323676323676324, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 6665 + }, + { + "epoch": 5.324475524475525, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 6666 + }, + { + "epoch": 5.325274725274725, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 6667 + }, + { + "epoch": 5.326073926073926, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 6668 + }, + { + "epoch": 5.326873126873127, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 6669 + }, + { + "epoch": 5.327672327672328, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 6670 + }, + { + "epoch": 5.328471528471528, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.9043, + "step": 6671 + }, + { + "epoch": 5.329270729270729, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 6672 + }, + { + "epoch": 5.33006993006993, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 6673 + }, + { + "epoch": 5.3308691308691305, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 6674 + }, + { + "epoch": 5.3316683316683315, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 6675 + }, + { + "epoch": 5.332467532467533, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 6676 + }, + { + "epoch": 5.333266733266734, + "grad_norm": 0.462890625, + "learning_rate": 
0.0002, + "loss": 0.8931, + "step": 6677 + }, + { + "epoch": 5.334065934065934, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 6678 + }, + { + "epoch": 5.334865134865135, + "grad_norm": 0.671875, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 6679 + }, + { + "epoch": 5.335664335664336, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8862, + "step": 6680 + }, + { + "epoch": 5.336463536463537, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 6681 + }, + { + "epoch": 5.337262737262737, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 6682 + }, + { + "epoch": 5.338061938061938, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 6683 + }, + { + "epoch": 5.338861138861139, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 6684 + }, + { + "epoch": 5.339660339660339, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 6685 + }, + { + "epoch": 5.34045954045954, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6686 + }, + { + "epoch": 5.341258741258741, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 6687 + }, + { + "epoch": 5.342057942057942, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 6688 + }, + { + "epoch": 5.3428571428571425, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 6689 + }, + { + "epoch": 5.343656343656344, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 6690 + }, + { + "epoch": 5.344455544455545, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 6691 + }, + { + "epoch": 5.345254745254746, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 6692 + }, + { + "epoch": 5.346053946053946, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 6693 + }, + { + "epoch": 5.346853146853147, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 6694 + }, + { + "epoch": 5.347652347652348, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 6695 + }, + { + "epoch": 5.348451548451548, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 6696 + }, + { + "epoch": 5.349250749250749, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 6697 + }, + { + "epoch": 5.35004995004995, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 6698 + }, + { + "epoch": 5.350849150849151, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 6699 + }, + { + "epoch": 5.351648351648351, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 6700 + }, + { + "epoch": 5.352447552447552, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 6701 + }, + { + "epoch": 5.353246753246753, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 6702 + }, + { + "epoch": 5.354045954045954, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 6703 + }, + { + "epoch": 5.3548451548451546, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.9075, + "step": 6704 + }, + { + "epoch": 5.355644355644356, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 
6705 + }, + { + "epoch": 5.356443556443557, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9011, + "step": 6706 + }, + { + "epoch": 5.357242757242757, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 6707 + }, + { + "epoch": 5.358041958041958, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 6708 + }, + { + "epoch": 5.358841158841159, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 6709 + }, + { + "epoch": 5.35964035964036, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 6710 + }, + { + "epoch": 5.36043956043956, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 6711 + }, + { + "epoch": 5.361238761238761, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 6712 + }, + { + "epoch": 5.362037962037962, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 6713 + }, + { + "epoch": 5.362837162837163, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 6714 + }, + { + "epoch": 5.363636363636363, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 6715 + }, + { + "epoch": 5.364435564435564, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 6716 + }, + { + "epoch": 5.365234765234765, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 6717 + }, + { + "epoch": 5.366033966033966, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 6718 + }, + { + "epoch": 5.366833166833167, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 6719 + }, + { + "epoch": 5.367632367632368, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 6720 + }, + { + "epoch": 5.368431568431569, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 6721 + }, + { + "epoch": 5.36923076923077, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 6722 + }, + { + "epoch": 5.37002997002997, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 6723 + }, + { + "epoch": 5.370829170829171, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 6724 + }, + { + "epoch": 5.371628371628372, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 6725 + }, + { + "epoch": 5.372427572427572, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 6726 + }, + { + "epoch": 5.373226773226773, + "grad_norm": 1.328125, + "learning_rate": 0.0002, + "loss": 0.9317, + "step": 6727 + }, + { + "epoch": 5.374025974025974, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 6728 + }, + { + "epoch": 5.374825174825175, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 6729 + }, + { + "epoch": 5.375624375624375, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 6730 + }, + { + "epoch": 5.376423576423576, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 6731 + }, + { + "epoch": 5.377222777222777, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 6732 + }, + { + "epoch": 5.3780219780219785, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 6733 + }, + { + "epoch": 
5.378821178821179, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 6734 + }, + { + "epoch": 5.37962037962038, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 6735 + }, + { + "epoch": 5.380419580419581, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.902, + "step": 6736 + }, + { + "epoch": 5.381218781218781, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 6737 + }, + { + "epoch": 5.382017982017982, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 6738 + }, + { + "epoch": 5.382817182817183, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 6739 + }, + { + "epoch": 5.383616383616384, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 6740 + }, + { + "epoch": 5.384415584415584, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 6741 + }, + { + "epoch": 5.385214785214785, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 0.9122, + "step": 6742 + }, + { + "epoch": 5.386013986013986, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 6743 + }, + { + "epoch": 5.386813186813187, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9029, + "step": 6744 + }, + { + "epoch": 5.387612387612387, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 6745 + }, + { + "epoch": 5.388411588411588, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 6746 + }, + { + "epoch": 5.389210789210789, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 6747 + }, + { + "epoch": 5.39000999000999, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 6748 + }, + { + "epoch": 5.390809190809191, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 6749 + }, + { + "epoch": 5.391608391608392, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 6750 + }, + { + "epoch": 5.392407592407593, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9156, + "step": 6751 + }, + { + "epoch": 5.393206793206793, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 6752 + }, + { + "epoch": 5.394005994005994, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 6753 + }, + { + "epoch": 5.394805194805195, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8829, + "step": 6754 + }, + { + "epoch": 5.395604395604396, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 6755 + }, + { + "epoch": 5.396403596403596, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 6756 + }, + { + "epoch": 5.397202797202797, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 6757 + }, + { + "epoch": 5.398001998001998, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 6758 + }, + { + "epoch": 5.398801198801198, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 6759 + }, + { + "epoch": 5.399600399600399, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 6760 + }, + { + "epoch": 5.4003996003996, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 6761 + }, + { + "epoch": 5.4011988011988015, + 
"grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 6762 + }, + { + "epoch": 5.401998001998002, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 6763 + }, + { + "epoch": 5.402797202797203, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.9005, + "step": 6764 + }, + { + "epoch": 5.403596403596404, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 6765 + }, + { + "epoch": 5.404395604395605, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 6766 + }, + { + "epoch": 5.405194805194805, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 6767 + }, + { + "epoch": 5.405994005994006, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8762, + "step": 6768 + }, + { + "epoch": 5.406793206793207, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 6769 + }, + { + "epoch": 5.407592407592408, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 6770 + }, + { + "epoch": 5.408391608391608, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 6771 + }, + { + "epoch": 5.409190809190809, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 6772 + }, + { + "epoch": 5.40999000999001, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 6773 + }, + { + "epoch": 5.41078921078921, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 6774 + }, + { + "epoch": 5.411588411588411, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 6775 + }, + { + "epoch": 5.4123876123876125, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 6776 + }, + { + "epoch": 5.4131868131868135, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 6777 + }, + { + "epoch": 5.413986013986014, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8862, + "step": 6778 + }, + { + "epoch": 5.414785214785215, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 6779 + }, + { + "epoch": 5.415584415584416, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 6780 + }, + { + "epoch": 5.416383616383617, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 6781 + }, + { + "epoch": 5.417182817182817, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 6782 + }, + { + "epoch": 5.417982017982018, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 6783 + }, + { + "epoch": 5.418781218781219, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6784 + }, + { + "epoch": 5.41958041958042, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 6785 + }, + { + "epoch": 5.42037962037962, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 6786 + }, + { + "epoch": 5.421178821178821, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6787 + }, + { + "epoch": 5.421978021978022, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 6788 + }, + { + "epoch": 5.422777222777222, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 6789 + }, + { + "epoch": 5.4235764235764234, + "grad_norm": 0.326171875, 
+ "learning_rate": 0.0002, + "loss": 0.885, + "step": 6790 + }, + { + "epoch": 5.4243756243756245, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 6791 + }, + { + "epoch": 5.4251748251748255, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 6792 + }, + { + "epoch": 5.425974025974026, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 6793 + }, + { + "epoch": 5.426773226773227, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 6794 + }, + { + "epoch": 5.427572427572428, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 6795 + }, + { + "epoch": 5.428371628371629, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 6796 + }, + { + "epoch": 5.429170829170829, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 6797 + }, + { + "epoch": 5.42997002997003, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 6798 + }, + { + "epoch": 5.430769230769231, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 6799 + }, + { + "epoch": 5.431568431568431, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 6800 + }, + { + "epoch": 5.432367632367632, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 6801 + }, + { + "epoch": 5.433166833166833, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 6802 + }, + { + "epoch": 5.433966033966034, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 6803 + }, + { + "epoch": 5.434765234765234, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 6804 + }, + { + "epoch": 5.4355644355644355, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 6805 + }, + { + "epoch": 5.4363636363636365, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 6806 + }, + { + "epoch": 5.4371628371628375, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6807 + }, + { + "epoch": 5.437962037962038, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 6808 + }, + { + "epoch": 5.438761238761239, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 6809 + }, + { + "epoch": 5.43956043956044, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 6810 + }, + { + "epoch": 5.44035964035964, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 6811 + }, + { + "epoch": 5.441158841158841, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 6812 + }, + { + "epoch": 5.441958041958042, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 6813 + }, + { + "epoch": 5.442757242757243, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 6814 + }, + { + "epoch": 5.443556443556443, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 6815 + }, + { + "epoch": 5.444355644355644, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 6816 + }, + { + "epoch": 5.445154845154845, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8811, + "step": 6817 + }, + { + "epoch": 5.445954045954046, + "grad_norm": 0.27734375, + "learning_rate": 
0.0002, + "loss": 0.8949, + "step": 6818 + }, + { + "epoch": 5.4467532467532465, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 6819 + }, + { + "epoch": 5.4475524475524475, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 6820 + }, + { + "epoch": 5.4483516483516485, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 6821 + }, + { + "epoch": 5.449150849150849, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 6822 + }, + { + "epoch": 5.44995004995005, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 6823 + }, + { + "epoch": 5.450749250749251, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 6824 + }, + { + "epoch": 5.451548451548452, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.9031, + "step": 6825 + }, + { + "epoch": 5.452347652347652, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 6826 + }, + { + "epoch": 5.453146853146853, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 6827 + }, + { + "epoch": 5.453946053946054, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 6828 + }, + { + "epoch": 5.454745254745255, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.9052, + "step": 6829 + }, + { + "epoch": 5.455544455544455, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 6830 + }, + { + "epoch": 5.456343656343656, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 6831 + }, + { + "epoch": 5.457142857142857, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8803, + "step": 6832 + }, + { + "epoch": 5.457942057942058, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 6833 + }, + { + "epoch": 5.4587412587412585, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 6834 + }, + { + "epoch": 5.4595404595404595, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 6835 + }, + { + "epoch": 5.460339660339661, + "grad_norm": 1.8359375, + "learning_rate": 0.0002, + "loss": 0.9324, + "step": 6836 + }, + { + "epoch": 5.461138861138862, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8798, + "step": 6837 + }, + { + "epoch": 5.461938061938062, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 6838 + }, + { + "epoch": 5.462737262737263, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 6839 + }, + { + "epoch": 5.463536463536464, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 6840 + }, + { + "epoch": 5.464335664335664, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 6841 + }, + { + "epoch": 5.465134865134865, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 6842 + }, + { + "epoch": 5.465934065934066, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 6843 + }, + { + "epoch": 5.466733266733267, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 6844 + }, + { + "epoch": 5.467532467532467, + "grad_norm": 0.73046875, + "learning_rate": 0.0002, + "loss": 0.8993, + "step": 6845 + }, + { + "epoch": 5.468331668331668, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 
0.8964, + "step": 6846 + }, + { + "epoch": 5.469130869130869, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 6847 + }, + { + "epoch": 5.46993006993007, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 6848 + }, + { + "epoch": 5.4707292707292705, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 6849 + }, + { + "epoch": 5.4715284715284715, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 6850 + }, + { + "epoch": 5.472327672327673, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 6851 + }, + { + "epoch": 5.473126873126873, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 6852 + }, + { + "epoch": 5.473926073926074, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 6853 + }, + { + "epoch": 5.474725274725275, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 6854 + }, + { + "epoch": 5.475524475524476, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 6855 + }, + { + "epoch": 5.476323676323676, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 6856 + }, + { + "epoch": 5.477122877122877, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 6857 + }, + { + "epoch": 5.477922077922078, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 6858 + }, + { + "epoch": 5.478721278721279, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 6859 + }, + { + "epoch": 5.479520479520479, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9042, + "step": 6860 + }, + { + "epoch": 5.48031968031968, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 6861 + }, + { + "epoch": 5.481118881118881, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 6862 + }, + { + "epoch": 5.4819180819180815, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 6863 + }, + { + "epoch": 5.4827172827172825, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 6864 + }, + { + "epoch": 5.483516483516484, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 6865 + }, + { + "epoch": 5.484315684315685, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 6866 + }, + { + "epoch": 5.485114885114885, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 6867 + }, + { + "epoch": 5.485914085914086, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8823, + "step": 6868 + }, + { + "epoch": 5.486713286713287, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 6869 + }, + { + "epoch": 5.487512487512488, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 6870 + }, + { + "epoch": 5.488311688311688, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6871 + }, + { + "epoch": 5.489110889110889, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 6872 + }, + { + "epoch": 5.48991008991009, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 6873 + }, + { + "epoch": 5.49070929070929, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 6874 + }, + { + 
"epoch": 5.491508491508491, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 6875 + }, + { + "epoch": 5.492307692307692, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 6876 + }, + { + "epoch": 5.493106893106893, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 6877 + }, + { + "epoch": 5.4939060939060935, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 6878 + }, + { + "epoch": 5.494705294705295, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 6879 + }, + { + "epoch": 5.495504495504496, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.9021, + "step": 6880 + }, + { + "epoch": 5.496303696303697, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 6881 + }, + { + "epoch": 5.497102897102897, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 6882 + }, + { + "epoch": 5.497902097902098, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 6883 + }, + { + "epoch": 5.498701298701299, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 6884 + }, + { + "epoch": 5.4995004995005, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 6885 + }, + { + "epoch": 5.5002997002997, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8807, + "step": 6886 + }, + { + "epoch": 5.501098901098901, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 6887 + }, + { + "epoch": 5.501898101898102, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 6888 + }, + { + "epoch": 5.502697302697303, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 6889 + }, + { + "epoch": 5.503496503496503, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 6890 + }, + { + "epoch": 5.504295704295704, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 6891 + }, + { + "epoch": 5.505094905094905, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 6892 + }, + { + "epoch": 5.5058941058941056, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8862, + "step": 6893 + }, + { + "epoch": 5.506693306693307, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 6894 + }, + { + "epoch": 5.507492507492508, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 6895 + }, + { + "epoch": 5.508291708291709, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 6896 + }, + { + "epoch": 5.509090909090909, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 6897 + }, + { + "epoch": 5.50989010989011, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6898 + }, + { + "epoch": 5.510689310689311, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 6899 + }, + { + "epoch": 5.511488511488512, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 6900 + }, + { + "epoch": 5.512287712287712, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 6901 + }, + { + "epoch": 5.513086913086913, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 6902 + }, + { + "epoch": 5.513886113886114, + 
"grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 6903 + }, + { + "epoch": 5.514685314685314, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 6904 + }, + { + "epoch": 5.515484515484515, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 6905 + }, + { + "epoch": 5.516283716283716, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 6906 + }, + { + "epoch": 5.517082917082917, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8808, + "step": 6907 + }, + { + "epoch": 5.517882117882118, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 6908 + }, + { + "epoch": 5.518681318681319, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 6909 + }, + { + "epoch": 5.51948051948052, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6910 + }, + { + "epoch": 5.520279720279721, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 6911 + }, + { + "epoch": 5.521078921078921, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 6912 + }, + { + "epoch": 5.521878121878122, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 6913 + }, + { + "epoch": 5.522677322677323, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 6914 + }, + { + "epoch": 5.523476523476523, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 6915 + }, + { + "epoch": 5.524275724275724, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 6916 + }, + { + "epoch": 5.525074925074925, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8814, + "step": 6917 + }, + { + "epoch": 5.525874125874126, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 6918 + }, + { + "epoch": 5.526673326673326, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 6919 + }, + { + "epoch": 5.527472527472527, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 6920 + }, + { + "epoch": 5.528271728271728, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 6921 + }, + { + "epoch": 5.5290709290709295, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 6922 + }, + { + "epoch": 5.52987012987013, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 6923 + }, + { + "epoch": 5.530669330669331, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 6924 + }, + { + "epoch": 5.531468531468532, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 6925 + }, + { + "epoch": 5.532267732267732, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 6926 + }, + { + "epoch": 5.533066933066933, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.903, + "step": 6927 + }, + { + "epoch": 5.533866133866134, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 6928 + }, + { + "epoch": 5.534665334665335, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8808, + "step": 6929 + }, + { + "epoch": 5.535464535464535, + "grad_norm": 0.71484375, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 6930 + }, + { + "epoch": 5.536263736263736, + "grad_norm": 0.546875, + 
"learning_rate": 0.0002, + "loss": 0.8984, + "step": 6931 + }, + { + "epoch": 5.537062937062937, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 6932 + }, + { + "epoch": 5.537862137862138, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 6933 + }, + { + "epoch": 5.538661338661338, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 6934 + }, + { + "epoch": 5.539460539460539, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 6935 + }, + { + "epoch": 5.54025974025974, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 6936 + }, + { + "epoch": 5.541058941058941, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 6937 + }, + { + "epoch": 5.541858141858142, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8812, + "step": 6938 + }, + { + "epoch": 5.542657342657343, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.881, + "step": 6939 + }, + { + "epoch": 5.543456543456544, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 6940 + }, + { + "epoch": 5.544255744255745, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 6941 + }, + { + "epoch": 5.545054945054945, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 6942 + }, + { + "epoch": 5.545854145854146, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 6943 + }, + { + "epoch": 5.546653346653347, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 6944 + }, + { + "epoch": 5.547452547452547, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.8817, + "step": 6945 + }, + { + "epoch": 5.548251748251748, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 6946 + }, + { + "epoch": 5.549050949050949, + "grad_norm": 1.5703125, + "learning_rate": 0.0002, + "loss": 0.9223, + "step": 6947 + }, + { + "epoch": 5.54985014985015, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 6948 + }, + { + "epoch": 5.55064935064935, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 6949 + }, + { + "epoch": 5.551448551448551, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 6950 + }, + { + "epoch": 5.5522477522477525, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9045, + "step": 6951 + }, + { + "epoch": 5.5530469530469535, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 6952 + }, + { + "epoch": 5.553846153846154, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 6953 + }, + { + "epoch": 5.554645354645355, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 6954 + }, + { + "epoch": 5.555444555444556, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 6955 + }, + { + "epoch": 5.556243756243756, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 6956 + }, + { + "epoch": 5.557042957042957, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 6957 + }, + { + "epoch": 5.557842157842158, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 6958 + }, + { + "epoch": 5.558641358641359, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 
0.8837, + "step": 6959 + }, + { + "epoch": 5.559440559440559, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 6960 + }, + { + "epoch": 5.56023976023976, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 6961 + }, + { + "epoch": 5.561038961038961, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 6962 + }, + { + "epoch": 5.561838161838162, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 6963 + }, + { + "epoch": 5.562637362637362, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 6964 + }, + { + "epoch": 5.5634365634365635, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 6965 + }, + { + "epoch": 5.5642357642357645, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 6966 + }, + { + "epoch": 5.565034965034965, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 6967 + }, + { + "epoch": 5.565834165834166, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 6968 + }, + { + "epoch": 5.566633366633367, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8883, + "step": 6969 + }, + { + "epoch": 5.567432567432568, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 6970 + }, + { + "epoch": 5.568231768231768, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 6971 + }, + { + "epoch": 5.569030969030969, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 6972 + }, + { + "epoch": 5.56983016983017, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 6973 + }, + { + "epoch": 5.570629370629371, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 6974 + }, + { + "epoch": 5.571428571428571, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 6975 + }, + { + "epoch": 5.572227772227772, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 6976 + }, + { + "epoch": 5.573026973026973, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 6977 + }, + { + "epoch": 5.573826173826173, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 6978 + }, + { + "epoch": 5.574625374625374, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 6979 + }, + { + "epoch": 5.5754245754245755, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 6980 + }, + { + "epoch": 5.5762237762237765, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 6981 + }, + { + "epoch": 5.577022977022977, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 6982 + }, + { + "epoch": 5.577822177822178, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 6983 + }, + { + "epoch": 5.578621378621379, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 6984 + }, + { + "epoch": 5.57942057942058, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 6985 + }, + { + "epoch": 5.58021978021978, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 6986 + }, + { + "epoch": 5.581018981018981, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 6987 + 
}, + { + "epoch": 5.581818181818182, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 6988 + }, + { + "epoch": 5.582617382617382, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 6989 + }, + { + "epoch": 5.583416583416583, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 6990 + }, + { + "epoch": 5.584215784215784, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 6991 + }, + { + "epoch": 5.585014985014985, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 6992 + }, + { + "epoch": 5.585814185814185, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 6993 + }, + { + "epoch": 5.5866133866133865, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 6994 + }, + { + "epoch": 5.5874125874125875, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 6995 + }, + { + "epoch": 5.5882117882117885, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 6996 + }, + { + "epoch": 5.589010989010989, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 6997 + }, + { + "epoch": 5.58981018981019, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 6998 + }, + { + "epoch": 5.590609390609391, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 6999 + }, + { + "epoch": 5.591408591408591, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 7000 + }, + { + "epoch": 5.592207792207792, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 7001 + }, + { + "epoch": 5.593006993006993, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 7002 + }, + { + "epoch": 5.593806193806194, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 7003 + }, + { + "epoch": 5.594605394605395, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 7004 + }, + { + "epoch": 5.595404595404595, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 7005 + }, + { + "epoch": 5.596203796203796, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.9025, + "step": 7006 + }, + { + "epoch": 5.597002997002997, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 7007 + }, + { + "epoch": 5.5978021978021975, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 7008 + }, + { + "epoch": 5.5986013986013985, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 7009 + }, + { + "epoch": 5.5994005994005995, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 7010 + }, + { + "epoch": 5.600199800199801, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 7011 + }, + { + "epoch": 5.600999000999001, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 7012 + }, + { + "epoch": 5.601798201798202, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 7013 + }, + { + "epoch": 5.602597402597403, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 7014 + }, + { + "epoch": 5.603396603396604, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 7015 + }, + { + "epoch": 
5.604195804195804, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.883, + "step": 7016 + }, + { + "epoch": 5.604995004995005, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8841, + "step": 7017 + }, + { + "epoch": 5.605794205794206, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.9004, + "step": 7018 + }, + { + "epoch": 5.606593406593406, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 7019 + }, + { + "epoch": 5.607392607392607, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.8798, + "step": 7020 + }, + { + "epoch": 5.608191808191808, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 7021 + }, + { + "epoch": 5.608991008991009, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 7022 + }, + { + "epoch": 5.6097902097902095, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 7023 + }, + { + "epoch": 5.6105894105894105, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 7024 + }, + { + "epoch": 5.611388611388612, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 7025 + }, + { + "epoch": 5.612187812187813, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 7026 + }, + { + "epoch": 5.612987012987013, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 7027 + }, + { + "epoch": 5.613786213786214, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 7028 + }, + { + "epoch": 5.614585414585415, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 7029 + }, + { + "epoch": 5.615384615384615, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 7030 + }, + { + "epoch": 5.616183816183816, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 7031 + }, + { + "epoch": 5.616983016983017, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 7032 + }, + { + "epoch": 5.617782217782218, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 7033 + }, + { + "epoch": 5.618581418581418, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 7034 + }, + { + "epoch": 5.619380619380619, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 7035 + }, + { + "epoch": 5.62017982017982, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 7036 + }, + { + "epoch": 5.620979020979021, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 7037 + }, + { + "epoch": 5.6217782217782215, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 7038 + }, + { + "epoch": 5.6225774225774225, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 7039 + }, + { + "epoch": 5.623376623376624, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 7040 + }, + { + "epoch": 5.624175824175824, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 7041 + }, + { + "epoch": 5.624975024975025, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 7042 + }, + { + "epoch": 5.625774225774226, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8823, + "step": 7043 + }, + { + "epoch": 5.626573426573427, + 
"grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 7044 + }, + { + "epoch": 5.627372627372627, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 7045 + }, + { + "epoch": 5.628171828171828, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 7046 + }, + { + "epoch": 5.628971028971029, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 7047 + }, + { + "epoch": 5.62977022977023, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7048 + }, + { + "epoch": 5.63056943056943, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 7049 + }, + { + "epoch": 5.631368631368631, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 7050 + }, + { + "epoch": 5.632167832167832, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 7051 + }, + { + "epoch": 5.6329670329670325, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 7052 + }, + { + "epoch": 5.6337662337662335, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.883, + "step": 7053 + }, + { + "epoch": 5.634565434565435, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 7054 + }, + { + "epoch": 5.635364635364636, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 7055 + }, + { + "epoch": 5.636163836163837, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7056 + }, + { + "epoch": 5.636963036963037, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 7057 + }, + { + "epoch": 5.637762237762238, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 7058 + }, + { + "epoch": 5.638561438561439, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 7059 + }, + { + "epoch": 5.639360639360639, + "grad_norm": 0.80859375, + "learning_rate": 0.0002, + "loss": 0.8974, + "step": 7060 + }, + { + "epoch": 5.64015984015984, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 7061 + }, + { + "epoch": 5.640959040959041, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.8836, + "step": 7062 + }, + { + "epoch": 5.641758241758242, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 7063 + }, + { + "epoch": 5.642557442557442, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 7064 + }, + { + "epoch": 5.643356643356643, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 7065 + }, + { + "epoch": 5.644155844155844, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 7066 + }, + { + "epoch": 5.644955044955045, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 7067 + }, + { + "epoch": 5.645754245754246, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 7068 + }, + { + "epoch": 5.646553446553447, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 7069 + }, + { + "epoch": 5.647352647352648, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 7070 + }, + { + "epoch": 5.648151848151848, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 7071 + }, + { + "epoch": 5.648951048951049, + "grad_norm": 0.5, + "learning_rate": 0.0002, 
+ "loss": 0.8898, + "step": 7072 + }, + { + "epoch": 5.64975024975025, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 7073 + }, + { + "epoch": 5.650549450549451, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 7074 + }, + { + "epoch": 5.651348651348651, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 7075 + }, + { + "epoch": 5.652147852147852, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 7076 + }, + { + "epoch": 5.652947052947053, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 7077 + }, + { + "epoch": 5.653746253746254, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 7078 + }, + { + "epoch": 5.654545454545454, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 7079 + }, + { + "epoch": 5.655344655344655, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 7080 + }, + { + "epoch": 5.656143856143856, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 7081 + }, + { + "epoch": 5.6569430569430565, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 7082 + }, + { + "epoch": 5.657742257742258, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 7083 + }, + { + "epoch": 5.658541458541459, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 7084 + }, + { + "epoch": 5.65934065934066, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 7085 + }, + { + "epoch": 5.66013986013986, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 7086 + }, + { + "epoch": 5.660939060939061, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.9013, + "step": 7087 + }, + { + "epoch": 5.661738261738262, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 7088 + }, + { + "epoch": 5.662537462537463, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 7089 + }, + { + "epoch": 5.663336663336663, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 7090 + }, + { + "epoch": 5.664135864135864, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 7091 + }, + { + "epoch": 5.664935064935065, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 7092 + }, + { + "epoch": 5.665734265734265, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 7093 + }, + { + "epoch": 5.666533466533466, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 7094 + }, + { + "epoch": 5.667332667332667, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 7095 + }, + { + "epoch": 5.668131868131868, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 7096 + }, + { + "epoch": 5.668931068931069, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 7097 + }, + { + "epoch": 5.66973026973027, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 7098 + }, + { + "epoch": 5.670529470529471, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 7099 + }, + { + "epoch": 5.671328671328672, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 7100 + }, + { + 
"epoch": 5.672127872127872, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7101 + }, + { + "epoch": 5.672927072927073, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 7102 + }, + { + "epoch": 5.673726273726274, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 7103 + }, + { + "epoch": 5.674525474525474, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 7104 + }, + { + "epoch": 5.675324675324675, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8825, + "step": 7105 + }, + { + "epoch": 5.676123876123876, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 7106 + }, + { + "epoch": 5.676923076923077, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 7107 + }, + { + "epoch": 5.677722277722278, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 7108 + }, + { + "epoch": 5.678521478521478, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 7109 + }, + { + "epoch": 5.679320679320679, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 7110 + }, + { + "epoch": 5.6801198801198804, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8838, + "step": 7111 + }, + { + "epoch": 5.680919080919081, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8841, + "step": 7112 + }, + { + "epoch": 5.681718281718282, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 7113 + }, + { + "epoch": 5.682517482517483, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 7114 + }, + { + "epoch": 5.683316683316683, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 7115 + }, + { + "epoch": 5.684115884115884, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 7116 + }, + { + "epoch": 5.684915084915085, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 7117 + }, + { + "epoch": 5.685714285714286, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 7118 + }, + { + "epoch": 5.686513486513487, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 7119 + }, + { + "epoch": 5.687312687312687, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8825, + "step": 7120 + }, + { + "epoch": 5.688111888111888, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 7121 + }, + { + "epoch": 5.688911088911089, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 7122 + }, + { + "epoch": 5.689710289710289, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 7123 + }, + { + "epoch": 5.69050949050949, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8786, + "step": 7124 + }, + { + "epoch": 5.691308691308691, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 7125 + }, + { + "epoch": 5.6921078921078925, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 7126 + }, + { + "epoch": 5.692907092907093, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 7127 + }, + { + "epoch": 5.693706293706294, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8803, + "step": 7128 + }, + { + "epoch": 5.694505494505495, + 
"grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 7129 + }, + { + "epoch": 5.695304695304696, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 7130 + }, + { + "epoch": 5.696103896103896, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 7131 + }, + { + "epoch": 5.696903096903097, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 7132 + }, + { + "epoch": 5.697702297702298, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 7133 + }, + { + "epoch": 5.698501498501498, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 7134 + }, + { + "epoch": 5.699300699300699, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 7135 + }, + { + "epoch": 5.7000999000999, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8787, + "step": 7136 + }, + { + "epoch": 5.700899100899101, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.885, + "step": 7137 + }, + { + "epoch": 5.701698301698301, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8824, + "step": 7138 + }, + { + "epoch": 5.702497502497502, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 7139 + }, + { + "epoch": 5.7032967032967035, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 7140 + }, + { + "epoch": 5.7040959040959045, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 7141 + }, + { + "epoch": 5.704895104895105, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 7142 + }, + { + "epoch": 5.705694305694306, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 7143 + }, + { + "epoch": 5.706493506493507, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 7144 + }, + { + "epoch": 5.707292707292707, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 7145 + }, + { + "epoch": 5.708091908091908, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 7146 + }, + { + "epoch": 5.708891108891109, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 7147 + }, + { + "epoch": 5.70969030969031, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 7148 + }, + { + "epoch": 5.71048951048951, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 7149 + }, + { + "epoch": 5.711288711288711, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 7150 + }, + { + "epoch": 5.712087912087912, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 7151 + }, + { + "epoch": 5.712887112887113, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 7152 + }, + { + "epoch": 5.713686313686313, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 7153 + }, + { + "epoch": 5.7144855144855145, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8773, + "step": 7154 + }, + { + "epoch": 5.7152847152847155, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8803, + "step": 7155 + }, + { + "epoch": 5.716083916083916, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 7156 + }, + { + "epoch": 5.716883116883117, + "grad_norm": 0.3125, + 
"learning_rate": 0.0002, + "loss": 0.8963, + "step": 7157 + }, + { + "epoch": 5.717682317682318, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8824, + "step": 7158 + }, + { + "epoch": 5.718481518481519, + "grad_norm": 0.69921875, + "learning_rate": 0.0002, + "loss": 0.8987, + "step": 7159 + }, + { + "epoch": 5.719280719280719, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8973, + "step": 7160 + }, + { + "epoch": 5.72007992007992, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8799, + "step": 7161 + }, + { + "epoch": 5.720879120879121, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8803, + "step": 7162 + }, + { + "epoch": 5.721678321678322, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 7163 + }, + { + "epoch": 5.722477522477522, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 7164 + }, + { + "epoch": 5.723276723276723, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.881, + "step": 7165 + }, + { + "epoch": 5.724075924075924, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 7166 + }, + { + "epoch": 5.724875124875124, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8809, + "step": 7167 + }, + { + "epoch": 5.725674325674325, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8836, + "step": 7168 + }, + { + "epoch": 5.7264735264735265, + "grad_norm": 0.2490234375, + "learning_rate": 0.0002, + "loss": 0.8811, + "step": 7169 + }, + { + "epoch": 5.7272727272727275, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 7170 + }, + { + "epoch": 5.7280719280719286, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 7171 + }, + { + "epoch": 5.728871128871129, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 7172 + }, + { + "epoch": 5.72967032967033, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 7173 + }, + { + "epoch": 5.730469530469531, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 7174 + }, + { + "epoch": 5.731268731268731, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 7175 + }, + { + "epoch": 5.732067932067932, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 7176 + }, + { + "epoch": 5.732867132867133, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 7177 + }, + { + "epoch": 5.733666333666334, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 7178 + }, + { + "epoch": 5.734465534465534, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 7179 + }, + { + "epoch": 5.735264735264735, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 7180 + }, + { + "epoch": 5.736063936063936, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 7181 + }, + { + "epoch": 5.736863136863137, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 7182 + }, + { + "epoch": 5.7376623376623375, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 7183 + }, + { + "epoch": 5.7384615384615385, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 7184 + }, + { + "epoch": 5.7392607392607395, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, 
+ "loss": 0.8903, + "step": 7185 + }, + { + "epoch": 5.74005994005994, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 7186 + }, + { + "epoch": 5.740859140859141, + "grad_norm": 0.7734375, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 7187 + }, + { + "epoch": 5.741658341658342, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 7188 + }, + { + "epoch": 5.742457542457543, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 7189 + }, + { + "epoch": 5.743256743256743, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 7190 + }, + { + "epoch": 5.744055944055944, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.881, + "step": 7191 + }, + { + "epoch": 5.744855144855145, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 7192 + }, + { + "epoch": 5.745654345654346, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 7193 + }, + { + "epoch": 5.746453546453546, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8787, + "step": 7194 + }, + { + "epoch": 5.747252747252747, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 7195 + }, + { + "epoch": 5.748051948051948, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 7196 + }, + { + "epoch": 5.7488511488511485, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 7197 + }, + { + "epoch": 5.7496503496503495, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 7198 + }, + { + "epoch": 5.7504495504495505, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 7199 + }, + { + "epoch": 5.751248751248752, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 7200 + }, + { + "epoch": 5.752047952047952, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 7201 + }, + { + "epoch": 5.752847152847153, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 7202 + }, + { + "epoch": 5.753646353646354, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 7203 + }, + { + "epoch": 5.754445554445555, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 7204 + }, + { + "epoch": 5.755244755244755, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8998, + "step": 7205 + }, + { + "epoch": 5.756043956043956, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 7206 + }, + { + "epoch": 5.756843156843157, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 7207 + }, + { + "epoch": 5.757642357642357, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8968, + "step": 7208 + }, + { + "epoch": 5.758441558441558, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 7209 + }, + { + "epoch": 5.759240759240759, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 7210 + }, + { + "epoch": 5.76003996003996, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 7211 + }, + { + "epoch": 5.7608391608391605, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 7212 + }, + { + "epoch": 5.7616383616383615, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8922, + 
"step": 7213 + }, + { + "epoch": 5.7624375624375626, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8824, + "step": 7214 + }, + { + "epoch": 5.763236763236764, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 7215 + }, + { + "epoch": 5.764035964035964, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 7216 + }, + { + "epoch": 5.764835164835165, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 7217 + }, + { + "epoch": 5.765634365634366, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 7218 + }, + { + "epoch": 5.766433566433566, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 7219 + }, + { + "epoch": 5.767232767232767, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 7220 + }, + { + "epoch": 5.768031968031968, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 7221 + }, + { + "epoch": 5.768831168831169, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 7222 + }, + { + "epoch": 5.76963036963037, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 7223 + }, + { + "epoch": 5.77042957042957, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8825, + "step": 7224 + }, + { + "epoch": 5.771228771228771, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 7225 + }, + { + "epoch": 5.772027972027972, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 7226 + }, + { + "epoch": 5.7728271728271725, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 7227 + }, + { + "epoch": 5.7736263736263735, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 7228 + }, + { + "epoch": 5.774425574425575, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 7229 + }, + { + "epoch": 5.775224775224775, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8862, + "step": 7230 + }, + { + "epoch": 5.776023976023976, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 7231 + }, + { + "epoch": 5.776823176823177, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 7232 + }, + { + "epoch": 5.777622377622378, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 7233 + }, + { + "epoch": 5.778421578421579, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 7234 + }, + { + "epoch": 5.779220779220779, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 7235 + }, + { + "epoch": 5.78001998001998, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 7236 + }, + { + "epoch": 5.780819180819181, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 7237 + }, + { + "epoch": 5.781618381618381, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 7238 + }, + { + "epoch": 5.782417582417582, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 7239 + }, + { + "epoch": 5.783216783216783, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 7240 + }, + { + "epoch": 5.784015984015984, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 7241 + }, + { + 
"epoch": 5.7848151848151845, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8846, + "step": 7242 + }, + { + "epoch": 5.785614385614386, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8817, + "step": 7243 + }, + { + "epoch": 5.786413586413587, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8829, + "step": 7244 + }, + { + "epoch": 5.787212787212788, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 7245 + }, + { + "epoch": 5.788011988011988, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 7246 + }, + { + "epoch": 5.788811188811189, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 7247 + }, + { + "epoch": 5.78961038961039, + "grad_norm": 0.66796875, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 7248 + }, + { + "epoch": 5.79040959040959, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 7249 + }, + { + "epoch": 5.791208791208791, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 7250 + }, + { + "epoch": 5.792007992007992, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 7251 + }, + { + "epoch": 5.792807192807193, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 7252 + }, + { + "epoch": 5.793606393606393, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 7253 + }, + { + "epoch": 5.794405594405594, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 7254 + }, + { + "epoch": 5.795204795204795, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8811, + "step": 7255 + }, + { + "epoch": 5.796003996003996, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 7256 + }, + { + "epoch": 5.796803196803197, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8775, + "step": 7257 + }, + { + "epoch": 5.797602397602398, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 7258 + }, + { + "epoch": 5.798401598401599, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 7259 + }, + { + "epoch": 5.799200799200799, + "grad_norm": 0.9609375, + "learning_rate": 0.0002, + "loss": 0.9165, + "step": 7260 + }, + { + "epoch": 5.8, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 7261 + }, + { + "epoch": 5.800799200799201, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 7262 + }, + { + "epoch": 5.801598401598402, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 7263 + }, + { + "epoch": 5.802397602397602, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8846, + "step": 7264 + }, + { + "epoch": 5.803196803196803, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 7265 + }, + { + "epoch": 5.803996003996004, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 7266 + }, + { + "epoch": 5.804795204795205, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8811, + "step": 7267 + }, + { + "epoch": 5.805594405594405, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 7268 + }, + { + "epoch": 5.806393606393606, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 7269 + }, + { + "epoch": 5.807192807192807, + 
"grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 7270 + }, + { + "epoch": 5.8079920079920075, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 7271 + }, + { + "epoch": 5.808791208791209, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 7272 + }, + { + "epoch": 5.80959040959041, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 7273 + }, + { + "epoch": 5.810389610389611, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 7274 + }, + { + "epoch": 5.811188811188811, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 7275 + }, + { + "epoch": 5.811988011988012, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 7276 + }, + { + "epoch": 5.812787212787213, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 7277 + }, + { + "epoch": 5.813586413586414, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 7278 + }, + { + "epoch": 5.814385614385614, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 7279 + }, + { + "epoch": 5.815184815184815, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 7280 + }, + { + "epoch": 5.815984015984016, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 7281 + }, + { + "epoch": 5.816783216783216, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 7282 + }, + { + "epoch": 5.817582417582417, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 7283 + }, + { + "epoch": 5.818381618381618, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 7284 + }, + { + "epoch": 5.819180819180819, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 7285 + }, + { + "epoch": 5.8199800199800205, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 7286 + }, + { + "epoch": 5.820779220779221, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 7287 + }, + { + "epoch": 5.821578421578422, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 7288 + }, + { + "epoch": 5.822377622377623, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 7289 + }, + { + "epoch": 5.823176823176823, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 7290 + }, + { + "epoch": 5.823976023976024, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 7291 + }, + { + "epoch": 5.824775224775225, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 7292 + }, + { + "epoch": 5.825574425574426, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 7293 + }, + { + "epoch": 5.826373626373626, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 7294 + }, + { + "epoch": 5.827172827172827, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 7295 + }, + { + "epoch": 5.827972027972028, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 7296 + }, + { + "epoch": 5.828771228771229, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8799, + "step": 7297 + }, + { + "epoch": 5.829570429570429, + "grad_norm": 
0.353515625, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 7298 + }, + { + "epoch": 5.83036963036963, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 7299 + }, + { + "epoch": 5.8311688311688314, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 7300 + }, + { + "epoch": 5.831968031968032, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 7301 + }, + { + "epoch": 5.832767232767233, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 7302 + }, + { + "epoch": 5.833566433566434, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 7303 + }, + { + "epoch": 5.834365634365635, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 7304 + }, + { + "epoch": 5.835164835164835, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 7305 + }, + { + "epoch": 5.835964035964036, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 7306 + }, + { + "epoch": 5.836763236763237, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 7307 + }, + { + "epoch": 5.837562437562438, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 7308 + }, + { + "epoch": 5.838361638361638, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 7309 + }, + { + "epoch": 5.839160839160839, + "grad_norm": 0.65625, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 7310 + }, + { + "epoch": 5.83996003996004, + "grad_norm": 1.5703125, + "learning_rate": 0.0002, + "loss": 0.9149, + "step": 7311 + }, + { + "epoch": 5.84075924075924, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 7312 + }, + { + "epoch": 5.841558441558441, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 7313 + }, + { + "epoch": 5.842357642357642, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 7314 + }, + { + "epoch": 5.8431568431568435, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8822, + "step": 7315 + }, + { + "epoch": 5.843956043956044, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 7316 + }, + { + "epoch": 5.844755244755245, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 7317 + }, + { + "epoch": 5.845554445554446, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 7318 + }, + { + "epoch": 5.846353646353647, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 7319 + }, + { + "epoch": 5.847152847152847, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 7320 + }, + { + "epoch": 5.847952047952048, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 7321 + }, + { + "epoch": 5.848751248751249, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 7322 + }, + { + "epoch": 5.849550449550449, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 7323 + }, + { + "epoch": 5.85034965034965, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 7324 + }, + { + "epoch": 5.851148851148851, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 7325 + }, + { + "epoch": 5.851948051948052, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, 
+ "loss": 0.8837, + "step": 7326 + }, + { + "epoch": 5.852747252747252, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8791, + "step": 7327 + }, + { + "epoch": 5.853546453546453, + "grad_norm": 0.74609375, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 7328 + }, + { + "epoch": 5.8543456543456545, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 7329 + }, + { + "epoch": 5.8551448551448555, + "grad_norm": 0.65234375, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 7330 + }, + { + "epoch": 5.855944055944056, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 7331 + }, + { + "epoch": 5.856743256743257, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 7332 + }, + { + "epoch": 5.857542457542458, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 7333 + }, + { + "epoch": 5.858341658341658, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 7334 + }, + { + "epoch": 5.859140859140859, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 7335 + }, + { + "epoch": 5.85994005994006, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 7336 + }, + { + "epoch": 5.860739260739261, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 7337 + }, + { + "epoch": 5.861538461538462, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 7338 + }, + { + "epoch": 5.862337662337662, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7339 + }, + { + "epoch": 5.863136863136863, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 7340 + }, + { + "epoch": 5.863936063936064, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.9012, + "step": 7341 + }, + { + "epoch": 5.864735264735264, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 7342 + }, + { + "epoch": 5.8655344655344654, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 7343 + }, + { + "epoch": 5.8663336663336665, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 7344 + }, + { + "epoch": 5.867132867132867, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 7345 + }, + { + "epoch": 5.867932067932068, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 7346 + }, + { + "epoch": 5.868731268731269, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 7347 + }, + { + "epoch": 5.86953046953047, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 7348 + }, + { + "epoch": 5.870329670329671, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 7349 + }, + { + "epoch": 5.871128871128871, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 7350 + }, + { + "epoch": 5.871928071928072, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 7351 + }, + { + "epoch": 5.872727272727273, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 7352 + }, + { + "epoch": 5.873526473526473, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 7353 + }, + { + "epoch": 5.874325674325674, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.9038, + "step": 7354 + }, 
+ { + "epoch": 5.875124875124875, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 7355 + }, + { + "epoch": 5.875924075924076, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.9153, + "step": 7356 + }, + { + "epoch": 5.876723276723276, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 7357 + }, + { + "epoch": 5.8775224775224775, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 7358 + }, + { + "epoch": 5.8783216783216785, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 7359 + }, + { + "epoch": 5.8791208791208796, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 7360 + }, + { + "epoch": 5.87992007992008, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 7361 + }, + { + "epoch": 5.880719280719281, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7362 + }, + { + "epoch": 5.881518481518482, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 7363 + }, + { + "epoch": 5.882317682317682, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 7364 + }, + { + "epoch": 5.883116883116883, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.879, + "step": 7365 + }, + { + "epoch": 5.883916083916084, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 7366 + }, + { + "epoch": 5.884715284715285, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 7367 + }, + { + "epoch": 5.885514485514485, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 7368 + }, + { + "epoch": 5.886313686313686, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 7369 + }, + { + "epoch": 5.887112887112887, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 7370 + }, + { + "epoch": 5.887912087912088, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 7371 + }, + { + "epoch": 5.8887112887112885, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 7372 + }, + { + "epoch": 5.8895104895104895, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 7373 + }, + { + "epoch": 5.8903096903096905, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 7374 + }, + { + "epoch": 5.891108891108891, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 7375 + }, + { + "epoch": 5.891908091908092, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 7376 + }, + { + "epoch": 5.892707292707293, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 7377 + }, + { + "epoch": 5.893506493506494, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 7378 + }, + { + "epoch": 5.894305694305694, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 7379 + }, + { + "epoch": 5.895104895104895, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 7380 + }, + { + "epoch": 5.895904095904096, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 7381 + }, + { + "epoch": 5.896703296703297, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 7382 + }, + { + "epoch": 5.897502497502497, + 
"grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 7383 + }, + { + "epoch": 5.898301698301698, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 7384 + }, + { + "epoch": 5.899100899100899, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.9088, + "step": 7385 + }, + { + "epoch": 5.8999000999000994, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 7386 + }, + { + "epoch": 5.9006993006993005, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 7387 + }, + { + "epoch": 5.9014985014985015, + "grad_norm": 0.98828125, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 7388 + }, + { + "epoch": 5.902297702297703, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 7389 + }, + { + "epoch": 5.903096903096903, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 7390 + }, + { + "epoch": 5.903896103896104, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 7391 + }, + { + "epoch": 5.904695304695305, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 7392 + }, + { + "epoch": 5.905494505494506, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 7393 + }, + { + "epoch": 5.906293706293706, + "grad_norm": 0.7109375, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 7394 + }, + { + "epoch": 5.907092907092907, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 7395 + }, + { + "epoch": 5.907892107892108, + "grad_norm": 0.6953125, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 7396 + }, + { + "epoch": 5.908691308691308, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 7397 + }, + { + "epoch": 5.909490509490509, + "grad_norm": 0.65625, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 7398 + }, + { + "epoch": 5.91028971028971, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 7399 + }, + { + "epoch": 5.911088911088911, + "grad_norm": 0.64453125, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 7400 + }, + { + "epoch": 5.911888111888112, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 7401 + }, + { + "epoch": 5.9126873126873125, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 7402 + }, + { + "epoch": 5.9134865134865136, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 7403 + }, + { + "epoch": 5.914285714285715, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 7404 + }, + { + "epoch": 5.915084915084915, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 7405 + }, + { + "epoch": 5.915884115884116, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 7406 + }, + { + "epoch": 5.916683316683317, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 7407 + }, + { + "epoch": 5.917482517482518, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 7408 + }, + { + "epoch": 5.918281718281718, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 7409 + }, + { + "epoch": 5.919080919080919, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 7410 + }, + { + "epoch": 5.91988011988012, + "grad_norm": 0.515625, + 
"learning_rate": 0.0002, + "loss": 0.8975, + "step": 7411 + }, + { + "epoch": 5.920679320679321, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 7412 + }, + { + "epoch": 5.921478521478521, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 7413 + }, + { + "epoch": 5.922277722277722, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 7414 + }, + { + "epoch": 5.923076923076923, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 7415 + }, + { + "epoch": 5.9238761238761235, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 7416 + }, + { + "epoch": 5.9246753246753245, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 7417 + }, + { + "epoch": 5.925474525474526, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 7418 + }, + { + "epoch": 5.926273726273727, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 7419 + }, + { + "epoch": 5.927072927072927, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 7420 + }, + { + "epoch": 5.927872127872128, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 7421 + }, + { + "epoch": 5.928671328671329, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 7422 + }, + { + "epoch": 5.92947052947053, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8812, + "step": 7423 + }, + { + "epoch": 5.93026973026973, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 7424 + }, + { + "epoch": 5.931068931068931, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8789, + "step": 7425 + }, + { + "epoch": 5.931868131868132, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.9047, + "step": 7426 + }, + { + "epoch": 5.932667332667332, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 7427 + }, + { + "epoch": 5.933466533466533, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 7428 + }, + { + "epoch": 5.934265734265734, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 7429 + }, + { + "epoch": 5.935064935064935, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 7430 + }, + { + "epoch": 5.9358641358641355, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.8829, + "step": 7431 + }, + { + "epoch": 5.936663336663337, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 7432 + }, + { + "epoch": 5.937462537462538, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.9063, + "step": 7433 + }, + { + "epoch": 5.938261738261739, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8804, + "step": 7434 + }, + { + "epoch": 5.939060939060939, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 7435 + }, + { + "epoch": 5.93986013986014, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8794, + "step": 7436 + }, + { + "epoch": 5.940659340659341, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 7437 + }, + { + "epoch": 5.941458541458541, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 7438 + }, + { + "epoch": 5.942257742257742, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 
0.8916, + "step": 7439 + }, + { + "epoch": 5.943056943056943, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 7440 + }, + { + "epoch": 5.943856143856144, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.8829, + "step": 7441 + }, + { + "epoch": 5.944655344655344, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 7442 + }, + { + "epoch": 5.945454545454545, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 7443 + }, + { + "epoch": 5.946253746253746, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 7444 + }, + { + "epoch": 5.947052947052947, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.9055, + "step": 7445 + }, + { + "epoch": 5.9478521478521476, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8794, + "step": 7446 + }, + { + "epoch": 5.948651348651349, + "grad_norm": 0.65625, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 7447 + }, + { + "epoch": 5.94945054945055, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 7448 + }, + { + "epoch": 5.95024975024975, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 7449 + }, + { + "epoch": 5.951048951048951, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 7450 + }, + { + "epoch": 5.951848151848152, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 7451 + }, + { + "epoch": 5.952647352647353, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 7452 + }, + { + "epoch": 5.953446553446554, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 7453 + }, + { + "epoch": 5.954245754245754, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 7454 + }, + { + "epoch": 5.955044955044955, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.9086, + "step": 7455 + }, + { + "epoch": 5.955844155844156, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 7456 + }, + { + "epoch": 5.956643356643356, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 7457 + }, + { + "epoch": 5.957442557442557, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 7458 + }, + { + "epoch": 5.958241758241758, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 7459 + }, + { + "epoch": 5.959040959040959, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 7460 + }, + { + "epoch": 5.95984015984016, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 7461 + }, + { + "epoch": 5.960639360639361, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 7462 + }, + { + "epoch": 5.961438561438562, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 7463 + }, + { + "epoch": 5.962237762237763, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 7464 + }, + { + "epoch": 5.963036963036963, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 7465 + }, + { + "epoch": 5.963836163836164, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 7466 + }, + { + "epoch": 5.964635364635365, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 7467 + }, + 
{ + "epoch": 5.965434565434565, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 7468 + }, + { + "epoch": 5.966233766233766, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8816, + "step": 7469 + }, + { + "epoch": 5.967032967032967, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 7470 + }, + { + "epoch": 5.967832167832168, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.879, + "step": 7471 + }, + { + "epoch": 5.968631368631368, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7472 + }, + { + "epoch": 5.969430569430569, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 7473 + }, + { + "epoch": 5.97022977022977, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 7474 + }, + { + "epoch": 5.9710289710289715, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 7475 + }, + { + "epoch": 5.971828171828172, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 7476 + }, + { + "epoch": 5.972627372627373, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 7477 + }, + { + "epoch": 5.973426573426574, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 7478 + }, + { + "epoch": 5.974225774225774, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 7479 + }, + { + "epoch": 5.975024975024975, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 7480 + }, + { + "epoch": 5.975824175824176, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 7481 + }, + { + "epoch": 5.976623376623377, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 7482 + }, + { + "epoch": 5.977422577422577, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 7483 + }, + { + "epoch": 5.978221778221778, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 7484 + }, + { + "epoch": 5.979020979020979, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 7485 + }, + { + "epoch": 5.97982017982018, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 7486 + }, + { + "epoch": 5.98061938061938, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 7487 + }, + { + "epoch": 5.981418581418581, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 7488 + }, + { + "epoch": 5.982217782217782, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8828, + "step": 7489 + }, + { + "epoch": 5.983016983016983, + "grad_norm": 0.2490234375, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 7490 + }, + { + "epoch": 5.983816183816184, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.884, + "step": 7491 + }, + { + "epoch": 5.984615384615385, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 7492 + }, + { + "epoch": 5.985414585414586, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 7493 + }, + { + "epoch": 5.986213786213786, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 7494 + }, + { + "epoch": 5.987012987012987, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 7495 + }, + { + "epoch": 5.987812187812188, + 
"grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 7496 + }, + { + "epoch": 5.988611388611389, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 7497 + }, + { + "epoch": 5.989410589410589, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 7498 + }, + { + "epoch": 5.99020979020979, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 7499 + }, + { + "epoch": 5.991008991008991, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 7500 + }, + { + "epoch": 5.991808191808191, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 7501 + }, + { + "epoch": 5.992607392607392, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 7502 + }, + { + "epoch": 5.993406593406593, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 7503 + }, + { + "epoch": 5.9942057942057945, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 7504 + }, + { + "epoch": 5.995004995004995, + "grad_norm": 0.65234375, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 7505 + }, + { + "epoch": 5.995804195804196, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 7506 + }, + { + "epoch": 5.996603396603397, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8786, + "step": 7507 + }, + { + "epoch": 5.997402597402598, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 7508 + }, + { + "epoch": 5.998201798201798, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 7509 + }, + { + "epoch": 5.999000999000999, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 7510 + }, + { + "epoch": 5.9998001998002, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 7511 + }, + { + "epoch": 6.0, + "grad_norm": 0.09716796875, + "learning_rate": 0.0002, + "loss": 0.2212, + "step": 7512 + }, + { + "epoch": 6.000799200799201, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 7513 + }, + { + "epoch": 6.001598401598401, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8784, + "step": 7514 + }, + { + "epoch": 6.002397602397602, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 7515 + }, + { + "epoch": 6.003196803196803, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 7516 + }, + { + "epoch": 6.003996003996004, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 7517 + }, + { + "epoch": 6.0047952047952045, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 7518 + }, + { + "epoch": 6.0055944055944055, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8828, + "step": 7519 + }, + { + "epoch": 6.0063936063936065, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.9074, + "step": 7520 + }, + { + "epoch": 6.007192807192808, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.9376, + "step": 7521 + }, + { + "epoch": 6.007992007992008, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 7522 + }, + { + "epoch": 6.008791208791209, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8828, + "step": 7523 + }, + { + "epoch": 6.00959040959041, + "grad_norm": 0.267578125, + 
"learning_rate": 0.0002, + "loss": 0.8881, + "step": 7524 + }, + { + "epoch": 6.01038961038961, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 7525 + }, + { + "epoch": 6.011188811188811, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 7526 + }, + { + "epoch": 6.011988011988012, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 7527 + }, + { + "epoch": 6.012787212787213, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 7528 + }, + { + "epoch": 6.013586413586413, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 7529 + }, + { + "epoch": 6.014385614385614, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 7530 + }, + { + "epoch": 6.015184815184815, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8809, + "step": 7531 + }, + { + "epoch": 6.015984015984016, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 7532 + }, + { + "epoch": 6.0167832167832165, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 7533 + }, + { + "epoch": 6.0175824175824175, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 7534 + }, + { + "epoch": 6.018381618381619, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8809, + "step": 7535 + }, + { + "epoch": 6.01918081918082, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 7536 + }, + { + "epoch": 6.01998001998002, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7537 + }, + { + "epoch": 6.020779220779221, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8778, + "step": 7538 + }, + { + "epoch": 6.021578421578422, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.883, + "step": 7539 + }, + { + "epoch": 6.022377622377622, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8795, + "step": 7540 + }, + { + "epoch": 6.023176823176823, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 7541 + }, + { + "epoch": 6.023976023976024, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.9306, + "step": 7542 + }, + { + "epoch": 6.024775224775225, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 7543 + }, + { + "epoch": 6.025574425574425, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 7544 + }, + { + "epoch": 6.026373626373626, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 7545 + }, + { + "epoch": 6.027172827172827, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 7546 + }, + { + "epoch": 6.027972027972028, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 7547 + }, + { + "epoch": 6.0287712287712285, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 7548 + }, + { + "epoch": 6.0295704295704295, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 7549 + }, + { + "epoch": 6.030369630369631, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8823, + "step": 7550 + }, + { + "epoch": 6.031168831168831, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 7551 + }, + { + "epoch": 6.031968031968032, + "grad_norm": 0.35546875, + "learning_rate": 
0.0002, + "loss": 0.8944, + "step": 7552 + }, + { + "epoch": 6.032767232767233, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 7553 + }, + { + "epoch": 6.033566433566434, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 7554 + }, + { + "epoch": 6.034365634365634, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 7555 + }, + { + "epoch": 6.035164835164835, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 7556 + }, + { + "epoch": 6.035964035964036, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.898, + "step": 7557 + }, + { + "epoch": 6.036763236763237, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 7558 + }, + { + "epoch": 6.037562437562437, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 7559 + }, + { + "epoch": 6.038361638361638, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 7560 + }, + { + "epoch": 6.039160839160839, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 7561 + }, + { + "epoch": 6.03996003996004, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8862, + "step": 7562 + }, + { + "epoch": 6.0407592407592405, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 7563 + }, + { + "epoch": 6.041558441558442, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 7564 + }, + { + "epoch": 6.042357642357643, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8862, + "step": 7565 + }, + { + "epoch": 6.043156843156843, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 7566 + }, + { + "epoch": 6.043956043956044, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 7567 + }, + { + "epoch": 6.044755244755245, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 7568 + }, + { + "epoch": 6.045554445554446, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 7569 + }, + { + "epoch": 6.046353646353646, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 7570 + }, + { + "epoch": 6.047152847152847, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 7571 + }, + { + "epoch": 6.047952047952048, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 7572 + }, + { + "epoch": 6.048751248751249, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 7573 + }, + { + "epoch": 6.049550449550449, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 7574 + }, + { + "epoch": 6.05034965034965, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 7575 + }, + { + "epoch": 6.051148851148851, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 7576 + }, + { + "epoch": 6.0519480519480515, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 7577 + }, + { + "epoch": 6.052747252747253, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 7578 + }, + { + "epoch": 6.053546453546454, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 7579 + }, + { + "epoch": 6.054345654345655, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8886, + 
"step": 7580 + }, + { + "epoch": 6.055144855144855, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 7581 + }, + { + "epoch": 6.055944055944056, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 7582 + }, + { + "epoch": 6.056743256743257, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 7583 + }, + { + "epoch": 6.057542457542458, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8787, + "step": 7584 + }, + { + "epoch": 6.058341658341658, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 7585 + }, + { + "epoch": 6.059140859140859, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 7586 + }, + { + "epoch": 6.05994005994006, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 7587 + }, + { + "epoch": 6.060739260739261, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 7588 + }, + { + "epoch": 6.061538461538461, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 7589 + }, + { + "epoch": 6.062337662337662, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 7590 + }, + { + "epoch": 6.063136863136863, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 7591 + }, + { + "epoch": 6.0639360639360635, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 7592 + }, + { + "epoch": 6.064735264735265, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 7593 + }, + { + "epoch": 6.065534465534466, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 7594 + }, + { + "epoch": 6.066333666333667, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 7595 + }, + { + "epoch": 6.067132867132867, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 7596 + }, + { + "epoch": 6.067932067932068, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 7597 + }, + { + "epoch": 6.068731268731269, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8748, + "step": 7598 + }, + { + "epoch": 6.06953046953047, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 7599 + }, + { + "epoch": 6.07032967032967, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 7600 + }, + { + "epoch": 6.071128871128871, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 7601 + }, + { + "epoch": 6.071928071928072, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 7602 + }, + { + "epoch": 6.072727272727272, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 7603 + }, + { + "epoch": 6.073526473526473, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8787, + "step": 7604 + }, + { + "epoch": 6.074325674325674, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 7605 + }, + { + "epoch": 6.075124875124875, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8782, + "step": 7606 + }, + { + "epoch": 6.075924075924076, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 7607 + }, + { + "epoch": 6.076723276723277, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 7608 + }, + { + 
"epoch": 6.077522477522478, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 7609 + }, + { + "epoch": 6.078321678321679, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 7610 + }, + { + "epoch": 6.079120879120879, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8829, + "step": 7611 + }, + { + "epoch": 6.07992007992008, + "grad_norm": 0.25, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 7612 + }, + { + "epoch": 6.080719280719281, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8812, + "step": 7613 + }, + { + "epoch": 6.081518481518482, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 7614 + }, + { + "epoch": 6.082317682317682, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 7615 + }, + { + "epoch": 6.083116883116883, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 7616 + }, + { + "epoch": 6.083916083916084, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 7617 + }, + { + "epoch": 6.084715284715284, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 7618 + }, + { + "epoch": 6.085514485514485, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 7619 + }, + { + "epoch": 6.086313686313686, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 7620 + }, + { + "epoch": 6.0871128871128874, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 7621 + }, + { + "epoch": 6.087912087912088, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 7622 + }, + { + "epoch": 6.088711288711289, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 7623 + }, + { + "epoch": 6.08951048951049, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 7624 + }, + { + "epoch": 6.090309690309691, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 7625 + }, + { + "epoch": 6.091108891108891, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 7626 + }, + { + "epoch": 6.091908091908092, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 7627 + }, + { + "epoch": 6.092707292707293, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 7628 + }, + { + "epoch": 6.093506493506493, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 7629 + }, + { + "epoch": 6.094305694305694, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 7630 + }, + { + "epoch": 6.095104895104895, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.9001, + "step": 7631 + }, + { + "epoch": 6.095904095904096, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7632 + }, + { + "epoch": 6.096703296703296, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 7633 + }, + { + "epoch": 6.097502497502497, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 7634 + }, + { + "epoch": 6.098301698301698, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 7635 + }, + { + "epoch": 6.0991008991008995, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 7636 + }, + { + "epoch": 6.0999000999001, + "grad_norm": 
0.302734375, + "learning_rate": 0.0002, + "loss": 0.8811, + "step": 7637 + }, + { + "epoch": 6.100699300699301, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8822, + "step": 7638 + }, + { + "epoch": 6.101498501498502, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 7639 + }, + { + "epoch": 6.102297702297703, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 7640 + }, + { + "epoch": 6.103096903096903, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 7641 + }, + { + "epoch": 6.103896103896104, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8992, + "step": 7642 + }, + { + "epoch": 6.104695304695305, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 7643 + }, + { + "epoch": 6.105494505494505, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 7644 + }, + { + "epoch": 6.106293706293706, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 7645 + }, + { + "epoch": 6.107092907092907, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 7646 + }, + { + "epoch": 6.107892107892108, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.8975, + "step": 7647 + }, + { + "epoch": 6.108691308691308, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 7648 + }, + { + "epoch": 6.109490509490509, + "grad_norm": 1.203125, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 7649 + }, + { + "epoch": 6.1102897102897105, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 7650 + }, + { + "epoch": 6.1110889110889115, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 7651 + }, + { + "epoch": 6.111888111888112, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 7652 + }, + { + "epoch": 6.112687312687313, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 7653 + }, + { + "epoch": 6.113486513486514, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 7654 + }, + { + "epoch": 6.114285714285714, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 7655 + }, + { + "epoch": 6.115084915084915, + "grad_norm": 1.078125, + "learning_rate": 0.0002, + "loss": 0.91, + "step": 7656 + }, + { + "epoch": 6.115884115884116, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 7657 + }, + { + "epoch": 6.116683316683317, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 7658 + }, + { + "epoch": 6.117482517482517, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 7659 + }, + { + "epoch": 6.118281718281718, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 7660 + }, + { + "epoch": 6.119080919080919, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 7661 + }, + { + "epoch": 6.11988011988012, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8819, + "step": 7662 + }, + { + "epoch": 6.12067932067932, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 7663 + }, + { + "epoch": 6.1214785214785215, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 7664 + }, + { + "epoch": 6.1222777222777225, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + 
"loss": 0.8832, + "step": 7665 + }, + { + "epoch": 6.123076923076923, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 7666 + }, + { + "epoch": 6.123876123876124, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 7667 + }, + { + "epoch": 6.124675324675325, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 7668 + }, + { + "epoch": 6.125474525474526, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 7669 + }, + { + "epoch": 6.126273726273726, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 7670 + }, + { + "epoch": 6.127072927072927, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 7671 + }, + { + "epoch": 6.127872127872128, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 7672 + }, + { + "epoch": 6.128671328671329, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 7673 + }, + { + "epoch": 6.129470529470529, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 7674 + }, + { + "epoch": 6.13026973026973, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 7675 + }, + { + "epoch": 6.131068931068931, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.9035, + "step": 7676 + }, + { + "epoch": 6.131868131868132, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 7677 + }, + { + "epoch": 6.132667332667332, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 7678 + }, + { + "epoch": 6.1334665334665335, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 7679 + }, + { + "epoch": 6.1342657342657345, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 7680 + }, + { + "epoch": 6.135064935064935, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 7681 + }, + { + "epoch": 6.135864135864136, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 7682 + }, + { + "epoch": 6.136663336663337, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 7683 + }, + { + "epoch": 6.137462537462538, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 7684 + }, + { + "epoch": 6.138261738261738, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 7685 + }, + { + "epoch": 6.139060939060939, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 7686 + }, + { + "epoch": 6.13986013986014, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 7687 + }, + { + "epoch": 6.140659340659341, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 7688 + }, + { + "epoch": 6.141458541458541, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 7689 + }, + { + "epoch": 6.142257742257742, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8808, + "step": 7690 + }, + { + "epoch": 6.143056943056943, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 7691 + }, + { + "epoch": 6.143856143856144, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 7692 + }, + { + "epoch": 6.1446553446553445, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8881, + 
"step": 7693 + }, + { + "epoch": 6.1454545454545455, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8805, + "step": 7694 + }, + { + "epoch": 6.1462537462537465, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 7695 + }, + { + "epoch": 6.147052947052947, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 7696 + }, + { + "epoch": 6.147852147852148, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 7697 + }, + { + "epoch": 6.148651348651349, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 7698 + }, + { + "epoch": 6.14945054945055, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.9015, + "step": 7699 + }, + { + "epoch": 6.15024975024975, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 7700 + }, + { + "epoch": 6.151048951048951, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 7701 + }, + { + "epoch": 6.151848151848152, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 7702 + }, + { + "epoch": 6.152647352647353, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8862, + "step": 7703 + }, + { + "epoch": 6.153446553446553, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 7704 + }, + { + "epoch": 6.154245754245754, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 7705 + }, + { + "epoch": 6.155044955044955, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8799, + "step": 7706 + }, + { + "epoch": 6.1558441558441555, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 7707 + }, + { + "epoch": 6.1566433566433565, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 7708 + }, + { + "epoch": 6.1574425574425575, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 7709 + }, + { + "epoch": 6.158241758241759, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 7710 + }, + { + "epoch": 6.159040959040959, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 7711 + }, + { + "epoch": 6.15984015984016, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 7712 + }, + { + "epoch": 6.160639360639361, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.88, + "step": 7713 + }, + { + "epoch": 6.161438561438562, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 7714 + }, + { + "epoch": 6.162237762237762, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 7715 + }, + { + "epoch": 6.163036963036963, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 7716 + }, + { + "epoch": 6.163836163836164, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 7717 + }, + { + "epoch": 6.164635364635364, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8789, + "step": 7718 + }, + { + "epoch": 6.165434565434565, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 7719 + }, + { + "epoch": 6.166233766233766, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 7720 + }, + { + "epoch": 6.167032967032967, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 7721 + }, + { + "epoch": 
6.1678321678321675, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 7722 + }, + { + "epoch": 6.1686313686313685, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 7723 + }, + { + "epoch": 6.1694305694305696, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 7724 + }, + { + "epoch": 6.170229770229771, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 7725 + }, + { + "epoch": 6.171028971028971, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 7726 + }, + { + "epoch": 6.171828171828172, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 7727 + }, + { + "epoch": 6.172627372627373, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 7728 + }, + { + "epoch": 6.173426573426573, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 7729 + }, + { + "epoch": 6.174225774225774, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 7730 + }, + { + "epoch": 6.175024975024975, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 7731 + }, + { + "epoch": 6.175824175824176, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 7732 + }, + { + "epoch": 6.176623376623376, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 7733 + }, + { + "epoch": 6.177422577422577, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 7734 + }, + { + "epoch": 6.178221778221778, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 7735 + }, + { + "epoch": 6.179020979020979, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 7736 + }, + { + "epoch": 6.1798201798201795, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 7737 + }, + { + "epoch": 6.1806193806193805, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 7738 + }, + { + "epoch": 6.181418581418582, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 7739 + }, + { + "epoch": 6.182217782217783, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 7740 + }, + { + "epoch": 6.183016983016983, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 7741 + }, + { + "epoch": 6.183816183816184, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 7742 + }, + { + "epoch": 6.184615384615385, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 7743 + }, + { + "epoch": 6.185414585414585, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8786, + "step": 7744 + }, + { + "epoch": 6.186213786213786, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 7745 + }, + { + "epoch": 6.187012987012987, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 7746 + }, + { + "epoch": 6.187812187812188, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 7747 + }, + { + "epoch": 6.188611388611388, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 7748 + }, + { + "epoch": 6.189410589410589, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 7749 + }, + { + "epoch": 6.19020979020979, + 
"grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 7750 + }, + { + "epoch": 6.191008991008991, + "grad_norm": 0.734375, + "learning_rate": 0.0002, + "loss": 0.9078, + "step": 7751 + }, + { + "epoch": 6.1918081918081915, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8784, + "step": 7752 + }, + { + "epoch": 6.192607392607393, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 7753 + }, + { + "epoch": 6.193406593406594, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 7754 + }, + { + "epoch": 6.194205794205795, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 7755 + }, + { + "epoch": 6.195004995004995, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 7756 + }, + { + "epoch": 6.195804195804196, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 7757 + }, + { + "epoch": 6.196603396603397, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.884, + "step": 7758 + }, + { + "epoch": 6.197402597402597, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 7759 + }, + { + "epoch": 6.198201798201798, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8965, + "step": 7760 + }, + { + "epoch": 6.199000999000999, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 7761 + }, + { + "epoch": 6.1998001998002, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 7762 + }, + { + "epoch": 6.2005994005994, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 7763 + }, + { + "epoch": 6.201398601398601, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 7764 + }, + { + "epoch": 6.202197802197802, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.881, + "step": 7765 + }, + { + "epoch": 6.202997002997003, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8731, + "step": 7766 + }, + { + "epoch": 6.203796203796204, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 7767 + }, + { + "epoch": 6.204595404595405, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 7768 + }, + { + "epoch": 6.205394605394606, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 7769 + }, + { + "epoch": 6.206193806193806, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 7770 + }, + { + "epoch": 6.206993006993007, + "grad_norm": 0.25, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 7771 + }, + { + "epoch": 6.207792207792208, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 7772 + }, + { + "epoch": 6.208591408591409, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 7773 + }, + { + "epoch": 6.209390609390609, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 7774 + }, + { + "epoch": 6.21018981018981, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 7775 + }, + { + "epoch": 6.210989010989011, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 7776 + }, + { + "epoch": 6.211788211788212, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 7777 + }, + { + "epoch": 6.212587412587412, + "grad_norm": 0.26953125, + 
"learning_rate": 0.0002, + "loss": 0.8957, + "step": 7778 + }, + { + "epoch": 6.213386613386613, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 7779 + }, + { + "epoch": 6.214185814185814, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 7780 + }, + { + "epoch": 6.2149850149850145, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8823, + "step": 7781 + }, + { + "epoch": 6.215784215784216, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8778, + "step": 7782 + }, + { + "epoch": 6.216583416583417, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 7783 + }, + { + "epoch": 6.217382617382618, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 7784 + }, + { + "epoch": 6.218181818181818, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 7785 + }, + { + "epoch": 6.218981018981019, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8977, + "step": 7786 + }, + { + "epoch": 6.21978021978022, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 7787 + }, + { + "epoch": 6.220579420579421, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 7788 + }, + { + "epoch": 6.221378621378621, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 7789 + }, + { + "epoch": 6.222177822177822, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 7790 + }, + { + "epoch": 6.222977022977023, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8779, + "step": 7791 + }, + { + "epoch": 6.223776223776224, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 7792 + }, + { + "epoch": 6.224575424575424, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 7793 + }, + { + "epoch": 6.225374625374625, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 7794 + }, + { + "epoch": 6.226173826173826, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8804, + "step": 7795 + }, + { + "epoch": 6.226973026973027, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 7796 + }, + { + "epoch": 6.227772227772228, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 7797 + }, + { + "epoch": 6.228571428571429, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 7798 + }, + { + "epoch": 6.22937062937063, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 7799 + }, + { + "epoch": 6.23016983016983, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 7800 + }, + { + "epoch": 6.230969030969031, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7801 + }, + { + "epoch": 6.231768231768232, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 7802 + }, + { + "epoch": 6.232567432567433, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 7803 + }, + { + "epoch": 6.233366633366633, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.882, + "step": 7804 + }, + { + "epoch": 6.234165834165834, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8783, + "step": 7805 + }, + { + "epoch": 6.234965034965035, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, 
+ "loss": 0.8915, + "step": 7806 + }, + { + "epoch": 6.235764235764236, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 7807 + }, + { + "epoch": 6.236563436563436, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8808, + "step": 7808 + }, + { + "epoch": 6.237362637362637, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 7809 + }, + { + "epoch": 6.2381618381618384, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8782, + "step": 7810 + }, + { + "epoch": 6.238961038961039, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.934, + "step": 7811 + }, + { + "epoch": 6.23976023976024, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 7812 + }, + { + "epoch": 6.240559440559441, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 7813 + }, + { + "epoch": 6.241358641358642, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 7814 + }, + { + "epoch": 6.242157842157842, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 7815 + }, + { + "epoch": 6.242957042957043, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 7816 + }, + { + "epoch": 6.243756243756244, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 7817 + }, + { + "epoch": 6.244555444555445, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 7818 + }, + { + "epoch": 6.245354645354645, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 7819 + }, + { + "epoch": 6.246153846153846, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8846, + "step": 7820 + }, + { + "epoch": 6.246953046953047, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 7821 + }, + { + "epoch": 6.247752247752247, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 7822 + }, + { + "epoch": 6.248551448551448, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 7823 + }, + { + "epoch": 6.249350649350649, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8846, + "step": 7824 + }, + { + "epoch": 6.2501498501498505, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 7825 + }, + { + "epoch": 6.250949050949051, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 7826 + }, + { + "epoch": 6.251748251748252, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 7827 + }, + { + "epoch": 6.252547452547453, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.884, + "step": 7828 + }, + { + "epoch": 6.253346653346654, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 7829 + }, + { + "epoch": 6.254145854145854, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.881, + "step": 7830 + }, + { + "epoch": 6.254945054945055, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 7831 + }, + { + "epoch": 6.255744255744256, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 7832 + }, + { + "epoch": 6.256543456543456, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 7833 + }, + { + "epoch": 6.257342657342657, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8817, + "step": 7834 + }, + 
{ + "epoch": 6.258141858141858, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 7835 + }, + { + "epoch": 6.258941058941059, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 7836 + }, + { + "epoch": 6.259740259740259, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 7837 + }, + { + "epoch": 6.26053946053946, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 7838 + }, + { + "epoch": 6.2613386613386615, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 7839 + }, + { + "epoch": 6.2621378621378625, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 7840 + }, + { + "epoch": 6.262937062937063, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 7841 + }, + { + "epoch": 6.263736263736264, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 7842 + }, + { + "epoch": 6.264535464535465, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 7843 + }, + { + "epoch": 6.265334665334665, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7844 + }, + { + "epoch": 6.266133866133866, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 7845 + }, + { + "epoch": 6.266933066933067, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 7846 + }, + { + "epoch": 6.267732267732268, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 7847 + }, + { + "epoch": 6.268531468531468, + "grad_norm": 1.4140625, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 7848 + }, + { + "epoch": 6.269330669330669, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 7849 + }, + { + "epoch": 6.27012987012987, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 7850 + }, + { + "epoch": 6.270929070929071, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 7851 + }, + { + "epoch": 6.271728271728271, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.884, + "step": 7852 + }, + { + "epoch": 6.2725274725274724, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 7853 + }, + { + "epoch": 6.2733266733266735, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7854 + }, + { + "epoch": 6.2741258741258745, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 7855 + }, + { + "epoch": 6.274925074925075, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 7856 + }, + { + "epoch": 6.275724275724276, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 7857 + }, + { + "epoch": 6.276523476523477, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 7858 + }, + { + "epoch": 6.277322677322678, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 7859 + }, + { + "epoch": 6.278121878121878, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8824, + "step": 7860 + }, + { + "epoch": 6.278921078921079, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 7861 + }, + { + "epoch": 6.27972027972028, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 7862 + }, + { + "epoch": 
6.28051948051948, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 7863 + }, + { + "epoch": 6.281318681318681, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 7864 + }, + { + "epoch": 6.282117882117882, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 7865 + }, + { + "epoch": 6.282917082917083, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 7866 + }, + { + "epoch": 6.283716283716283, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 7867 + }, + { + "epoch": 6.2845154845154845, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 7868 + }, + { + "epoch": 6.2853146853146855, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 7869 + }, + { + "epoch": 6.2861138861138866, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 7870 + }, + { + "epoch": 6.286913086913087, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 7871 + }, + { + "epoch": 6.287712287712288, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9026, + "step": 7872 + }, + { + "epoch": 6.288511488511489, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 7873 + }, + { + "epoch": 6.289310689310689, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 7874 + }, + { + "epoch": 6.29010989010989, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8966, + "step": 7875 + }, + { + "epoch": 6.290909090909091, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8958, + "step": 7876 + }, + { + "epoch": 6.291708291708292, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 7877 + }, + { + "epoch": 6.292507492507492, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 7878 + }, + { + "epoch": 6.293306693306693, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 7879 + }, + { + "epoch": 6.294105894105894, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 7880 + }, + { + "epoch": 6.294905094905095, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 7881 + }, + { + "epoch": 6.2957042957042955, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8841, + "step": 7882 + }, + { + "epoch": 6.2965034965034965, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 7883 + }, + { + "epoch": 6.2973026973026975, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 7884 + }, + { + "epoch": 6.298101898101898, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 7885 + }, + { + "epoch": 6.298901098901099, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.882, + "step": 7886 + }, + { + "epoch": 6.2997002997003, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8972, + "step": 7887 + }, + { + "epoch": 6.300499500499501, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 7888 + }, + { + "epoch": 6.301298701298701, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 7889 + }, + { + "epoch": 6.302097902097902, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 7890 + }, + { + "epoch": 6.302897102897103, + 
"grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 7891 + }, + { + "epoch": 6.303696303696304, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 7892 + }, + { + "epoch": 6.304495504495504, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 7893 + }, + { + "epoch": 6.305294705294705, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 7894 + }, + { + "epoch": 6.306093906093906, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 7895 + }, + { + "epoch": 6.3068931068931064, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8773, + "step": 7896 + }, + { + "epoch": 6.3076923076923075, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 7897 + }, + { + "epoch": 6.3084915084915085, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 7898 + }, + { + "epoch": 6.30929070929071, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 7899 + }, + { + "epoch": 6.31008991008991, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 7900 + }, + { + "epoch": 6.310889110889111, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8813, + "step": 7901 + }, + { + "epoch": 6.311688311688312, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 7902 + }, + { + "epoch": 6.312487512487513, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 7903 + }, + { + "epoch": 6.313286713286713, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 7904 + }, + { + "epoch": 6.314085914085914, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8796, + "step": 7905 + }, + { + "epoch": 6.314885114885115, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 7906 + }, + { + "epoch": 6.315684315684316, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 7907 + }, + { + "epoch": 6.316483516483516, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 7908 + }, + { + "epoch": 6.317282717282717, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8776, + "step": 7909 + }, + { + "epoch": 6.318081918081918, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 7910 + }, + { + "epoch": 6.3188811188811185, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8753, + "step": 7911 + }, + { + "epoch": 6.3196803196803195, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 7912 + }, + { + "epoch": 6.3204795204795206, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 7913 + }, + { + "epoch": 6.321278721278722, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 7914 + }, + { + "epoch": 6.322077922077922, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 7915 + }, + { + "epoch": 6.322877122877123, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 7916 + }, + { + "epoch": 6.323676323676324, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 7917 + }, + { + "epoch": 6.324475524475525, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 7918 + }, + { + "epoch": 6.325274725274725, + "grad_norm": 
0.310546875, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 7919 + }, + { + "epoch": 6.326073926073926, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 7920 + }, + { + "epoch": 6.326873126873127, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 7921 + }, + { + "epoch": 6.327672327672328, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 7922 + }, + { + "epoch": 6.328471528471528, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 7923 + }, + { + "epoch": 6.329270729270729, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 7924 + }, + { + "epoch": 6.33006993006993, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 7925 + }, + { + "epoch": 6.3308691308691305, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 7926 + }, + { + "epoch": 6.3316683316683315, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 7927 + }, + { + "epoch": 6.332467532467533, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 7928 + }, + { + "epoch": 6.333266733266734, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 7929 + }, + { + "epoch": 6.334065934065934, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 7930 + }, + { + "epoch": 6.334865134865135, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 7931 + }, + { + "epoch": 6.335664335664336, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 7932 + }, + { + "epoch": 6.336463536463537, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 7933 + }, + { + "epoch": 6.337262737262737, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8812, + "step": 7934 + }, + { + "epoch": 6.338061938061938, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 7935 + }, + { + "epoch": 6.338861138861139, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 7936 + }, + { + "epoch": 6.339660339660339, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 7937 + }, + { + "epoch": 6.34045954045954, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 7938 + }, + { + "epoch": 6.341258741258741, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 7939 + }, + { + "epoch": 6.342057942057942, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 7940 + }, + { + "epoch": 6.3428571428571425, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 7941 + }, + { + "epoch": 6.343656343656344, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 7942 + }, + { + "epoch": 6.344455544455545, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 7943 + }, + { + "epoch": 6.345254745254746, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8757, + "step": 7944 + }, + { + "epoch": 6.346053946053946, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 7945 + }, + { + "epoch": 6.346853146853147, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 7946 + }, + { + "epoch": 6.347652347652348, + "grad_norm": 0.29296875, + 
"learning_rate": 0.0002, + "loss": 0.8857, + "step": 7947 + }, + { + "epoch": 6.348451548451548, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.885, + "step": 7948 + }, + { + "epoch": 6.349250749250749, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8804, + "step": 7949 + }, + { + "epoch": 6.35004995004995, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 7950 + }, + { + "epoch": 6.350849150849151, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 7951 + }, + { + "epoch": 6.351648351648351, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 7952 + }, + { + "epoch": 6.352447552447552, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 7953 + }, + { + "epoch": 6.353246753246753, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 7954 + }, + { + "epoch": 6.354045954045954, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 7955 + }, + { + "epoch": 6.3548451548451546, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 7956 + }, + { + "epoch": 6.355644355644356, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.9009, + "step": 7957 + }, + { + "epoch": 6.356443556443557, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 7958 + }, + { + "epoch": 6.357242757242757, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 7959 + }, + { + "epoch": 6.358041958041958, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 7960 + }, + { + "epoch": 6.358841158841159, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 7961 + }, + { + "epoch": 6.35964035964036, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 7962 + }, + { + "epoch": 6.36043956043956, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 7963 + }, + { + "epoch": 6.361238761238761, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 7964 + }, + { + "epoch": 6.362037962037962, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 7965 + }, + { + "epoch": 6.362837162837163, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 7966 + }, + { + "epoch": 6.363636363636363, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 7967 + }, + { + "epoch": 6.364435564435564, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 7968 + }, + { + "epoch": 6.365234765234765, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 7969 + }, + { + "epoch": 6.366033966033966, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 7970 + }, + { + "epoch": 6.366833166833167, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8784, + "step": 7971 + }, + { + "epoch": 6.367632367632368, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 7972 + }, + { + "epoch": 6.368431568431569, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8829, + "step": 7973 + }, + { + "epoch": 6.36923076923077, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7974 + }, + { + "epoch": 6.37002997002997, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 
0.891, + "step": 7975 + }, + { + "epoch": 6.370829170829171, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 7976 + }, + { + "epoch": 6.371628371628372, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8862, + "step": 7977 + }, + { + "epoch": 6.372427572427572, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 7978 + }, + { + "epoch": 6.373226773226773, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 7979 + }, + { + "epoch": 6.374025974025974, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 7980 + }, + { + "epoch": 6.374825174825175, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 7981 + }, + { + "epoch": 6.375624375624375, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 7982 + }, + { + "epoch": 6.376423576423576, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 7983 + }, + { + "epoch": 6.377222777222777, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8988, + "step": 7984 + }, + { + "epoch": 6.3780219780219785, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 7985 + }, + { + "epoch": 6.378821178821179, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 7986 + }, + { + "epoch": 6.37962037962038, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 7987 + }, + { + "epoch": 6.380419580419581, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 7988 + }, + { + "epoch": 6.381218781218781, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8934, + "step": 7989 + }, + { + "epoch": 6.382017982017982, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8809, + "step": 7990 + }, + { + "epoch": 6.382817182817183, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 7991 + }, + { + "epoch": 6.383616383616384, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7992 + }, + { + "epoch": 6.384415584415584, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 7993 + }, + { + "epoch": 6.385214785214785, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 7994 + }, + { + "epoch": 6.386013986013986, + "grad_norm": 2.828125, + "learning_rate": 0.0002, + "loss": 0.9121, + "step": 7995 + }, + { + "epoch": 6.386813186813187, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8819, + "step": 7996 + }, + { + "epoch": 6.387612387612387, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 7997 + }, + { + "epoch": 6.388411588411588, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 7998 + }, + { + "epoch": 6.389210789210789, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8841, + "step": 7999 + }, + { + "epoch": 6.39000999000999, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 8000 + }, + { + "epoch": 6.390809190809191, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8836, + "step": 8001 + }, + { + "epoch": 6.391608391608392, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.8846, + "step": 8002 + }, + { + "epoch": 6.392407592407593, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 8003 + }, + { + 
"epoch": 6.393206793206793, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8004 + }, + { + "epoch": 6.394005994005994, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 8005 + }, + { + "epoch": 6.394805194805195, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 8006 + }, + { + "epoch": 6.395604395604396, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 8007 + }, + { + "epoch": 6.396403596403596, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 8008 + }, + { + "epoch": 6.397202797202797, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 8009 + }, + { + "epoch": 6.398001998001998, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 8010 + }, + { + "epoch": 6.398801198801198, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8754, + "step": 8011 + }, + { + "epoch": 6.399600399600399, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 8012 + }, + { + "epoch": 6.4003996003996, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 8013 + }, + { + "epoch": 6.4011988011988015, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 8014 + }, + { + "epoch": 6.401998001998002, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 8015 + }, + { + "epoch": 6.402797202797203, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 8016 + }, + { + "epoch": 6.403596403596404, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 8017 + }, + { + "epoch": 6.404395604395605, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 8018 + }, + { + "epoch": 6.405194805194805, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 8019 + }, + { + "epoch": 6.405994005994006, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 8020 + }, + { + "epoch": 6.406793206793207, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 8021 + }, + { + "epoch": 6.407592407592408, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 8022 + }, + { + "epoch": 6.408391608391608, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 8023 + }, + { + "epoch": 6.409190809190809, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 8024 + }, + { + "epoch": 6.40999000999001, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 8025 + }, + { + "epoch": 6.41078921078921, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 8026 + }, + { + "epoch": 6.411588411588411, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 8027 + }, + { + "epoch": 6.4123876123876125, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 8028 + }, + { + "epoch": 6.4131868131868135, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 8029 + }, + { + "epoch": 6.413986013986014, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 8030 + }, + { + "epoch": 6.414785214785215, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8808, + "step": 8031 + }, + { + "epoch": 6.415584415584416, + 
"grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8883, + "step": 8032 + }, + { + "epoch": 6.416383616383617, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.8784, + "step": 8033 + }, + { + "epoch": 6.417182817182817, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 8034 + }, + { + "epoch": 6.417982017982018, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 8035 + }, + { + "epoch": 6.418781218781219, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 8036 + }, + { + "epoch": 6.41958041958042, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 8037 + }, + { + "epoch": 6.42037962037962, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 8038 + }, + { + "epoch": 6.421178821178821, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 8039 + }, + { + "epoch": 6.421978021978022, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 8040 + }, + { + "epoch": 6.422777222777222, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.88, + "step": 8041 + }, + { + "epoch": 6.4235764235764234, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 8042 + }, + { + "epoch": 6.4243756243756245, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 8043 + }, + { + "epoch": 6.4251748251748255, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 8044 + }, + { + "epoch": 6.425974025974026, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 8045 + }, + { + "epoch": 6.426773226773227, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 8046 + }, + { + "epoch": 6.427572427572428, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 8047 + }, + { + "epoch": 6.428371628371629, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 8048 + }, + { + "epoch": 6.429170829170829, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 8049 + }, + { + "epoch": 6.42997002997003, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 8050 + }, + { + "epoch": 6.430769230769231, + "grad_norm": 0.65234375, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 8051 + }, + { + "epoch": 6.431568431568431, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 8052 + }, + { + "epoch": 6.432367632367632, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 8053 + }, + { + "epoch": 6.433166833166833, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 8054 + }, + { + "epoch": 6.433966033966034, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 8055 + }, + { + "epoch": 6.434765234765234, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 8056 + }, + { + "epoch": 6.4355644355644355, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 8057 + }, + { + "epoch": 6.4363636363636365, + "grad_norm": 0.57421875, + "learning_rate": 0.0002, + "loss": 0.8755, + "step": 8058 + }, + { + "epoch": 6.4371628371628375, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 8059 + }, + { + "epoch": 6.437962037962038, + "grad_norm": 0.5625, + "learning_rate": 0.0002, 
+ "loss": 0.888, + "step": 8060 + }, + { + "epoch": 6.438761238761239, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.9003, + "step": 8061 + }, + { + "epoch": 6.43956043956044, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 8062 + }, + { + "epoch": 6.44035964035964, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 8063 + }, + { + "epoch": 6.441158841158841, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 8064 + }, + { + "epoch": 6.441958041958042, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 8065 + }, + { + "epoch": 6.442757242757243, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.8846, + "step": 8066 + }, + { + "epoch": 6.443556443556443, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 8067 + }, + { + "epoch": 6.444355644355644, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 8068 + }, + { + "epoch": 6.445154845154845, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 8069 + }, + { + "epoch": 6.445954045954046, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 8070 + }, + { + "epoch": 6.4467532467532465, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 8071 + }, + { + "epoch": 6.4475524475524475, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 8072 + }, + { + "epoch": 6.4483516483516485, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 8073 + }, + { + "epoch": 6.449150849150849, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 8074 + }, + { + "epoch": 6.44995004995005, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.8824, + "step": 8075 + }, + { + "epoch": 6.450749250749251, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 8076 + }, + { + "epoch": 6.451548451548452, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 8077 + }, + { + "epoch": 6.452347652347652, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 8078 + }, + { + "epoch": 6.453146853146853, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 8079 + }, + { + "epoch": 6.453946053946054, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 8080 + }, + { + "epoch": 6.454745254745255, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8823, + "step": 8081 + }, + { + "epoch": 6.455544455544455, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 8082 + }, + { + "epoch": 6.456343656343656, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8985, + "step": 8083 + }, + { + "epoch": 6.457142857142857, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8815, + "step": 8084 + }, + { + "epoch": 6.457942057942058, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 8085 + }, + { + "epoch": 6.4587412587412585, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 8086 + }, + { + "epoch": 6.4595404595404595, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 8087 + }, + { + "epoch": 6.460339660339661, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.8989, + "step": 8088 + }, + { + 
"epoch": 6.461138861138862, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8979, + "step": 8089 + }, + { + "epoch": 6.461938061938062, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.8773, + "step": 8090 + }, + { + "epoch": 6.462737262737263, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 8091 + }, + { + "epoch": 6.463536463536464, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.8796, + "step": 8092 + }, + { + "epoch": 6.464335664335664, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 8093 + }, + { + "epoch": 6.465134865134865, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 8094 + }, + { + "epoch": 6.465934065934066, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 8095 + }, + { + "epoch": 6.466733266733267, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 8096 + }, + { + "epoch": 6.467532467532467, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8819, + "step": 8097 + }, + { + "epoch": 6.468331668331668, + "grad_norm": 0.68359375, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 8098 + }, + { + "epoch": 6.469130869130869, + "grad_norm": 0.578125, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 8099 + }, + { + "epoch": 6.46993006993007, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8100 + }, + { + "epoch": 6.4707292707292705, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 8101 + }, + { + "epoch": 6.4715284715284715, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 8102 + }, + { + "epoch": 6.472327672327673, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8883, + "step": 8103 + }, + { + "epoch": 6.473126873126873, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 8104 + }, + { + "epoch": 6.473926073926074, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 8105 + }, + { + "epoch": 6.474725274725275, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.8758, + "step": 8106 + }, + { + "epoch": 6.475524475524476, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 8107 + }, + { + "epoch": 6.476323676323676, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 8108 + }, + { + "epoch": 6.477122877122877, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8796, + "step": 8109 + }, + { + "epoch": 6.477922077922078, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 8110 + }, + { + "epoch": 6.478721278721279, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 8111 + }, + { + "epoch": 6.479520479520479, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 8112 + }, + { + "epoch": 6.48031968031968, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 8113 + }, + { + "epoch": 6.481118881118881, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 8114 + }, + { + "epoch": 6.4819180819180815, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 8115 + }, + { + "epoch": 6.4827172827172825, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8836, + "step": 8116 + }, + { + "epoch": 6.483516483516484, + "grad_norm": 
0.38671875, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 8117 + }, + { + "epoch": 6.484315684315685, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8776, + "step": 8118 + }, + { + "epoch": 6.485114885114885, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 8119 + }, + { + "epoch": 6.485914085914086, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8765, + "step": 8120 + }, + { + "epoch": 6.486713286713287, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 8121 + }, + { + "epoch": 6.487512487512488, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 8122 + }, + { + "epoch": 6.488311688311688, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 8123 + }, + { + "epoch": 6.489110889110889, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 8124 + }, + { + "epoch": 6.48991008991009, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8796, + "step": 8125 + }, + { + "epoch": 6.49070929070929, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 8126 + }, + { + "epoch": 6.491508491508491, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 8127 + }, + { + "epoch": 6.492307692307692, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 8128 + }, + { + "epoch": 6.493106893106893, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8994, + "step": 8129 + }, + { + "epoch": 6.4939060939060935, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 8130 + }, + { + "epoch": 6.494705294705295, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8131 + }, + { + "epoch": 6.495504495504496, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 8132 + }, + { + "epoch": 6.496303696303697, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 8133 + }, + { + "epoch": 6.497102897102897, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8795, + "step": 8134 + }, + { + "epoch": 6.497902097902098, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 8135 + }, + { + "epoch": 6.498701298701299, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 8136 + }, + { + "epoch": 6.4995004995005, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 8137 + }, + { + "epoch": 6.5002997002997, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8995, + "step": 8138 + }, + { + "epoch": 6.501098901098901, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 8139 + }, + { + "epoch": 6.501898101898102, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 8140 + }, + { + "epoch": 6.502697302697303, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 8141 + }, + { + "epoch": 6.503496503496503, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8786, + "step": 8142 + }, + { + "epoch": 6.504295704295704, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 8143 + }, + { + "epoch": 6.505094905094905, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 8144 + }, + { + "epoch": 6.5058941058941056, + "grad_norm": 0.310546875, + "learning_rate": 
0.0002, + "loss": 0.8948, + "step": 8145 + }, + { + "epoch": 6.506693306693307, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 8146 + }, + { + "epoch": 6.507492507492508, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 8147 + }, + { + "epoch": 6.508291708291709, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 8148 + }, + { + "epoch": 6.509090909090909, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8829, + "step": 8149 + }, + { + "epoch": 6.50989010989011, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8812, + "step": 8150 + }, + { + "epoch": 6.510689310689311, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 8151 + }, + { + "epoch": 6.511488511488512, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8876, + "step": 8152 + }, + { + "epoch": 6.512287712287712, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 8153 + }, + { + "epoch": 6.513086913086913, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 8154 + }, + { + "epoch": 6.513886113886114, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 8155 + }, + { + "epoch": 6.514685314685314, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 8156 + }, + { + "epoch": 6.515484515484515, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 8157 + }, + { + "epoch": 6.516283716283716, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 8158 + }, + { + "epoch": 6.517082917082917, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 8159 + }, + { + "epoch": 6.517882117882118, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 8160 + }, + { + "epoch": 6.518681318681319, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8809, + "step": 8161 + }, + { + "epoch": 6.51948051948052, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 8162 + }, + { + "epoch": 6.520279720279721, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 8163 + }, + { + "epoch": 6.521078921078921, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 8164 + }, + { + "epoch": 6.521878121878122, + "grad_norm": 2.9375, + "learning_rate": 0.0002, + "loss": 0.9355, + "step": 8165 + }, + { + "epoch": 6.522677322677323, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 8166 + }, + { + "epoch": 6.523476523476523, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 8167 + }, + { + "epoch": 6.524275724275724, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 8168 + }, + { + "epoch": 6.525074925074925, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8823, + "step": 8169 + }, + { + "epoch": 6.525874125874126, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 8170 + }, + { + "epoch": 6.526673326673326, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8829, + "step": 8171 + }, + { + "epoch": 6.527472527472527, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8793, + "step": 8172 + }, + { + "epoch": 6.528271728271728, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8882, 
+ "step": 8173 + }, + { + "epoch": 6.5290709290709295, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 8174 + }, + { + "epoch": 6.52987012987013, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 8175 + }, + { + "epoch": 6.530669330669331, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8991, + "step": 8176 + }, + { + "epoch": 6.531468531468532, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8767, + "step": 8177 + }, + { + "epoch": 6.532267732267732, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 8178 + }, + { + "epoch": 6.533066933066933, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 8179 + }, + { + "epoch": 6.533866133866134, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 8180 + }, + { + "epoch": 6.534665334665335, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8883, + "step": 8181 + }, + { + "epoch": 6.535464535464535, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.877, + "step": 8182 + }, + { + "epoch": 6.536263736263736, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 8183 + }, + { + "epoch": 6.537062937062937, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 8184 + }, + { + "epoch": 6.537862137862138, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 8185 + }, + { + "epoch": 6.538661338661338, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.9014, + "step": 8186 + }, + { + "epoch": 6.539460539460539, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8771, + "step": 8187 + }, + { + "epoch": 6.54025974025974, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8782, + "step": 8188 + }, + { + "epoch": 6.541058941058941, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 8189 + }, + { + "epoch": 6.541858141858142, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 8190 + }, + { + "epoch": 6.542657342657343, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 8191 + }, + { + "epoch": 6.543456543456544, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 8192 + }, + { + "epoch": 6.544255744255745, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 8193 + }, + { + "epoch": 6.545054945054945, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8986, + "step": 8194 + }, + { + "epoch": 6.545854145854146, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.884, + "step": 8195 + }, + { + "epoch": 6.546653346653347, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 8196 + }, + { + "epoch": 6.547452547452547, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 8197 + }, + { + "epoch": 6.548251748251748, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 8198 + }, + { + "epoch": 6.549050949050949, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 8199 + }, + { + "epoch": 6.54985014985015, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 8200 + }, + { + "epoch": 6.55064935064935, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 8201 + }, + { + 
"epoch": 6.551448551448551, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.9039, + "step": 8202 + }, + { + "epoch": 6.5522477522477525, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 8203 + }, + { + "epoch": 6.5530469530469535, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8824, + "step": 8204 + }, + { + "epoch": 6.553846153846154, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 8205 + }, + { + "epoch": 6.554645354645355, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 8206 + }, + { + "epoch": 6.555444555444556, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8812, + "step": 8207 + }, + { + "epoch": 6.556243756243756, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 8208 + }, + { + "epoch": 6.557042957042957, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 8209 + }, + { + "epoch": 6.557842157842158, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 8210 + }, + { + "epoch": 6.558641358641359, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 8211 + }, + { + "epoch": 6.559440559440559, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8813, + "step": 8212 + }, + { + "epoch": 6.56023976023976, + "grad_norm": 0.232421875, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 8213 + }, + { + "epoch": 6.561038961038961, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8214 + }, + { + "epoch": 6.561838161838162, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 8215 + }, + { + "epoch": 6.562637362637362, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8807, + "step": 8216 + }, + { + "epoch": 6.5634365634365635, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 8217 + }, + { + "epoch": 6.5642357642357645, + "grad_norm": 0.2421875, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 8218 + }, + { + "epoch": 6.565034965034965, + "grad_norm": 0.2412109375, + "learning_rate": 0.0002, + "loss": 0.8816, + "step": 8219 + }, + { + "epoch": 6.565834165834166, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 8220 + }, + { + "epoch": 6.566633366633367, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 8221 + }, + { + "epoch": 6.567432567432568, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 8222 + }, + { + "epoch": 6.568231768231768, + "grad_norm": 0.244140625, + "learning_rate": 0.0002, + "loss": 0.8797, + "step": 8223 + }, + { + "epoch": 6.569030969030969, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 8224 + }, + { + "epoch": 6.56983016983017, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 8225 + }, + { + "epoch": 6.570629370629371, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 8226 + }, + { + "epoch": 6.571428571428571, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 8227 + }, + { + "epoch": 6.572227772227772, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 8228 + }, + { + "epoch": 6.573026973026973, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 8229 + }, + { + "epoch": 
6.573826173826173, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8803, + "step": 8230 + }, + { + "epoch": 6.574625374625374, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 8231 + }, + { + "epoch": 6.5754245754245755, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8838, + "step": 8232 + }, + { + "epoch": 6.5762237762237765, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 8233 + }, + { + "epoch": 6.577022977022977, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 8234 + }, + { + "epoch": 6.577822177822178, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8862, + "step": 8235 + }, + { + "epoch": 6.578621378621379, + "grad_norm": 0.240234375, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 8236 + }, + { + "epoch": 6.57942057942058, + "grad_norm": 0.25, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 8237 + }, + { + "epoch": 6.58021978021978, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 8238 + }, + { + "epoch": 6.581018981018981, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 8239 + }, + { + "epoch": 6.581818181818182, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 8240 + }, + { + "epoch": 6.582617382617382, + "grad_norm": 0.25, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 8241 + }, + { + "epoch": 6.583416583416583, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 8242 + }, + { + "epoch": 6.584215784215784, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 8243 + }, + { + "epoch": 6.585014985014985, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 8244 + }, + { + "epoch": 6.585814185814185, + "grad_norm": 0.25, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 8245 + }, + { + "epoch": 6.5866133866133865, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.885, + "step": 8246 + }, + { + "epoch": 6.5874125874125875, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 8247 + }, + { + "epoch": 6.5882117882117885, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 8248 + }, + { + "epoch": 6.589010989010989, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 8249 + }, + { + "epoch": 6.58981018981019, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8250 + }, + { + "epoch": 6.590609390609391, + "grad_norm": 0.2431640625, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 8251 + }, + { + "epoch": 6.591408591408591, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 8252 + }, + { + "epoch": 6.592207792207792, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 8253 + }, + { + "epoch": 6.593006993006993, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8254 + }, + { + "epoch": 6.593806193806194, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 8255 + }, + { + "epoch": 6.594605394605395, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 8256 + }, + { + "epoch": 6.595404595404595, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 8257 + }, + { + "epoch": 6.596203796203796, + "grad_norm": 
0.33203125, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 8258 + }, + { + "epoch": 6.597002997002997, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 8259 + }, + { + "epoch": 6.5978021978021975, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 8260 + }, + { + "epoch": 6.5986013986013985, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 8261 + }, + { + "epoch": 6.5994005994005995, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 8262 + }, + { + "epoch": 6.600199800199801, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8797, + "step": 8263 + }, + { + "epoch": 6.600999000999001, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 8264 + }, + { + "epoch": 6.601798201798202, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 8265 + }, + { + "epoch": 6.602597402597403, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 8266 + }, + { + "epoch": 6.603396603396604, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8812, + "step": 8267 + }, + { + "epoch": 6.604195804195804, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 8268 + }, + { + "epoch": 6.604995004995005, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 8269 + }, + { + "epoch": 6.605794205794206, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 8270 + }, + { + "epoch": 6.606593406593406, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 8271 + }, + { + "epoch": 6.607392607392607, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 8272 + }, + { + "epoch": 6.608191808191808, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8785, + "step": 8273 + }, + { + "epoch": 6.608991008991009, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 8274 + }, + { + "epoch": 6.6097902097902095, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8789, + "step": 8275 + }, + { + "epoch": 6.6105894105894105, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8276 + }, + { + "epoch": 6.611388611388612, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 8277 + }, + { + "epoch": 6.612187812187813, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 8278 + }, + { + "epoch": 6.612987012987013, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 8279 + }, + { + "epoch": 6.613786213786214, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.9016, + "step": 8280 + }, + { + "epoch": 6.614585414585415, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 8281 + }, + { + "epoch": 6.615384615384615, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 8282 + }, + { + "epoch": 6.616183816183816, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 8283 + }, + { + "epoch": 6.616983016983017, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 8284 + }, + { + "epoch": 6.617782217782218, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 8285 + }, + { + "epoch": 6.618581418581418, + "grad_norm": 0.28515625, + 
"learning_rate": 0.0002, + "loss": 0.876, + "step": 8286 + }, + { + "epoch": 6.619380619380619, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 8287 + }, + { + "epoch": 6.62017982017982, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 8288 + }, + { + "epoch": 6.620979020979021, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 8289 + }, + { + "epoch": 6.6217782217782215, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 8290 + }, + { + "epoch": 6.6225774225774225, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 8291 + }, + { + "epoch": 6.623376623376624, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8803, + "step": 8292 + }, + { + "epoch": 6.624175824175824, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 8293 + }, + { + "epoch": 6.624975024975025, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8294 + }, + { + "epoch": 6.625774225774226, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 8295 + }, + { + "epoch": 6.626573426573427, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.881, + "step": 8296 + }, + { + "epoch": 6.627372627372627, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 8297 + }, + { + "epoch": 6.628171828171828, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 8298 + }, + { + "epoch": 6.628971028971029, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 8299 + }, + { + "epoch": 6.62977022977023, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 8300 + }, + { + "epoch": 6.63056943056943, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8743, + "step": 8301 + }, + { + "epoch": 6.631368631368631, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 8302 + }, + { + "epoch": 6.632167832167832, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 8303 + }, + { + "epoch": 6.6329670329670325, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 8304 + }, + { + "epoch": 6.6337662337662335, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 8305 + }, + { + "epoch": 6.634565434565435, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 8306 + }, + { + "epoch": 6.635364635364636, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8817, + "step": 8307 + }, + { + "epoch": 6.636163836163837, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 8308 + }, + { + "epoch": 6.636963036963037, + "grad_norm": 0.376953125, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 8309 + }, + { + "epoch": 6.637762237762238, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 8310 + }, + { + "epoch": 6.638561438561439, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 8311 + }, + { + "epoch": 6.639360639360639, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 8312 + }, + { + "epoch": 6.64015984015984, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 8313 + }, + { + "epoch": 6.640959040959041, + "grad_norm": 0.302734375, + "learning_rate": 
0.0002, + "loss": 0.883, + "step": 8314 + }, + { + "epoch": 6.641758241758242, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 8315 + }, + { + "epoch": 6.642557442557442, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 8316 + }, + { + "epoch": 6.643356643356643, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 8317 + }, + { + "epoch": 6.644155844155844, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.878, + "step": 8318 + }, + { + "epoch": 6.644955044955045, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 8319 + }, + { + "epoch": 6.645754245754246, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 8320 + }, + { + "epoch": 6.646553446553447, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 8321 + }, + { + "epoch": 6.647352647352648, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 8322 + }, + { + "epoch": 6.648151848151848, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8323 + }, + { + "epoch": 6.648951048951049, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 8324 + }, + { + "epoch": 6.64975024975025, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 8325 + }, + { + "epoch": 6.650549450549451, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 8326 + }, + { + "epoch": 6.651348651348651, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 8327 + }, + { + "epoch": 6.652147852147852, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 8328 + }, + { + "epoch": 6.652947052947053, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 8329 + }, + { + "epoch": 6.653746253746254, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 8330 + }, + { + "epoch": 6.654545454545454, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.882, + "step": 8331 + }, + { + "epoch": 6.655344655344655, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 8332 + }, + { + "epoch": 6.656143856143856, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 8333 + }, + { + "epoch": 6.6569430569430565, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 8334 + }, + { + "epoch": 6.657742257742258, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 8335 + }, + { + "epoch": 6.658541458541459, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 8336 + }, + { + "epoch": 6.65934065934066, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 8337 + }, + { + "epoch": 6.66013986013986, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 8338 + }, + { + "epoch": 6.660939060939061, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.88, + "step": 8339 + }, + { + "epoch": 6.661738261738262, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 8340 + }, + { + "epoch": 6.662537462537463, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 8341 + }, + { + "epoch": 6.663336663336663, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8858, 
+ "step": 8342 + }, + { + "epoch": 6.664135864135864, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 8343 + }, + { + "epoch": 6.664935064935065, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 8344 + }, + { + "epoch": 6.665734265734265, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 8345 + }, + { + "epoch": 6.666533466533466, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 8346 + }, + { + "epoch": 6.667332667332667, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8786, + "step": 8347 + }, + { + "epoch": 6.668131868131868, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 8348 + }, + { + "epoch": 6.668931068931069, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 8349 + }, + { + "epoch": 6.66973026973027, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8803, + "step": 8350 + }, + { + "epoch": 6.670529470529471, + "grad_norm": 0.248046875, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 8351 + }, + { + "epoch": 6.671328671328672, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 8352 + }, + { + "epoch": 6.672127872127872, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 8353 + }, + { + "epoch": 6.672927072927073, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 8354 + }, + { + "epoch": 6.673726273726274, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8812, + "step": 8355 + }, + { + "epoch": 6.674525474525474, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8768, + "step": 8356 + }, + { + "epoch": 6.675324675324675, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8823, + "step": 8357 + }, + { + "epoch": 6.676123876123876, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 8358 + }, + { + "epoch": 6.676923076923077, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8864, + "step": 8359 + }, + { + "epoch": 6.677722277722278, + "grad_norm": 0.2412109375, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 8360 + }, + { + "epoch": 6.678521478521478, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 8361 + }, + { + "epoch": 6.679320679320679, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 8362 + }, + { + "epoch": 6.6801198801198804, + "grad_norm": 0.24609375, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 8363 + }, + { + "epoch": 6.680919080919081, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 8364 + }, + { + "epoch": 6.681718281718282, + "grad_norm": 0.2470703125, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 8365 + }, + { + "epoch": 6.682517482517483, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 8366 + }, + { + "epoch": 6.683316683316683, + "grad_norm": 0.3515625, + "learning_rate": 0.0002, + "loss": 0.8774, + "step": 8367 + }, + { + "epoch": 6.684115884115884, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 8368 + }, + { + "epoch": 6.684915084915085, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.881, + "step": 8369 + }, + { + "epoch": 6.685714285714286, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 8370 + }, + { + 
"epoch": 6.686513486513487, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8944, + "step": 8371 + }, + { + "epoch": 6.687312687312687, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8957, + "step": 8372 + }, + { + "epoch": 6.688111888111888, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 8373 + }, + { + "epoch": 6.688911088911089, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8807, + "step": 8374 + }, + { + "epoch": 6.689710289710289, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 8375 + }, + { + "epoch": 6.69050949050949, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 8376 + }, + { + "epoch": 6.691308691308691, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 8377 + }, + { + "epoch": 6.6921078921078925, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8799, + "step": 8378 + }, + { + "epoch": 6.692907092907093, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8764, + "step": 8379 + }, + { + "epoch": 6.693706293706294, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 8380 + }, + { + "epoch": 6.694505494505495, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 8381 + }, + { + "epoch": 6.695304695304696, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 8382 + }, + { + "epoch": 6.696103896103896, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8862, + "step": 8383 + }, + { + "epoch": 6.696903096903097, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 8384 + }, + { + "epoch": 6.697702297702298, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 8385 + }, + { + "epoch": 6.698501498501498, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8819, + "step": 8386 + }, + { + "epoch": 6.699300699300699, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8829, + "step": 8387 + }, + { + "epoch": 6.7000999000999, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 8388 + }, + { + "epoch": 6.700899100899101, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.9428, + "step": 8389 + }, + { + "epoch": 6.701698301698301, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 8390 + }, + { + "epoch": 6.702497502497502, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 8391 + }, + { + "epoch": 6.7032967032967035, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8948, + "step": 8392 + }, + { + "epoch": 6.7040959040959045, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8801, + "step": 8393 + }, + { + "epoch": 6.704895104895105, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8824, + "step": 8394 + }, + { + "epoch": 6.705694305694306, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8811, + "step": 8395 + }, + { + "epoch": 6.706493506493507, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 8396 + }, + { + "epoch": 6.707292707292707, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 8397 + }, + { + "epoch": 6.708091908091908, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8945, + "step": 8398 + }, + { + "epoch": 
6.708891108891109, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 8399 + }, + { + "epoch": 6.70969030969031, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8796, + "step": 8400 + }, + { + "epoch": 6.71048951048951, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 8401 + }, + { + "epoch": 6.711288711288711, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 8402 + }, + { + "epoch": 6.712087912087912, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 8403 + }, + { + "epoch": 6.712887112887113, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8759, + "step": 8404 + }, + { + "epoch": 6.713686313686313, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 8405 + }, + { + "epoch": 6.7144855144855145, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 8406 + }, + { + "epoch": 6.7152847152847155, + "grad_norm": 0.80859375, + "learning_rate": 0.0002, + "loss": 0.9429, + "step": 8407 + }, + { + "epoch": 6.716083916083916, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8731, + "step": 8408 + }, + { + "epoch": 6.716883116883117, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 8409 + }, + { + "epoch": 6.717682317682318, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 8410 + }, + { + "epoch": 6.718481518481519, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 8411 + }, + { + "epoch": 6.719280719280719, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 8412 + }, + { + "epoch": 6.72007992007992, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8825, + "step": 8413 + }, + { + "epoch": 6.720879120879121, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8828, + "step": 8414 + }, + { + "epoch": 6.721678321678322, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.901, + "step": 8415 + }, + { + "epoch": 6.722477522477522, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8801, + "step": 8416 + }, + { + "epoch": 6.723276723276723, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 8417 + }, + { + "epoch": 6.724075924075924, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8815, + "step": 8418 + }, + { + "epoch": 6.724875124875124, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.882, + "step": 8419 + }, + { + "epoch": 6.725674325674325, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 8420 + }, + { + "epoch": 6.7264735264735265, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 8421 + }, + { + "epoch": 6.7272727272727275, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 8422 + }, + { + "epoch": 6.7280719280719286, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 8423 + }, + { + "epoch": 6.728871128871129, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.879, + "step": 8424 + }, + { + "epoch": 6.72967032967033, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8804, + "step": 8425 + }, + { + "epoch": 6.730469530469531, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8823, + "step": 8426 + }, + { + "epoch": 6.731268731268731, + "grad_norm": 0.4375, 
+ "learning_rate": 0.0002, + "loss": 0.8868, + "step": 8427 + }, + { + "epoch": 6.732067932067932, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 8428 + }, + { + "epoch": 6.732867132867133, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 8429 + }, + { + "epoch": 6.733666333666334, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 8430 + }, + { + "epoch": 6.734465534465534, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 8431 + }, + { + "epoch": 6.735264735264735, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 8432 + }, + { + "epoch": 6.736063936063936, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8433 + }, + { + "epoch": 6.736863136863137, + "grad_norm": 0.58984375, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 8434 + }, + { + "epoch": 6.7376623376623375, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.899, + "step": 8435 + }, + { + "epoch": 6.7384615384615385, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 8436 + }, + { + "epoch": 6.7392607392607395, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 8437 + }, + { + "epoch": 6.74005994005994, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.8767, + "step": 8438 + }, + { + "epoch": 6.740859140859141, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 8439 + }, + { + "epoch": 6.741658341658342, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 8440 + }, + { + "epoch": 6.742457542457543, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.884, + "step": 8441 + }, + { + "epoch": 6.743256743256743, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 8442 + }, + { + "epoch": 6.744055944055944, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 8443 + }, + { + "epoch": 6.744855144855145, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 8444 + }, + { + "epoch": 6.745654345654346, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 8445 + }, + { + "epoch": 6.746453546453546, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8976, + "step": 8446 + }, + { + "epoch": 6.747252747252747, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 8447 + }, + { + "epoch": 6.748051948051948, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.8822, + "step": 8448 + }, + { + "epoch": 6.7488511488511485, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 8449 + }, + { + "epoch": 6.7496503496503495, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 8450 + }, + { + "epoch": 6.7504495504495505, + "grad_norm": 0.4921875, + "learning_rate": 0.0002, + "loss": 0.8796, + "step": 8451 + }, + { + "epoch": 6.751248751248752, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8768, + "step": 8452 + }, + { + "epoch": 6.752047952047952, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8761, + "step": 8453 + }, + { + "epoch": 6.752847152847153, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8819, + "step": 8454 + }, + { + "epoch": 6.753646353646354, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + 
"loss": 0.8893, + "step": 8455 + }, + { + "epoch": 6.754445554445555, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 8456 + }, + { + "epoch": 6.755244755244755, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8795, + "step": 8457 + }, + { + "epoch": 6.756043956043956, + "grad_norm": 0.45703125, + "learning_rate": 0.0002, + "loss": 0.8811, + "step": 8458 + }, + { + "epoch": 6.756843156843157, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8806, + "step": 8459 + }, + { + "epoch": 6.757642357642357, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 8460 + }, + { + "epoch": 6.758441558441558, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.8809, + "step": 8461 + }, + { + "epoch": 6.759240759240759, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 8462 + }, + { + "epoch": 6.76003996003996, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 8463 + }, + { + "epoch": 6.7608391608391605, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8813, + "step": 8464 + }, + { + "epoch": 6.7616383616383615, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8828, + "step": 8465 + }, + { + "epoch": 6.7624375624375626, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 8466 + }, + { + "epoch": 6.763236763236764, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 8467 + }, + { + "epoch": 6.764035964035964, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.8793, + "step": 8468 + }, + { + "epoch": 6.764835164835165, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 8469 + }, + { + "epoch": 6.765634365634366, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 8470 + }, + { + "epoch": 6.766433566433566, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 8471 + }, + { + "epoch": 6.767232767232767, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 0.8788, + "step": 8472 + }, + { + "epoch": 6.768031968031968, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 8473 + }, + { + "epoch": 6.768831168831169, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 8474 + }, + { + "epoch": 6.76963036963037, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8475 + }, + { + "epoch": 6.77042957042957, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 8476 + }, + { + "epoch": 6.771228771228771, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 8477 + }, + { + "epoch": 6.772027972027972, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8935, + "step": 8478 + }, + { + "epoch": 6.7728271728271725, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 8479 + }, + { + "epoch": 6.7736263736263735, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 8480 + }, + { + "epoch": 6.774425574425575, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 8481 + }, + { + "epoch": 6.775224775224775, + "grad_norm": 0.8671875, + "learning_rate": 0.0002, + "loss": 0.9084, + "step": 8482 + }, + { + "epoch": 6.776023976023976, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8779, + "step": 8483 + }, 
+ { + "epoch": 6.776823176823177, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8828, + "step": 8484 + }, + { + "epoch": 6.777622377622378, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 8485 + }, + { + "epoch": 6.778421578421579, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 8486 + }, + { + "epoch": 6.779220779220779, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 8487 + }, + { + "epoch": 6.78001998001998, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 8488 + }, + { + "epoch": 6.780819180819181, + "grad_norm": 0.39453125, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 8489 + }, + { + "epoch": 6.781618381618381, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8823, + "step": 8490 + }, + { + "epoch": 6.782417582417582, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8491 + }, + { + "epoch": 6.783216783216783, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8492 + }, + { + "epoch": 6.784015984015984, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 8493 + }, + { + "epoch": 6.7848151848151845, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 8494 + }, + { + "epoch": 6.785614385614386, + "grad_norm": 2.890625, + "learning_rate": 0.0002, + "loss": 0.9272, + "step": 8495 + }, + { + "epoch": 6.786413586413587, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 8496 + }, + { + "epoch": 6.787212787212788, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8776, + "step": 8497 + }, + { + "epoch": 6.788011988011988, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 8498 + }, + { + "epoch": 6.788811188811189, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 8499 + }, + { + "epoch": 6.78961038961039, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 8500 + }, + { + "epoch": 6.79040959040959, + "grad_norm": 0.38671875, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 8501 + }, + { + "epoch": 6.791208791208791, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8502 + }, + { + "epoch": 6.792007992007992, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8798, + "step": 8503 + }, + { + "epoch": 6.792807192807193, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 8504 + }, + { + "epoch": 6.793606393606393, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 8505 + }, + { + "epoch": 6.794405594405594, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 8506 + }, + { + "epoch": 6.795204795204795, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.885, + "step": 8507 + }, + { + "epoch": 6.796003996003996, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 8508 + }, + { + "epoch": 6.796803196803197, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 8509 + }, + { + "epoch": 6.797602397602398, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 8510 + }, + { + "epoch": 6.798401598401599, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 8511 + }, + { + "epoch": 
6.799200799200799, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 8512 + }, + { + "epoch": 6.8, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 8513 + }, + { + "epoch": 6.800799200799201, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 8514 + }, + { + "epoch": 6.801598401598402, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 8515 + }, + { + "epoch": 6.802397602397602, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8841, + "step": 8516 + }, + { + "epoch": 6.803196803196803, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 8517 + }, + { + "epoch": 6.803996003996004, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 8518 + }, + { + "epoch": 6.804795204795205, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 8519 + }, + { + "epoch": 6.805594405594405, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 8520 + }, + { + "epoch": 6.806393606393606, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 8521 + }, + { + "epoch": 6.807192807192807, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.885, + "step": 8522 + }, + { + "epoch": 6.8079920079920075, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 8523 + }, + { + "epoch": 6.808791208791209, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 8524 + }, + { + "epoch": 6.80959040959041, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8821, + "step": 8525 + }, + { + "epoch": 6.810389610389611, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8888, + "step": 8526 + }, + { + "epoch": 6.811188811188811, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8901, + "step": 8527 + }, + { + "epoch": 6.811988011988012, + "grad_norm": 0.248046875, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 8528 + }, + { + "epoch": 6.812787212787213, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 8529 + }, + { + "epoch": 6.813586413586414, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 8530 + }, + { + "epoch": 6.814385614385614, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 8531 + }, + { + "epoch": 6.815184815184815, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8822, + "step": 8532 + }, + { + "epoch": 6.815984015984016, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8778, + "step": 8533 + }, + { + "epoch": 6.816783216783216, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 8534 + }, + { + "epoch": 6.817582417582417, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 8535 + }, + { + "epoch": 6.818381618381618, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 8536 + }, + { + "epoch": 6.819180819180819, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 8537 + }, + { + "epoch": 6.8199800199800205, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 8538 + }, + { + "epoch": 6.820779220779221, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 8539 + }, + { + "epoch": 6.821578421578422, + "grad_norm": 
0.30078125, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 8540 + }, + { + "epoch": 6.822377622377623, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 8541 + }, + { + "epoch": 6.823176823176823, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 8542 + }, + { + "epoch": 6.823976023976024, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 8543 + }, + { + "epoch": 6.824775224775225, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 8544 + }, + { + "epoch": 6.825574425574426, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 8545 + }, + { + "epoch": 6.826373626373626, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 8546 + }, + { + "epoch": 6.827172827172827, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 8547 + }, + { + "epoch": 6.827972027972028, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8817, + "step": 8548 + }, + { + "epoch": 6.828771228771229, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8961, + "step": 8549 + }, + { + "epoch": 6.829570429570429, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.9002, + "step": 8550 + }, + { + "epoch": 6.83036963036963, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8824, + "step": 8551 + }, + { + "epoch": 6.8311688311688314, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 8552 + }, + { + "epoch": 6.831968031968032, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 8553 + }, + { + "epoch": 6.832767232767233, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8739, + "step": 8554 + }, + { + "epoch": 6.833566433566434, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 8555 + }, + { + "epoch": 6.834365634365635, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 8556 + }, + { + "epoch": 6.835164835164835, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8964, + "step": 8557 + }, + { + "epoch": 6.835964035964036, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8793, + "step": 8558 + }, + { + "epoch": 6.836763236763237, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8559 + }, + { + "epoch": 6.837562437562438, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 8560 + }, + { + "epoch": 6.838361638361638, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 8561 + }, + { + "epoch": 6.839160839160839, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 8562 + }, + { + "epoch": 6.83996003996004, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 8563 + }, + { + "epoch": 6.84075924075924, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8817, + "step": 8564 + }, + { + "epoch": 6.841558441558441, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 8565 + }, + { + "epoch": 6.842357642357642, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 8566 + }, + { + "epoch": 6.8431568431568435, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 8567 + }, + { + "epoch": 6.843956043956044, + "grad_norm": 0.283203125, + 
"learning_rate": 0.0002, + "loss": 0.8797, + "step": 8568 + }, + { + "epoch": 6.844755244755245, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8569 + }, + { + "epoch": 6.845554445554446, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8824, + "step": 8570 + }, + { + "epoch": 6.846353646353647, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 8571 + }, + { + "epoch": 6.847152847152847, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 8572 + }, + { + "epoch": 6.847952047952048, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 8573 + }, + { + "epoch": 6.848751248751249, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 8574 + }, + { + "epoch": 6.849550449550449, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.88, + "step": 8575 + }, + { + "epoch": 6.85034965034965, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 8576 + }, + { + "epoch": 6.851148851148851, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 8577 + }, + { + "epoch": 6.851948051948052, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 8578 + }, + { + "epoch": 6.852747252747252, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 8579 + }, + { + "epoch": 6.853546453546453, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 8580 + }, + { + "epoch": 6.8543456543456545, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 8581 + }, + { + "epoch": 6.8551448551448555, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8822, + "step": 8582 + }, + { + "epoch": 6.855944055944056, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 8583 + }, + { + "epoch": 6.856743256743257, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 8584 + }, + { + "epoch": 6.857542457542458, + "grad_norm": 0.24609375, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 8585 + }, + { + "epoch": 6.858341658341658, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 8586 + }, + { + "epoch": 6.859140859140859, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 8587 + }, + { + "epoch": 6.85994005994006, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 8588 + }, + { + "epoch": 6.860739260739261, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 8589 + }, + { + "epoch": 6.861538461538462, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 8590 + }, + { + "epoch": 6.862337662337662, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8846, + "step": 8591 + }, + { + "epoch": 6.863136863136863, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8592 + }, + { + "epoch": 6.863936063936064, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8823, + "step": 8593 + }, + { + "epoch": 6.864735264735264, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 8594 + }, + { + "epoch": 6.8655344655344654, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 8595 + }, + { + "epoch": 6.8663336663336665, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, 
+ "loss": 0.8948, + "step": 8596 + }, + { + "epoch": 6.867132867132867, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 8597 + }, + { + "epoch": 6.867932067932068, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 8598 + }, + { + "epoch": 6.868731268731269, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 8599 + }, + { + "epoch": 6.86953046953047, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 8600 + }, + { + "epoch": 6.870329670329671, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8828, + "step": 8601 + }, + { + "epoch": 6.871128871128871, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8797, + "step": 8602 + }, + { + "epoch": 6.871928071928072, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 8603 + }, + { + "epoch": 6.872727272727273, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 8604 + }, + { + "epoch": 6.873526473526473, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 8605 + }, + { + "epoch": 6.874325674325674, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 8606 + }, + { + "epoch": 6.875124875124875, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 8607 + }, + { + "epoch": 6.875924075924076, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 8608 + }, + { + "epoch": 6.876723276723276, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8801, + "step": 8609 + }, + { + "epoch": 6.8775224775224775, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8971, + "step": 8610 + }, + { + "epoch": 6.8783216783216785, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 8611 + }, + { + "epoch": 6.8791208791208796, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8821, + "step": 8612 + }, + { + "epoch": 6.87992007992008, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 8613 + }, + { + "epoch": 6.880719280719281, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 8614 + }, + { + "epoch": 6.881518481518482, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 8615 + }, + { + "epoch": 6.882317682317682, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8911, + "step": 8616 + }, + { + "epoch": 6.883116883116883, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 8617 + }, + { + "epoch": 6.883916083916084, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8815, + "step": 8618 + }, + { + "epoch": 6.884715284715285, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 8619 + }, + { + "epoch": 6.885514485514485, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8620 + }, + { + "epoch": 6.886313686313686, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8822, + "step": 8621 + }, + { + "epoch": 6.887112887112887, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8622 + }, + { + "epoch": 6.887912087912088, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 8623 + }, + { + "epoch": 6.8887112887112885, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 
8624 + }, + { + "epoch": 6.8895104895104895, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 8625 + }, + { + "epoch": 6.8903096903096905, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 8626 + }, + { + "epoch": 6.891108891108891, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8819, + "step": 8627 + }, + { + "epoch": 6.891908091908092, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 8628 + }, + { + "epoch": 6.892707292707293, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8819, + "step": 8629 + }, + { + "epoch": 6.893506493506494, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 8630 + }, + { + "epoch": 6.894305694305694, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 8631 + }, + { + "epoch": 6.895104895104895, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 8632 + }, + { + "epoch": 6.895904095904096, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 8633 + }, + { + "epoch": 6.896703296703297, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 8634 + }, + { + "epoch": 6.897502497502497, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 8635 + }, + { + "epoch": 6.898301698301698, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8808, + "step": 8636 + }, + { + "epoch": 6.899100899100899, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 8637 + }, + { + "epoch": 6.8999000999000994, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8801, + "step": 8638 + }, + { + "epoch": 6.9006993006993005, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8952, + "step": 8639 + }, + { + "epoch": 6.9014985014985015, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 8640 + }, + { + "epoch": 6.902297702297703, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8767, + "step": 8641 + }, + { + "epoch": 6.903096903096903, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 8642 + }, + { + "epoch": 6.903896103896104, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8817, + "step": 8643 + }, + { + "epoch": 6.904695304695305, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8644 + }, + { + "epoch": 6.905494505494506, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8846, + "step": 8645 + }, + { + "epoch": 6.906293706293706, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 8646 + }, + { + "epoch": 6.907092907092907, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 8647 + }, + { + "epoch": 6.907892107892108, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 8648 + }, + { + "epoch": 6.908691308691308, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 8649 + }, + { + "epoch": 6.909490509490509, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 8650 + }, + { + "epoch": 6.91028971028971, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 8651 + }, + { + "epoch": 6.911088911088911, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8825, + "step": 8652 + }, + { + 
"epoch": 6.911888111888112, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8799, + "step": 8653 + }, + { + "epoch": 6.9126873126873125, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8802, + "step": 8654 + }, + { + "epoch": 6.9134865134865136, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 8655 + }, + { + "epoch": 6.914285714285715, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 8656 + }, + { + "epoch": 6.915084915084915, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8759, + "step": 8657 + }, + { + "epoch": 6.915884115884116, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 8658 + }, + { + "epoch": 6.916683316683317, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8659 + }, + { + "epoch": 6.917482517482518, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 8660 + }, + { + "epoch": 6.918281718281718, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.895, + "step": 8661 + }, + { + "epoch": 6.919080919080919, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 8662 + }, + { + "epoch": 6.91988011988012, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.9028, + "step": 8663 + }, + { + "epoch": 6.920679320679321, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 8664 + }, + { + "epoch": 6.921478521478521, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 8665 + }, + { + "epoch": 6.922277722277722, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 8666 + }, + { + "epoch": 6.923076923076923, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 8667 + }, + { + "epoch": 6.9238761238761235, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8737, + "step": 8668 + }, + { + "epoch": 6.9246753246753245, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 8669 + }, + { + "epoch": 6.925474525474526, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 8670 + }, + { + "epoch": 6.926273726273727, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 8671 + }, + { + "epoch": 6.927072927072927, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 8672 + }, + { + "epoch": 6.927872127872128, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8967, + "step": 8673 + }, + { + "epoch": 6.928671328671329, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8758, + "step": 8674 + }, + { + "epoch": 6.92947052947053, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 8675 + }, + { + "epoch": 6.93026973026973, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 8676 + }, + { + "epoch": 6.931068931068931, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 8677 + }, + { + "epoch": 6.931868131868132, + "grad_norm": 0.2490234375, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 8678 + }, + { + "epoch": 6.932667332667332, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8679 + }, + { + "epoch": 6.933466533466533, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 8680 + }, + { + "epoch": 6.934265734265734, + 
"grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 8681 + }, + { + "epoch": 6.935064935064935, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8978, + "step": 8682 + }, + { + "epoch": 6.9358641358641355, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 8683 + }, + { + "epoch": 6.936663336663337, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 8684 + }, + { + "epoch": 6.937462537462538, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 8685 + }, + { + "epoch": 6.938261738261739, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8798, + "step": 8686 + }, + { + "epoch": 6.939060939060939, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 8687 + }, + { + "epoch": 6.93986013986014, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8814, + "step": 8688 + }, + { + "epoch": 6.940659340659341, + "grad_norm": 0.330078125, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 8689 + }, + { + "epoch": 6.941458541458541, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 8690 + }, + { + "epoch": 6.942257742257742, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 8691 + }, + { + "epoch": 6.943056943056943, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 8692 + }, + { + "epoch": 6.943856143856144, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 8693 + }, + { + "epoch": 6.944655344655344, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8765, + "step": 8694 + }, + { + "epoch": 6.945454545454545, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 8695 + }, + { + "epoch": 6.946253746253746, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 8696 + }, + { + "epoch": 6.947052947052947, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8838, + "step": 8697 + }, + { + "epoch": 6.9478521478521476, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 8698 + }, + { + "epoch": 6.948651348651349, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8819, + "step": 8699 + }, + { + "epoch": 6.94945054945055, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 8700 + }, + { + "epoch": 6.95024975024975, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8804, + "step": 8701 + }, + { + "epoch": 6.951048951048951, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 8702 + }, + { + "epoch": 6.951848151848152, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 8703 + }, + { + "epoch": 6.952647352647353, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 8704 + }, + { + "epoch": 6.953446553446554, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.875, + "step": 8705 + }, + { + "epoch": 6.954245754245754, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 8706 + }, + { + "epoch": 6.955044955044955, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 8707 + }, + { + "epoch": 6.955844155844156, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 8708 + }, + { + "epoch": 6.956643356643356, + "grad_norm": 
0.298828125, + "learning_rate": 0.0002, + "loss": 0.883, + "step": 8709 + }, + { + "epoch": 6.957442557442557, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 8710 + }, + { + "epoch": 6.958241758241758, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 8711 + }, + { + "epoch": 6.959040959040959, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 8712 + }, + { + "epoch": 6.95984015984016, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 8713 + }, + { + "epoch": 6.960639360639361, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 8714 + }, + { + "epoch": 6.961438561438562, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 8715 + }, + { + "epoch": 6.962237762237763, + "grad_norm": 0.3125, + "learning_rate": 0.0002, + "loss": 0.8823, + "step": 8716 + }, + { + "epoch": 6.963036963036963, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 8717 + }, + { + "epoch": 6.963836163836164, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8829, + "step": 8718 + }, + { + "epoch": 6.964635364635365, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 8719 + }, + { + "epoch": 6.965434565434565, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 8720 + }, + { + "epoch": 6.966233766233766, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 8721 + }, + { + "epoch": 6.967032967032967, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 8722 + }, + { + "epoch": 6.967832167832168, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.882, + "step": 8723 + }, + { + "epoch": 6.968631368631368, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8821, + "step": 8724 + }, + { + "epoch": 6.969430569430569, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.882, + "step": 8725 + }, + { + "epoch": 6.97022977022977, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 8726 + }, + { + "epoch": 6.9710289710289715, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 8727 + }, + { + "epoch": 6.971828171828172, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 8728 + }, + { + "epoch": 6.972627372627373, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 8729 + }, + { + "epoch": 6.973426573426574, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 8730 + }, + { + "epoch": 6.974225774225774, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 8731 + }, + { + "epoch": 6.975024975024975, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 8732 + }, + { + "epoch": 6.975824175824176, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8774, + "step": 8733 + }, + { + "epoch": 6.976623376623377, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8925, + "step": 8734 + }, + { + "epoch": 6.977422577422577, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 8735 + }, + { + "epoch": 6.978221778221778, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 8736 + }, + { + "epoch": 6.979020979020979, + "grad_norm": 0.314453125, + "learning_rate": 
0.0002, + "loss": 0.8886, + "step": 8737 + }, + { + "epoch": 6.97982017982018, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8819, + "step": 8738 + }, + { + "epoch": 6.98061938061938, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 8739 + }, + { + "epoch": 6.981418581418581, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8836, + "step": 8740 + }, + { + "epoch": 6.982217782217782, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8829, + "step": 8741 + }, + { + "epoch": 6.983016983016983, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8742 + }, + { + "epoch": 6.983816183816184, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 8743 + }, + { + "epoch": 6.984615384615385, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8996, + "step": 8744 + }, + { + "epoch": 6.985414585414586, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 8745 + }, + { + "epoch": 6.986213786213786, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 8746 + }, + { + "epoch": 6.987012987012987, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 8747 + }, + { + "epoch": 6.987812187812188, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 8748 + }, + { + "epoch": 6.988611388611389, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8824, + "step": 8749 + }, + { + "epoch": 6.989410589410589, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 8750 + }, + { + "epoch": 6.99020979020979, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 8751 + }, + { + "epoch": 6.991008991008991, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 8752 + }, + { + "epoch": 6.991808191808191, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8753 + }, + { + "epoch": 6.992607392607392, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 8754 + }, + { + "epoch": 6.993406593406593, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8883, + "step": 8755 + }, + { + "epoch": 6.9942057942057945, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8698, + "step": 8756 + }, + { + "epoch": 6.995004995004995, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8757 + }, + { + "epoch": 6.995804195804196, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8783, + "step": 8758 + }, + { + "epoch": 6.996603396603397, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 8759 + }, + { + "epoch": 6.997402597402598, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 8760 + }, + { + "epoch": 6.998201798201798, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 8761 + }, + { + "epoch": 6.999000999000999, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 8762 + }, + { + "epoch": 6.9998001998002, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 8763 + }, + { + "epoch": 7.0, + "grad_norm": 0.08837890625, + "learning_rate": 0.0002, + "loss": 0.22, + "step": 8764 + }, + { + "epoch": 7.000799200799201, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.9319, + "step": 8765 + }, 
+ { + "epoch": 7.001598401598401, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8889, + "step": 8766 + }, + { + "epoch": 7.002397602397602, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8969, + "step": 8767 + }, + { + "epoch": 7.003196803196803, + "grad_norm": 1.5078125, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 8768 + }, + { + "epoch": 7.003996003996004, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8816, + "step": 8769 + }, + { + "epoch": 7.0047952047952045, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 8770 + }, + { + "epoch": 7.0055944055944055, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 8771 + }, + { + "epoch": 7.0063936063936065, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 8772 + }, + { + "epoch": 7.007192807192808, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 8773 + }, + { + "epoch": 7.007992007992008, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8836, + "step": 8774 + }, + { + "epoch": 7.008791208791209, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 8775 + }, + { + "epoch": 7.00959040959041, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 8776 + }, + { + "epoch": 7.01038961038961, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.8838, + "step": 8777 + }, + { + "epoch": 7.011188811188811, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 8778 + }, + { + "epoch": 7.011988011988012, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.884, + "step": 8779 + }, + { + "epoch": 7.012787212787213, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 8780 + }, + { + "epoch": 7.013586413586413, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8805, + "step": 8781 + }, + { + "epoch": 7.014385614385614, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8801, + "step": 8782 + }, + { + "epoch": 7.015184815184815, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8809, + "step": 8783 + }, + { + "epoch": 7.015984015984016, + "grad_norm": 0.34765625, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 8784 + }, + { + "epoch": 7.0167832167832165, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8954, + "step": 8785 + }, + { + "epoch": 7.0175824175824175, + "grad_norm": 0.333984375, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 8786 + }, + { + "epoch": 7.018381618381619, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 8787 + }, + { + "epoch": 7.01918081918082, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8809, + "step": 8788 + }, + { + "epoch": 7.01998001998002, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 8789 + }, + { + "epoch": 7.020779220779221, + "grad_norm": 0.78515625, + "learning_rate": 0.0002, + "loss": 0.908, + "step": 8790 + }, + { + "epoch": 7.021578421578422, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 8791 + }, + { + "epoch": 7.022377622377622, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8822, + "step": 8792 + }, + { + "epoch": 7.023176823176823, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 8793 + }, + { + "epoch": 
7.023976023976024, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8836, + "step": 8794 + }, + { + "epoch": 7.024775224775225, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 8795 + }, + { + "epoch": 7.025574425574425, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 8796 + }, + { + "epoch": 7.026373626373626, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.876, + "step": 8797 + }, + { + "epoch": 7.027172827172827, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 8798 + }, + { + "epoch": 7.027972027972028, + "grad_norm": 0.345703125, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 8799 + }, + { + "epoch": 7.0287712287712285, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 8800 + }, + { + "epoch": 7.0295704295704295, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 8801 + }, + { + "epoch": 7.030369630369631, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.882, + "step": 8802 + }, + { + "epoch": 7.031168831168831, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 8803 + }, + { + "epoch": 7.031968031968032, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8795, + "step": 8804 + }, + { + "epoch": 7.032767232767233, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.885, + "step": 8805 + }, + { + "epoch": 7.033566433566434, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 8806 + }, + { + "epoch": 7.034365634365634, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 8807 + }, + { + "epoch": 7.035164835164835, + "grad_norm": 0.33984375, + "learning_rate": 0.0002, + "loss": 0.8792, + "step": 8808 + }, + { + "epoch": 7.035964035964036, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 8809 + }, + { + "epoch": 7.036763236763237, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 8810 + }, + { + "epoch": 7.037562437562437, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 8811 + }, + { + "epoch": 7.038361638361638, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 8812 + }, + { + "epoch": 7.039160839160839, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 8813 + }, + { + "epoch": 7.03996003996004, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8756, + "step": 8814 + }, + { + "epoch": 7.0407592407592405, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8812, + "step": 8815 + }, + { + "epoch": 7.041558441558442, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8808, + "step": 8816 + }, + { + "epoch": 7.042357642357643, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8937, + "step": 8817 + }, + { + "epoch": 7.043156843156843, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8788, + "step": 8818 + }, + { + "epoch": 7.043956043956044, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.883, + "step": 8819 + }, + { + "epoch": 7.044755244755245, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 8820 + }, + { + "epoch": 7.045554445554446, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 8821 + }, + { + "epoch": 7.046353646353646, + 
"grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8719, + "step": 8822 + }, + { + "epoch": 7.047152847152847, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 8823 + }, + { + "epoch": 7.047952047952048, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 8824 + }, + { + "epoch": 7.048751248751249, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 8825 + }, + { + "epoch": 7.049550449550449, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 8826 + }, + { + "epoch": 7.05034965034965, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8883, + "step": 8827 + }, + { + "epoch": 7.051148851148851, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 8828 + }, + { + "epoch": 7.0519480519480515, + "grad_norm": 0.31640625, + "learning_rate": 0.0002, + "loss": 0.8771, + "step": 8829 + }, + { + "epoch": 7.052747252747253, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8811, + "step": 8830 + }, + { + "epoch": 7.053546453546454, + "grad_norm": 0.271484375, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 8831 + }, + { + "epoch": 7.054345654345655, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 8832 + }, + { + "epoch": 7.055144855144855, + "grad_norm": 0.341796875, + "learning_rate": 0.0002, + "loss": 0.8846, + "step": 8833 + }, + { + "epoch": 7.055944055944056, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8822, + "step": 8834 + }, + { + "epoch": 7.056743256743257, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 8835 + }, + { + "epoch": 7.057542457542458, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8836 + }, + { + "epoch": 7.058341658341658, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 8837 + }, + { + "epoch": 7.059140859140859, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 8838 + }, + { + "epoch": 7.05994005994006, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 8839 + }, + { + "epoch": 7.060739260739261, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8817, + "step": 8840 + }, + { + "epoch": 7.061538461538461, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8792, + "step": 8841 + }, + { + "epoch": 7.062337662337662, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8822, + "step": 8842 + }, + { + "epoch": 7.063136863136863, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 8843 + }, + { + "epoch": 7.0639360639360635, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8795, + "step": 8844 + }, + { + "epoch": 7.064735264735265, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 8845 + }, + { + "epoch": 7.065534465534466, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.879, + "step": 8846 + }, + { + "epoch": 7.066333666333667, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 8847 + }, + { + "epoch": 7.067132867132867, + "grad_norm": 3.140625, + "learning_rate": 0.0002, + "loss": 0.9294, + "step": 8848 + }, + { + "epoch": 7.067932067932068, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 8849 + }, + { + "epoch": 7.068731268731269, + "grad_norm": 
0.302734375, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 8850 + }, + { + "epoch": 7.06953046953047, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 8851 + }, + { + "epoch": 7.07032967032967, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 8852 + }, + { + "epoch": 7.071128871128871, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 8853 + }, + { + "epoch": 7.071928071928072, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8847, + "step": 8854 + }, + { + "epoch": 7.072727272727272, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.872, + "step": 8855 + }, + { + "epoch": 7.073526473526473, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8929, + "step": 8856 + }, + { + "epoch": 7.074325674325674, + "grad_norm": 0.4296875, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 8857 + }, + { + "epoch": 7.075124875124875, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8791, + "step": 8858 + }, + { + "epoch": 7.075924075924076, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.877, + "step": 8859 + }, + { + "epoch": 7.076723276723277, + "grad_norm": 0.5, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 8860 + }, + { + "epoch": 7.077522477522478, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.89, + "step": 8861 + }, + { + "epoch": 7.078321678321679, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8949, + "step": 8862 + }, + { + "epoch": 7.079120879120879, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 8863 + }, + { + "epoch": 7.07992007992008, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 8864 + }, + { + "epoch": 7.080719280719281, + "grad_norm": 0.484375, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 8865 + }, + { + "epoch": 7.081518481518482, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8905, + "step": 8866 + }, + { + "epoch": 7.082317682317682, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8867 + }, + { + "epoch": 7.083116883116883, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 8868 + }, + { + "epoch": 7.083916083916084, + "grad_norm": 0.384765625, + "learning_rate": 0.0002, + "loss": 0.8808, + "step": 8869 + }, + { + "epoch": 7.084715284715284, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.8817, + "step": 8870 + }, + { + "epoch": 7.085514485514485, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 8871 + }, + { + "epoch": 7.086313686313686, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 8872 + }, + { + "epoch": 7.0871128871128874, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 8873 + }, + { + "epoch": 7.087912087912088, + "grad_norm": 0.6953125, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 8874 + }, + { + "epoch": 7.088711288711289, + "grad_norm": 0.36328125, + "learning_rate": 0.0002, + "loss": 0.8883, + "step": 8875 + }, + { + "epoch": 7.08951048951049, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.8786, + "step": 8876 + }, + { + "epoch": 7.090309690309691, + "grad_norm": 0.380859375, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 8877 + }, + { + "epoch": 7.091108891108891, + "grad_norm": 0.6015625, + "learning_rate": 0.0002, + 
"loss": 0.8957, + "step": 8878 + }, + { + "epoch": 7.091908091908092, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8838, + "step": 8879 + }, + { + "epoch": 7.092707292707293, + "grad_norm": 0.6328125, + "learning_rate": 0.0002, + "loss": 0.882, + "step": 8880 + }, + { + "epoch": 7.093506493506493, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 8881 + }, + { + "epoch": 7.094305694305694, + "grad_norm": 0.72265625, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 8882 + }, + { + "epoch": 7.095104895104895, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8863, + "step": 8883 + }, + { + "epoch": 7.095904095904096, + "grad_norm": 0.67578125, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 8884 + }, + { + "epoch": 7.096703296703296, + "grad_norm": 0.5546875, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 8885 + }, + { + "epoch": 7.097502497502497, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.8932, + "step": 8886 + }, + { + "epoch": 7.098301698301698, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8924, + "step": 8887 + }, + { + "epoch": 7.0991008991008995, + "grad_norm": 0.609375, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 8888 + }, + { + "epoch": 7.0999000999001, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 8889 + }, + { + "epoch": 7.100699300699301, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 8890 + }, + { + "epoch": 7.101498501498502, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 8891 + }, + { + "epoch": 7.102297702297703, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 8892 + }, + { + "epoch": 7.103096903096903, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 8893 + }, + { + "epoch": 7.103896103896104, + "grad_norm": 0.494140625, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 8894 + }, + { + "epoch": 7.104695304695305, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 8895 + }, + { + "epoch": 7.105494505494505, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 8896 + }, + { + "epoch": 7.106293706293706, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8846, + "step": 8897 + }, + { + "epoch": 7.107092907092907, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 8898 + }, + { + "epoch": 7.107892107892108, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8803, + "step": 8899 + }, + { + "epoch": 7.108691308691308, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 8900 + }, + { + "epoch": 7.109490509490509, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 8901 + }, + { + "epoch": 7.1102897102897105, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 8902 + }, + { + "epoch": 7.1110889110889115, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 8903 + }, + { + "epoch": 7.111888111888112, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 8904 + }, + { + "epoch": 7.112687312687313, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 8905 + }, + { + "epoch": 7.113486513486514, + "grad_norm": 0.388671875, + "learning_rate": 0.0002, + "loss": 0.8855, + 
"step": 8906 + }, + { + "epoch": 7.114285714285714, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 8907 + }, + { + "epoch": 7.115084915084915, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 8908 + }, + { + "epoch": 7.115884115884116, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 8909 + }, + { + "epoch": 7.116683316683317, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 8910 + }, + { + "epoch": 7.117482517482517, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 8911 + }, + { + "epoch": 7.118281718281718, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 8912 + }, + { + "epoch": 7.119080919080919, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.9008, + "step": 8913 + }, + { + "epoch": 7.11988011988012, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8813, + "step": 8914 + }, + { + "epoch": 7.12067932067932, + "grad_norm": 0.416015625, + "learning_rate": 0.0002, + "loss": 0.8834, + "step": 8915 + }, + { + "epoch": 7.1214785214785215, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 8916 + }, + { + "epoch": 7.1222777222777225, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 8917 + }, + { + "epoch": 7.123076923076923, + "grad_norm": 0.357421875, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8918 + }, + { + "epoch": 7.123876123876124, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 8919 + }, + { + "epoch": 7.124675324675325, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 8920 + }, + { + "epoch": 7.125474525474526, + "grad_norm": 0.58203125, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 8921 + }, + { + "epoch": 7.126273726273726, + "grad_norm": 0.37109375, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 8922 + }, + { + "epoch": 7.127072927072927, + "grad_norm": 0.71875, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 8923 + }, + { + "epoch": 7.127872127872128, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 8924 + }, + { + "epoch": 7.128671328671329, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 8925 + }, + { + "epoch": 7.129470529470529, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 8926 + }, + { + "epoch": 7.13026973026973, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 8927 + }, + { + "epoch": 7.131068931068931, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 8928 + }, + { + "epoch": 7.131868131868132, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 8929 + }, + { + "epoch": 7.132667332667332, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 8930 + }, + { + "epoch": 7.1334665334665335, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 8931 + }, + { + "epoch": 7.1342657342657345, + "grad_norm": 0.47265625, + "learning_rate": 0.0002, + "loss": 0.8878, + "step": 8932 + }, + { + "epoch": 7.135064935064935, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 8933 + }, + { + "epoch": 7.135864135864136, + "grad_norm": 0.4453125, + "learning_rate": 0.0002, + "loss": 0.8841, + "step": 8934 + }, + { + 
"epoch": 7.136663336663337, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 8935 + }, + { + "epoch": 7.137462537462538, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8883, + "step": 8936 + }, + { + "epoch": 7.138261738261738, + "grad_norm": 0.51953125, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 8937 + }, + { + "epoch": 7.139060939060939, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8927, + "step": 8938 + }, + { + "epoch": 7.13986013986014, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8836, + "step": 8939 + }, + { + "epoch": 7.140659340659341, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.8737, + "step": 8940 + }, + { + "epoch": 7.141458541458541, + "grad_norm": 0.55078125, + "learning_rate": 0.0002, + "loss": 0.8902, + "step": 8941 + }, + { + "epoch": 7.142257742257742, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8846, + "step": 8942 + }, + { + "epoch": 7.143056943056943, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 8943 + }, + { + "epoch": 7.143856143856144, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8751, + "step": 8944 + }, + { + "epoch": 7.1446553446553445, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.885, + "step": 8945 + }, + { + "epoch": 7.1454545454545455, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.8846, + "step": 8946 + }, + { + "epoch": 7.1462537462537465, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.884, + "step": 8947 + }, + { + "epoch": 7.147052947052947, + "grad_norm": 0.419921875, + "learning_rate": 0.0002, + "loss": 0.8883, + "step": 8948 + }, + { + "epoch": 7.147852147852148, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 8949 + }, + { + "epoch": 7.148651348651349, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8793, + "step": 8950 + }, + { + "epoch": 7.14945054945055, + "grad_norm": 0.59765625, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 8951 + }, + { + "epoch": 7.15024975024975, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8824, + "step": 8952 + }, + { + "epoch": 7.151048951048951, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.8836, + "step": 8953 + }, + { + "epoch": 7.151848151848152, + "grad_norm": 0.400390625, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 8954 + }, + { + "epoch": 7.152647352647353, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 8955 + }, + { + "epoch": 7.153446553446553, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 8956 + }, + { + "epoch": 7.154245754245754, + "grad_norm": 0.671875, + "learning_rate": 0.0002, + "loss": 0.8828, + "step": 8957 + }, + { + "epoch": 7.155044955044955, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 8958 + }, + { + "epoch": 7.1558441558441555, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 8959 + }, + { + "epoch": 7.1566433566433565, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 8960 + }, + { + "epoch": 7.1574425574425575, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 8961 + }, + { + "epoch": 7.158241758241759, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8841, + "step": 8962 + }, + { + "epoch": 
7.159040959040959, + "grad_norm": 0.55859375, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 8963 + }, + { + "epoch": 7.15984015984016, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 8964 + }, + { + "epoch": 7.160639360639361, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 8965 + }, + { + "epoch": 7.161438561438562, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 8966 + }, + { + "epoch": 7.162237762237762, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.892, + "step": 8967 + }, + { + "epoch": 7.163036963036963, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 8968 + }, + { + "epoch": 7.163836163836164, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.876, + "step": 8969 + }, + { + "epoch": 7.164635364635364, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8919, + "step": 8970 + }, + { + "epoch": 7.165434565434565, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 8971 + }, + { + "epoch": 7.166233766233766, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.891, + "step": 8972 + }, + { + "epoch": 7.167032967032967, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 8973 + }, + { + "epoch": 7.1678321678321675, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.8796, + "step": 8974 + }, + { + "epoch": 7.1686313686313685, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 8975 + }, + { + "epoch": 7.1694305694305696, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 8976 + }, + { + "epoch": 7.170229770229771, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 8977 + }, + { + "epoch": 7.171028971028971, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.8953, + "step": 8978 + }, + { + "epoch": 7.171828171828172, + "grad_norm": 0.43359375, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 8979 + }, + { + "epoch": 7.172627372627373, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8797, + "step": 8980 + }, + { + "epoch": 7.173426573426573, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.8802, + "step": 8981 + }, + { + "epoch": 7.174225774225774, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.8857, + "step": 8982 + }, + { + "epoch": 7.175024975024975, + "grad_norm": 0.44140625, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 8983 + }, + { + "epoch": 7.175824175824176, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8914, + "step": 8984 + }, + { + "epoch": 7.176623376623376, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 8985 + }, + { + "epoch": 7.177422577422577, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8829, + "step": 8986 + }, + { + "epoch": 7.178221778221778, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 8987 + }, + { + "epoch": 7.179020979020979, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8816, + "step": 8988 + }, + { + "epoch": 7.1798201798201795, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8781, + "step": 8989 + }, + { + "epoch": 7.1806193806193805, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 8990 + }, + { + "epoch": 7.181418581418582, 
+ "grad_norm": 0.671875, + "learning_rate": 0.0002, + "loss": 0.8908, + "step": 8991 + }, + { + "epoch": 7.182217782217783, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.8753, + "step": 8992 + }, + { + "epoch": 7.183016983016983, + "grad_norm": 0.68359375, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 8993 + }, + { + "epoch": 7.183816183816184, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8771, + "step": 8994 + }, + { + "epoch": 7.184615384615385, + "grad_norm": 0.65625, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 8995 + }, + { + "epoch": 7.185414585414585, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 8996 + }, + { + "epoch": 7.186213786213786, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 8997 + }, + { + "epoch": 7.187012987012987, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 8998 + }, + { + "epoch": 7.187812187812188, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.8892, + "step": 8999 + }, + { + "epoch": 7.188611388611388, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.896, + "step": 9000 + }, + { + "epoch": 7.189410589410589, + "grad_norm": 0.66015625, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 9001 + }, + { + "epoch": 7.19020979020979, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 9002 + }, + { + "epoch": 7.191008991008991, + "grad_norm": 0.640625, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 9003 + }, + { + "epoch": 7.1918081918081915, + "grad_norm": 0.46875, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 9004 + }, + { + "epoch": 7.192607392607393, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.8866, + "step": 9005 + }, + { + "epoch": 7.193406593406594, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8877, + "step": 9006 + }, + { + "epoch": 7.194205794205795, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 9007 + }, + { + "epoch": 7.195004995004995, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 9008 + }, + { + "epoch": 7.195804195804196, + "grad_norm": 0.5625, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 9009 + }, + { + "epoch": 7.196603396603397, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8841, + "step": 9010 + }, + { + "epoch": 7.197402597402597, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 9011 + }, + { + "epoch": 7.198201798201798, + "grad_norm": 0.48046875, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 9012 + }, + { + "epoch": 7.199000999000999, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 9013 + }, + { + "epoch": 7.1998001998002, + "grad_norm": 0.466796875, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 9014 + }, + { + "epoch": 7.2005994005994, + "grad_norm": 0.546875, + "learning_rate": 0.0002, + "loss": 0.8838, + "step": 9015 + }, + { + "epoch": 7.201398601398601, + "grad_norm": 0.423828125, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 9016 + }, + { + "epoch": 7.202197802197802, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 9017 + }, + { + "epoch": 7.202997002997003, + "grad_norm": 0.41796875, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 9018 + }, + { + "epoch": 7.203796203796204, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + 
"loss": 0.8869, + "step": 9019 + }, + { + "epoch": 7.204595404595405, + "grad_norm": 0.427734375, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 9020 + }, + { + "epoch": 7.205394605394606, + "grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 9021 + }, + { + "epoch": 7.206193806193806, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 9022 + }, + { + "epoch": 7.206993006993007, + "grad_norm": 0.54296875, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 9023 + }, + { + "epoch": 7.207792207792208, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8791, + "step": 9024 + }, + { + "epoch": 7.208591408591409, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 9025 + }, + { + "epoch": 7.209390609390609, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 9026 + }, + { + "epoch": 7.21018981018981, + "grad_norm": 0.69140625, + "learning_rate": 0.0002, + "loss": 0.8872, + "step": 9027 + }, + { + "epoch": 7.210989010989011, + "grad_norm": 0.4140625, + "learning_rate": 0.0002, + "loss": 0.9425, + "step": 9028 + }, + { + "epoch": 7.211788211788212, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 0.8894, + "step": 9029 + }, + { + "epoch": 7.212587412587412, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 9030 + }, + { + "epoch": 7.213386613386613, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 9031 + }, + { + "epoch": 7.214185814185814, + "grad_norm": 0.490234375, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 9032 + }, + { + "epoch": 7.2149850149850145, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.8841, + "step": 9033 + }, + { + "epoch": 7.215784215784216, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 9034 + }, + { + "epoch": 7.216583416583417, + "grad_norm": 0.6640625, + "learning_rate": 0.0002, + "loss": 0.8938, + "step": 9035 + }, + { + "epoch": 7.217382617382618, + "grad_norm": 0.462890625, + "learning_rate": 0.0002, + "loss": 0.8786, + "step": 9036 + }, + { + "epoch": 7.218181818181818, + "grad_norm": 0.65234375, + "learning_rate": 0.0002, + "loss": 0.8838, + "step": 9037 + }, + { + "epoch": 7.218981018981019, + "grad_norm": 0.4609375, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 9038 + }, + { + "epoch": 7.21978021978022, + "grad_norm": 0.68359375, + "learning_rate": 0.0002, + "loss": 0.8855, + "step": 9039 + }, + { + "epoch": 7.220579420579421, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 9040 + }, + { + "epoch": 7.221378621378621, + "grad_norm": 0.6484375, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 9041 + }, + { + "epoch": 7.222177822177822, + "grad_norm": 0.486328125, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 9042 + }, + { + "epoch": 7.222977022977023, + "grad_norm": 0.625, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 9043 + }, + { + "epoch": 7.223776223776224, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8798, + "step": 9044 + }, + { + "epoch": 7.224575424575424, + "grad_norm": 0.6171875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 9045 + }, + { + "epoch": 7.225374625374625, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8865, + "step": 9046 + }, + { + "epoch": 7.226173826173826, + "grad_norm": 0.60546875, + "learning_rate": 0.0002, + "loss": 0.8752, + "step": 9047 + 
}, + { + "epoch": 7.226973026973027, + "grad_norm": 0.498046875, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 9048 + }, + { + "epoch": 7.227772227772228, + "grad_norm": 0.5859375, + "learning_rate": 0.0002, + "loss": 0.882, + "step": 9049 + }, + { + "epoch": 7.228571428571429, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8743, + "step": 9050 + }, + { + "epoch": 7.22937062937063, + "grad_norm": 0.5703125, + "learning_rate": 0.0002, + "loss": 0.8798, + "step": 9051 + }, + { + "epoch": 7.23016983016983, + "grad_norm": 0.46484375, + "learning_rate": 0.0002, + "loss": 0.8724, + "step": 9052 + }, + { + "epoch": 7.230969030969031, + "grad_norm": 0.59375, + "learning_rate": 0.0002, + "loss": 0.8824, + "step": 9053 + }, + { + "epoch": 7.231768231768232, + "grad_norm": 0.435546875, + "learning_rate": 0.0002, + "loss": 0.8828, + "step": 9054 + }, + { + "epoch": 7.232567432567433, + "grad_norm": 0.61328125, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 9055 + }, + { + "epoch": 7.233366633366633, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.883, + "step": 9056 + }, + { + "epoch": 7.234165834165834, + "grad_norm": 0.63671875, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 9057 + }, + { + "epoch": 7.234965034965035, + "grad_norm": 0.470703125, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 9058 + }, + { + "epoch": 7.235764235764236, + "grad_norm": 0.62109375, + "learning_rate": 0.0002, + "loss": 0.885, + "step": 9059 + }, + { + "epoch": 7.236563436563436, + "grad_norm": 0.453125, + "learning_rate": 0.0002, + "loss": 0.8823, + "step": 9060 + }, + { + "epoch": 7.237362637362637, + "grad_norm": 0.6796875, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 9061 + }, + { + "epoch": 7.2381618381618384, + "grad_norm": 0.439453125, + "learning_rate": 0.0002, + "loss": 0.8956, + "step": 9062 + }, + { + "epoch": 7.238961038961039, + "grad_norm": 0.73046875, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 9063 + }, + { + "epoch": 7.23976023976024, + "grad_norm": 0.48828125, + "learning_rate": 0.0002, + "loss": 0.8783, + "step": 9064 + }, + { + "epoch": 7.240559440559441, + "grad_norm": 0.81640625, + "learning_rate": 0.0002, + "loss": 0.9383, + "step": 9065 + }, + { + "epoch": 7.241358641358642, + "grad_norm": 0.482421875, + "learning_rate": 0.0002, + "loss": 0.8843, + "step": 9066 + }, + { + "epoch": 7.242157842157842, + "grad_norm": 0.7421875, + "learning_rate": 0.0002, + "loss": 0.8893, + "step": 9067 + }, + { + "epoch": 7.242957042957043, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 9068 + }, + { + "epoch": 7.243756243756244, + "grad_norm": 0.83203125, + "learning_rate": 0.0002, + "loss": 0.8982, + "step": 9069 + }, + { + "epoch": 7.244555444555445, + "grad_norm": 0.5390625, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 9070 + }, + { + "epoch": 7.245354645354645, + "grad_norm": 0.80859375, + "learning_rate": 0.0002, + "loss": 0.8963, + "step": 9071 + }, + { + "epoch": 7.246153846153846, + "grad_norm": 0.53125, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 9072 + }, + { + "epoch": 7.246953046953047, + "grad_norm": 0.703125, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 9073 + }, + { + "epoch": 7.247752247752247, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 9074 + }, + { + "epoch": 7.248551448551448, + "grad_norm": 0.62890625, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 9075 + }, + { + "epoch": 7.249350649350649, + 
"grad_norm": 0.51171875, + "learning_rate": 0.0002, + "loss": 0.8936, + "step": 9076 + }, + { + "epoch": 7.2501498501498505, + "grad_norm": 0.56640625, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 9077 + }, + { + "epoch": 7.250949050949051, + "grad_norm": 0.515625, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 9078 + }, + { + "epoch": 7.251748251748252, + "grad_norm": 0.53515625, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 9079 + }, + { + "epoch": 7.252547452547453, + "grad_norm": 0.451171875, + "learning_rate": 0.0002, + "loss": 0.8819, + "step": 9080 + }, + { + "epoch": 7.253346653346654, + "grad_norm": 0.52734375, + "learning_rate": 0.0002, + "loss": 0.8842, + "step": 9081 + }, + { + "epoch": 7.254145854145854, + "grad_norm": 0.458984375, + "learning_rate": 0.0002, + "loss": 0.8838, + "step": 9082 + }, + { + "epoch": 7.254945054945055, + "grad_norm": 0.50390625, + "learning_rate": 0.0002, + "loss": 0.8837, + "step": 9083 + }, + { + "epoch": 7.255744255744256, + "grad_norm": 0.455078125, + "learning_rate": 0.0002, + "loss": 0.894, + "step": 9084 + }, + { + "epoch": 7.256543456543456, + "grad_norm": 0.478515625, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 9085 + }, + { + "epoch": 7.257342657342657, + "grad_norm": 0.404296875, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 9086 + }, + { + "epoch": 7.258141858141858, + "grad_norm": 0.49609375, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 9087 + }, + { + "epoch": 7.258941058941059, + "grad_norm": 0.41015625, + "learning_rate": 0.0002, + "loss": 0.8757, + "step": 9088 + }, + { + "epoch": 7.259740259740259, + "grad_norm": 0.4375, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 9089 + }, + { + "epoch": 7.26053946053946, + "grad_norm": 0.390625, + "learning_rate": 0.0002, + "loss": 0.8822, + "step": 9090 + }, + { + "epoch": 7.2613386613386615, + "grad_norm": 1.8203125, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 9091 + }, + { + "epoch": 7.2621378621378625, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8788, + "step": 9092 + }, + { + "epoch": 7.262937062937063, + "grad_norm": 1.2890625, + "learning_rate": 0.0002, + "loss": 0.9082, + "step": 9093 + }, + { + "epoch": 7.263736263736264, + "grad_norm": 0.431640625, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 9094 + }, + { + "epoch": 7.264535464535465, + "grad_norm": 0.314453125, + "learning_rate": 0.0002, + "loss": 0.8867, + "step": 9095 + }, + { + "epoch": 7.265334665334665, + "grad_norm": 0.40625, + "learning_rate": 0.0002, + "loss": 0.8884, + "step": 9096 + }, + { + "epoch": 7.266133866133866, + "grad_norm": 0.34375, + "learning_rate": 0.0002, + "loss": 0.8933, + "step": 9097 + }, + { + "epoch": 7.266933066933067, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8951, + "step": 9098 + }, + { + "epoch": 7.267732267732268, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 9099 + }, + { + "epoch": 7.268531468531468, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 9100 + }, + { + "epoch": 7.269330669330669, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 9101 + }, + { + "epoch": 7.27012987012987, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8827, + "step": 9102 + }, + { + "epoch": 7.270929070929071, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8776, + "step": 9103 + }, + { + "epoch": 7.271728271728271, + "grad_norm": 0.404296875, + 
"learning_rate": 0.0002, + "loss": 0.8943, + "step": 9104 + }, + { + "epoch": 7.2725274725274724, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8913, + "step": 9105 + }, + { + "epoch": 7.2733266733266735, + "grad_norm": 0.412109375, + "learning_rate": 0.0002, + "loss": 0.8795, + "step": 9106 + }, + { + "epoch": 7.2741258741258745, + "grad_norm": 0.365234375, + "learning_rate": 0.0002, + "loss": 0.8763, + "step": 9107 + }, + { + "epoch": 7.274925074925075, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 9108 + }, + { + "epoch": 7.275724275724276, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 9109 + }, + { + "epoch": 7.276523476523477, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8748, + "step": 9110 + }, + { + "epoch": 7.277322677322678, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8912, + "step": 9111 + }, + { + "epoch": 7.278121878121878, + "grad_norm": 0.396484375, + "learning_rate": 0.0002, + "loss": 0.8767, + "step": 9112 + }, + { + "epoch": 7.278921078921079, + "grad_norm": 0.40234375, + "learning_rate": 0.0002, + "loss": 0.8799, + "step": 9113 + }, + { + "epoch": 7.27972027972028, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 9114 + }, + { + "epoch": 7.28051948051948, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 9115 + }, + { + "epoch": 7.281318681318681, + "grad_norm": 0.369140625, + "learning_rate": 0.0002, + "loss": 0.881, + "step": 9116 + }, + { + "epoch": 7.282117882117882, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 9117 + }, + { + "epoch": 7.282917082917083, + "grad_norm": 0.392578125, + "learning_rate": 0.0002, + "loss": 0.8915, + "step": 9118 + }, + { + "epoch": 7.283716283716283, + "grad_norm": 0.29296875, + "learning_rate": 0.0002, + "loss": 0.8916, + "step": 9119 + }, + { + "epoch": 7.2845154845154845, + "grad_norm": 0.3828125, + "learning_rate": 0.0002, + "loss": 0.8796, + "step": 9120 + }, + { + "epoch": 7.2853146853146855, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8903, + "step": 9121 + }, + { + "epoch": 7.2861138861138866, + "grad_norm": 0.375, + "learning_rate": 0.0002, + "loss": 0.8792, + "step": 9122 + }, + { + "epoch": 7.286913086913087, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8838, + "step": 9123 + }, + { + "epoch": 7.287712287712288, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8879, + "step": 9124 + }, + { + "epoch": 7.288511488511489, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8815, + "step": 9125 + }, + { + "epoch": 7.289310689310689, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 9126 + }, + { + "epoch": 7.29010989010989, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8765, + "step": 9127 + }, + { + "epoch": 7.290909090909091, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.886, + "step": 9128 + }, + { + "epoch": 7.291708291708292, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8939, + "step": 9129 + }, + { + "epoch": 7.292507492507492, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8825, + "step": 9130 + }, + { + "epoch": 7.293306693306693, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 9131 + }, + { + "epoch": 7.294105894105894, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, 
+ "loss": 0.8803, + "step": 9132 + }, + { + "epoch": 7.294905094905095, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.883, + "step": 9133 + }, + { + "epoch": 7.2957042957042955, + "grad_norm": 1.9609375, + "learning_rate": 0.0002, + "loss": 0.9017, + "step": 9134 + }, + { + "epoch": 7.2965034965034965, + "grad_norm": 0.30078125, + "learning_rate": 0.0002, + "loss": 0.8909, + "step": 9135 + }, + { + "epoch": 7.2973026973026975, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8828, + "step": 9136 + }, + { + "epoch": 7.298101898101898, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8873, + "step": 9137 + }, + { + "epoch": 7.298901098901099, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8787, + "step": 9138 + }, + { + "epoch": 7.2997002997003, + "grad_norm": 0.23828125, + "learning_rate": 0.0002, + "loss": 0.8897, + "step": 9139 + }, + { + "epoch": 7.300499500499501, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 9140 + }, + { + "epoch": 7.301298701298701, + "grad_norm": 0.5234375, + "learning_rate": 0.0002, + "loss": 0.9, + "step": 9141 + }, + { + "epoch": 7.302097902097902, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8816, + "step": 9142 + }, + { + "epoch": 7.302897102897103, + "grad_norm": 0.2470703125, + "learning_rate": 0.0002, + "loss": 0.8821, + "step": 9143 + }, + { + "epoch": 7.303696303696304, + "grad_norm": 0.3359375, + "learning_rate": 0.0002, + "loss": 0.8854, + "step": 9144 + }, + { + "epoch": 7.304495504495504, + "grad_norm": 0.5078125, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 9145 + }, + { + "epoch": 7.305294705294705, + "grad_norm": 0.287109375, + "learning_rate": 0.0002, + "loss": 0.8983, + "step": 9146 + }, + { + "epoch": 7.306093906093906, + "grad_norm": 0.359375, + "learning_rate": 0.0002, + "loss": 0.8895, + "step": 9147 + }, + { + "epoch": 7.3068931068931064, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8853, + "step": 9148 + }, + { + "epoch": 7.3076923076923075, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 9149 + }, + { + "epoch": 7.3084915084915085, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 9150 + }, + { + "epoch": 7.30929070929071, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 9151 + }, + { + "epoch": 7.31008991008991, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8805, + "step": 9152 + }, + { + "epoch": 7.310889110889111, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.887, + "step": 9153 + }, + { + "epoch": 7.311688311688312, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 9154 + }, + { + "epoch": 7.312487512487513, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8904, + "step": 9155 + }, + { + "epoch": 7.313286713286713, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.882, + "step": 9156 + }, + { + "epoch": 7.314085914085914, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8959, + "step": 9157 + }, + { + "epoch": 7.314885114885115, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.897, + "step": 9158 + }, + { + "epoch": 7.315684315684316, + "grad_norm": 0.318359375, + "learning_rate": 0.0002, + "loss": 0.8852, + "step": 9159 + }, + { + "epoch": 7.316483516483516, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.9007, + "step": 
9160 + }, + { + "epoch": 7.317282717282717, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8775, + "step": 9161 + }, + { + "epoch": 7.318081918081918, + "grad_norm": 0.42578125, + "learning_rate": 0.0002, + "loss": 0.8955, + "step": 9162 + }, + { + "epoch": 7.3188811188811185, + "grad_norm": 0.421875, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 9163 + }, + { + "epoch": 7.3196803196803195, + "grad_norm": 0.2734375, + "learning_rate": 0.0002, + "loss": 0.8845, + "step": 9164 + }, + { + "epoch": 7.3204795204795206, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8886, + "step": 9165 + }, + { + "epoch": 7.321278721278722, + "grad_norm": 0.296875, + "learning_rate": 0.0002, + "loss": 0.8826, + "step": 9166 + }, + { + "epoch": 7.322077922077922, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8816, + "step": 9167 + }, + { + "epoch": 7.322877122877123, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 9168 + }, + { + "epoch": 7.323676323676324, + "grad_norm": 0.255859375, + "learning_rate": 0.0002, + "loss": 0.8839, + "step": 9169 + }, + { + "epoch": 7.324475524475525, + "grad_norm": 0.28515625, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 9170 + }, + { + "epoch": 7.325274725274725, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8818, + "step": 9171 + }, + { + "epoch": 7.326073926073926, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8869, + "step": 9172 + }, + { + "epoch": 7.326873126873127, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8885, + "step": 9173 + }, + { + "epoch": 7.327672327672328, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.888, + "step": 9174 + }, + { + "epoch": 7.328471528471528, + "grad_norm": 0.28125, + "learning_rate": 0.0002, + "loss": 0.8844, + "step": 9175 + }, + { + "epoch": 7.329270729270729, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.893, + "step": 9176 + }, + { + "epoch": 7.33006993006993, + "grad_norm": 0.279296875, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 9177 + }, + { + "epoch": 7.3308691308691305, + "grad_norm": 0.2392578125, + "learning_rate": 0.0002, + "loss": 0.8759, + "step": 9178 + }, + { + "epoch": 7.3316683316683315, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8917, + "step": 9179 + }, + { + "epoch": 7.332467532467533, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8942, + "step": 9180 + }, + { + "epoch": 7.333266733266734, + "grad_norm": 0.2451171875, + "learning_rate": 0.0002, + "loss": 0.8795, + "step": 9181 + }, + { + "epoch": 7.334065934065934, + "grad_norm": 0.27734375, + "learning_rate": 0.0002, + "loss": 0.8875, + "step": 9182 + }, + { + "epoch": 7.334865134865135, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8861, + "step": 9183 + }, + { + "epoch": 7.335664335664336, + "grad_norm": 0.2421875, + "learning_rate": 0.0002, + "loss": 0.8765, + "step": 9184 + }, + { + "epoch": 7.336463536463537, + "grad_norm": 0.291015625, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 9185 + }, + { + "epoch": 7.337262737262737, + "grad_norm": 0.24609375, + "learning_rate": 0.0002, + "loss": 0.889, + "step": 9186 + }, + { + "epoch": 7.338061938061938, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8833, + "step": 9187 + }, + { + "epoch": 7.338861138861139, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8881, + "step": 9188 + }, + { + 
"epoch": 7.339660339660339, + "grad_norm": 0.236328125, + "learning_rate": 0.0002, + "loss": 0.8835, + "step": 9189 + }, + { + "epoch": 7.34045954045954, + "grad_norm": 0.2578125, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 9190 + }, + { + "epoch": 7.341258741258741, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8779, + "step": 9191 + }, + { + "epoch": 7.342057942057942, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 9192 + }, + { + "epoch": 7.3428571428571425, + "grad_norm": 0.2890625, + "learning_rate": 0.0002, + "loss": 0.8831, + "step": 9193 + }, + { + "epoch": 7.343656343656344, + "grad_norm": 0.240234375, + "learning_rate": 0.0002, + "loss": 0.8816, + "step": 9194 + }, + { + "epoch": 7.344455544455545, + "grad_norm": 2.734375, + "learning_rate": 0.0002, + "loss": 0.9191, + "step": 9195 + }, + { + "epoch": 7.345254745254746, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.8858, + "step": 9196 + }, + { + "epoch": 7.346053946053946, + "grad_norm": 0.37890625, + "learning_rate": 0.0002, + "loss": 0.8871, + "step": 9197 + }, + { + "epoch": 7.346853146853147, + "grad_norm": 0.265625, + "learning_rate": 0.0002, + "loss": 0.8962, + "step": 9198 + }, + { + "epoch": 7.347652347652348, + "grad_norm": 0.3984375, + "learning_rate": 0.0002, + "loss": 0.882, + "step": 9199 + }, + { + "epoch": 7.348451548451548, + "grad_norm": 0.474609375, + "learning_rate": 0.0002, + "loss": 0.8742, + "step": 9200 + }, + { + "epoch": 7.349250749250749, + "grad_norm": 0.267578125, + "learning_rate": 0.0002, + "loss": 0.8926, + "step": 9201 + }, + { + "epoch": 7.35004995004995, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 9202 + }, + { + "epoch": 7.350849150849151, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8849, + "step": 9203 + }, + { + "epoch": 7.351648351648351, + "grad_norm": 0.447265625, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 9204 + }, + { + "epoch": 7.352447552447552, + "grad_norm": 0.275390625, + "learning_rate": 0.0002, + "loss": 0.8928, + "step": 9205 + }, + { + "epoch": 7.353246753246753, + "grad_norm": 0.443359375, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 9206 + }, + { + "epoch": 7.354045954045954, + "grad_norm": 0.32421875, + "learning_rate": 0.0002, + "loss": 0.8805, + "step": 9207 + }, + { + "epoch": 7.3548451548451546, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8775, + "step": 9208 + }, + { + "epoch": 7.355644355644356, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8851, + "step": 9209 + }, + { + "epoch": 7.356443556443557, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8899, + "step": 9210 + }, + { + "epoch": 7.357242757242757, + "grad_norm": 0.353515625, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 9211 + }, + { + "epoch": 7.358041958041958, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 9212 + }, + { + "epoch": 7.358841158841159, + "grad_norm": 0.259765625, + "learning_rate": 0.0002, + "loss": 0.8923, + "step": 9213 + }, + { + "epoch": 7.35964035964036, + "grad_norm": 0.3046875, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 9214 + }, + { + "epoch": 7.36043956043956, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8794, + "step": 9215 + }, + { + "epoch": 7.361238761238761, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8943, + "step": 9216 + }, + { + "epoch": 7.362037962037962, + 
"grad_norm": 0.25, + "learning_rate": 0.0002, + "loss": 0.8805, + "step": 9217 + }, + { + "epoch": 7.362837162837163, + "grad_norm": 0.302734375, + "learning_rate": 0.0002, + "loss": 0.8931, + "step": 9218 + }, + { + "epoch": 7.363636363636363, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8918, + "step": 9219 + }, + { + "epoch": 7.364435564435564, + "grad_norm": 0.2333984375, + "learning_rate": 0.0002, + "loss": 0.8981, + "step": 9220 + }, + { + "epoch": 7.365234765234765, + "grad_norm": 0.337890625, + "learning_rate": 0.0002, + "loss": 0.8896, + "step": 9221 + }, + { + "epoch": 7.366033966033966, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.88, + "step": 9222 + }, + { + "epoch": 7.366833166833167, + "grad_norm": 0.248046875, + "learning_rate": 0.0002, + "loss": 0.8836, + "step": 9223 + }, + { + "epoch": 7.367632367632368, + "grad_norm": 0.310546875, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 9224 + }, + { + "epoch": 7.368431568431569, + "grad_norm": 0.294921875, + "learning_rate": 0.0002, + "loss": 0.8848, + "step": 9225 + }, + { + "epoch": 7.36923076923077, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8801, + "step": 9226 + }, + { + "epoch": 7.37002997002997, + "grad_norm": 0.306640625, + "learning_rate": 0.0002, + "loss": 0.8906, + "step": 9227 + }, + { + "epoch": 7.370829170829171, + "grad_norm": 0.25390625, + "learning_rate": 0.0002, + "loss": 0.8804, + "step": 9228 + }, + { + "epoch": 7.371628371628372, + "grad_norm": 0.349609375, + "learning_rate": 0.0002, + "loss": 0.875, + "step": 9229 + }, + { + "epoch": 7.372427572427572, + "grad_norm": 0.26171875, + "learning_rate": 0.0002, + "loss": 0.8832, + "step": 9230 + }, + { + "epoch": 7.373226773226773, + "grad_norm": 0.30859375, + "learning_rate": 0.0002, + "loss": 0.8874, + "step": 9231 + }, + { + "epoch": 7.374025974025974, + "grad_norm": 0.26953125, + "learning_rate": 0.0002, + "loss": 0.8887, + "step": 9232 + }, + { + "epoch": 7.374825174825175, + "grad_norm": 0.263671875, + "learning_rate": 0.0002, + "loss": 0.8817, + "step": 9233 + }, + { + "epoch": 7.375624375624375, + "grad_norm": 0.33203125, + "learning_rate": 0.0002, + "loss": 0.8882, + "step": 9234 + }, + { + "epoch": 7.376423576423576, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8941, + "step": 9235 + }, + { + "epoch": 7.377222777222777, + "grad_norm": 0.322265625, + "learning_rate": 0.0002, + "loss": 0.8804, + "step": 9236 + }, + { + "epoch": 7.3780219780219785, + "grad_norm": 0.328125, + "learning_rate": 0.0002, + "loss": 0.8921, + "step": 9237 + }, + { + "epoch": 7.378821178821179, + "grad_norm": 0.373046875, + "learning_rate": 0.0002, + "loss": 0.8946, + "step": 9238 + }, + { + "epoch": 7.37962037962038, + "grad_norm": 0.44921875, + "learning_rate": 0.0002, + "loss": 0.8898, + "step": 9239 + }, + { + "epoch": 7.380419580419581, + "grad_norm": 0.35546875, + "learning_rate": 0.0002, + "loss": 0.8859, + "step": 9240 + }, + { + "epoch": 7.381218781218781, + "grad_norm": 0.298828125, + "learning_rate": 0.0002, + "loss": 0.8824, + "step": 9241 + }, + { + "epoch": 7.382017982017982, + "grad_norm": 0.3203125, + "learning_rate": 0.0002, + "loss": 0.8891, + "step": 9242 + }, + { + "epoch": 7.382817182817183, + "grad_norm": 0.361328125, + "learning_rate": 0.0002, + "loss": 0.8907, + "step": 9243 + }, + { + "epoch": 7.383616383616384, + "grad_norm": 0.4765625, + "learning_rate": 0.0002, + "loss": 0.8804, + "step": 9244 + }, + { + "epoch": 7.384415584415584, + "grad_norm": 
0.26953125, + "learning_rate": 0.0002, + "loss": 0.8856, + "step": 9245 + }, + { + "epoch": 7.385214785214785, + "grad_norm": 0.408203125, + "learning_rate": 0.0002, + "loss": 0.8947, + "step": 9246 + }, + { + "epoch": 7.386013986013986, + "grad_norm": 0.283203125, + "learning_rate": 0.0002, + "loss": 0.8984, + "step": 9247 + }, + { + "epoch": 7.386813186813187, + "grad_norm": 0.326171875, + "learning_rate": 0.0002, + "loss": 0.8868, + "step": 9248 + }, + { + "epoch": 7.387612387612387, + "grad_norm": 0.3671875, + "learning_rate": 0.0002, + "loss": 0.8922, + "step": 9249 + }, + { + "epoch": 7.388411588411588, + "grad_norm": 0.251953125, + "learning_rate": 0.0002, + "loss": 0.8809, + "step": 9250 + } + ], + "logging_steps": 1.0, + "max_steps": 750600, + "num_input_tokens_seen": 0, + "num_train_epochs": 600, + "save_steps": 250, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 4.0602973151585894e+18, + "train_batch_size": 64, + "trial_name": null, + "trial_params": null +}