{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.49996528018887576,
  "eval_steps": 1000,
  "global_step": 7200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 6.943962224845496e-05,
      "grad_norm": 9.8125,
      "learning_rate": 2e-06,
      "loss": 0.7188,
      "step": 1
    },
    {
      "epoch": 0.006943962224845497,
      "grad_norm": 0.1376953125,
      "learning_rate": 0.0002,
      "loss": 0.3907,
      "step": 100
    },
    {
      "epoch": 0.013887924449690994,
      "grad_norm": 0.0849609375,
      "learning_rate": 0.0004,
      "loss": 0.2341,
      "step": 200
    },
    {
      "epoch": 0.02083188667453649,
      "grad_norm": 0.146484375,
      "learning_rate": 0.0006,
      "loss": 0.2181,
      "step": 300
    },
    {
      "epoch": 0.027775848899381988,
      "grad_norm": 0.10546875,
      "learning_rate": 0.0008,
      "loss": 0.197,
      "step": 400
    },
    {
      "epoch": 0.034719811124227486,
      "grad_norm": 0.11376953125,
      "learning_rate": 0.001,
      "loss": 0.1695,
      "step": 500
    },
    {
      "epoch": 0.04166377334907298,
      "grad_norm": 0.0849609375,
      "learning_rate": 0.0012,
      "loss": 0.14,
      "step": 600
    },
    {
      "epoch": 0.04860773557391848,
      "grad_norm": 0.078125,
      "learning_rate": 0.0014,
      "loss": 0.1231,
      "step": 700
    },
    {
      "epoch": 0.055551697798763976,
      "grad_norm": 0.0712890625,
      "learning_rate": 0.0016,
      "loss": 0.1141,
      "step": 800
    },
    {
      "epoch": 0.06249566002360947,
      "grad_norm": 0.06298828125,
      "learning_rate": 0.0018000000000000002,
      "loss": 0.1077,
      "step": 900
    },
    {
      "epoch": 0.06943962224845497,
      "grad_norm": 0.0615234375,
      "learning_rate": 0.002,
      "loss": 0.1045,
      "step": 1000
    },
    {
      "epoch": 0.06943962224845497,
      "eval_covost2-en-de_loss": 1.4858413934707642,
      "eval_covost2-en-de_runtime": 32.1589,
      "eval_covost2-en-de_samples_per_second": 1.99,
      "eval_covost2-en-de_steps_per_second": 0.062,
      "step": 1000
    },
    {
      "epoch": 0.06943962224845497,
      "eval_covost2-zh-en_loss": 2.7152516841888428,
      "eval_covost2-zh-en_runtime": 31.3842,
      "eval_covost2-zh-en_samples_per_second": 2.039,
      "eval_covost2-zh-en_steps_per_second": 0.064,
      "step": 1000
    },
    {
      "epoch": 0.06943962224845497,
      "eval_peoplespeech-clean-transcription_loss": 2.0398874282836914,
      "eval_peoplespeech-clean-transcription_runtime": 32.088,
      "eval_peoplespeech-clean-transcription_samples_per_second": 1.995,
      "eval_peoplespeech-clean-transcription_steps_per_second": 0.062,
      "step": 1000
    },
    {
      "epoch": 0.07638358447330046,
      "grad_norm": 0.053955078125,
      "learning_rate": 0.001999725185109816,
      "loss": 0.101,
      "step": 1100
    },
    {
      "epoch": 0.08332754669814596,
      "grad_norm": 0.0517578125,
      "learning_rate": 0.0019989008914857113,
      "loss": 0.0956,
      "step": 1200
    },
    {
      "epoch": 0.09027150892299146,
      "grad_norm": 0.04443359375,
      "learning_rate": 0.00199752757218401,
      "loss": 0.0936,
      "step": 1300
    },
    {
      "epoch": 0.09721547114783696,
      "grad_norm": 0.0390625,
      "learning_rate": 0.001995605982021898,
      "loss": 0.0917,
      "step": 1400
    },
    {
      "epoch": 0.10415943337268245,
      "grad_norm": 0.0517578125,
      "learning_rate": 0.0019931371771625545,
      "loss": 0.0894,
      "step": 1500
    },
    {
      "epoch": 0.11110339559752795,
      "grad_norm": 0.0419921875,
      "learning_rate": 0.001990122514534651,
      "loss": 0.0868,
      "step": 1600
    },
    {
      "epoch": 0.11804735782237345,
      "grad_norm": 0.039306640625,
      "learning_rate": 0.0019865636510865464,
      "loss": 0.0861,
      "step": 1700
    },
    {
      "epoch": 0.12499132004721894,
      "grad_norm": 0.047119140625,
      "learning_rate": 0.001982462542875576,
      "loss": 0.0854,
      "step": 1800
    },
    {
      "epoch": 0.13193528227206444,
      "grad_norm": 0.0390625,
      "learning_rate": 0.001977821443992945,
      "loss": 0.0837,
      "step": 1900
    },
    {
      "epoch": 0.13887924449690994,
      "grad_norm": 0.04052734375,
      "learning_rate": 0.001972642905324813,
      "loss": 0.0818,
      "step": 2000
    },
    {
      "epoch": 0.13887924449690994,
      "eval_covost2-en-de_loss": 1.4137890338897705,
      "eval_covost2-en-de_runtime": 32.5714,
      "eval_covost2-en-de_samples_per_second": 1.965,
      "eval_covost2-en-de_steps_per_second": 0.061,
      "step": 2000
    },
    {
      "epoch": 0.13887924449690994,
      "eval_covost2-zh-en_loss": 2.667837381362915,
      "eval_covost2-zh-en_runtime": 31.1685,
      "eval_covost2-zh-en_samples_per_second": 2.053,
      "eval_covost2-zh-en_steps_per_second": 0.064,
      "step": 2000
    },
    {
      "epoch": 0.13887924449690994,
      "eval_peoplespeech-clean-transcription_loss": 1.835880160331726,
      "eval_peoplespeech-clean-transcription_runtime": 32.0265,
      "eval_peoplespeech-clean-transcription_samples_per_second": 1.998,
      "eval_peoplespeech-clean-transcription_steps_per_second": 0.062,
      "step": 2000
    },
    {
      "epoch": 0.14582320672175544,
      "grad_norm": 0.039794921875,
      "learning_rate": 0.0019669297731502505,
      "loss": 0.0813,
      "step": 2100
    },
    {
      "epoch": 0.15276716894660092,
      "grad_norm": 0.03515625,
      "learning_rate": 0.00196068518757684,
      "loss": 0.0811,
      "step": 2200
    },
    {
      "epoch": 0.15971113117144642,
      "grad_norm": 0.04443359375,
      "learning_rate": 0.001953912580814779,
      "loss": 0.0793,
      "step": 2300
    },
    {
      "epoch": 0.16665509339629192,
      "grad_norm": 0.037841796875,
      "learning_rate": 0.0019466156752904343,
      "loss": 0.0788,
      "step": 2400
    },
    {
      "epoch": 0.17359905562113742,
      "grad_norm": 0.04052734375,
      "learning_rate": 0.0019387984816003866,
      "loss": 0.0783,
      "step": 2500
    },
    {
      "epoch": 0.18054301784598292,
      "grad_norm": 0.03466796875,
      "learning_rate": 0.0019304652963070869,
      "loss": 0.0772,
      "step": 2600
    },
    {
      "epoch": 0.18748698007082842,
      "grad_norm": 0.036376953125,
      "learning_rate": 0.0019216206995773372,
      "loss": 0.0771,
      "step": 2700
    },
    {
      "epoch": 0.19443094229567393,
      "grad_norm": 0.0400390625,
      "learning_rate": 0.0019122695526648968,
      "loss": 0.0766,
      "step": 2800
    },
    {
      "epoch": 0.2013749045205194,
      "grad_norm": 0.0361328125,
      "learning_rate": 0.0019024169952385887,
      "loss": 0.0753,
      "step": 2900
    },
    {
      "epoch": 0.2083188667453649,
      "grad_norm": 0.03125,
      "learning_rate": 0.0018920684425573864,
      "loss": 0.075,
      "step": 3000
    },
    {
      "epoch": 0.2083188667453649,
      "eval_covost2-en-de_loss": 1.3828500509262085,
      "eval_covost2-en-de_runtime": 32.2972,
      "eval_covost2-en-de_samples_per_second": 1.982,
      "eval_covost2-en-de_steps_per_second": 0.062,
      "step": 3000
    },
    {
      "epoch": 0.2083188667453649,
      "eval_covost2-zh-en_loss": 2.646721839904785,
      "eval_covost2-zh-en_runtime": 31.0128,
      "eval_covost2-zh-en_samples_per_second": 2.064,
      "eval_covost2-zh-en_steps_per_second": 0.064,
      "step": 3000
    },
    {
      "epoch": 0.2083188667453649,
      "eval_peoplespeech-clean-transcription_loss": 1.773127555847168,
      "eval_peoplespeech-clean-transcription_runtime": 32.5192,
      "eval_peoplespeech-clean-transcription_samples_per_second": 1.968,
      "eval_peoplespeech-clean-transcription_steps_per_second": 0.062,
      "step": 3000
    },
    {
      "epoch": 0.2152628289702104,
      "grad_norm": 0.03369140625,
      "learning_rate": 0.0018812295824940284,
      "loss": 0.0743,
      "step": 3100
    },
    {
      "epoch": 0.2222067911950559,
      "grad_norm": 0.034912109375,
      "learning_rate": 0.0018699063724087904,
      "loss": 0.074,
      "step": 3200
    },
    {
      "epoch": 0.2291507534199014,
      "grad_norm": 0.033203125,
      "learning_rate": 0.0018581050358751443,
      "loss": 0.0742,
      "step": 3300
    },
    {
      "epoch": 0.2360947156447469,
      "grad_norm": 0.03857421875,
      "learning_rate": 0.0018458320592590974,
      "loss": 0.0742,
      "step": 3400
    },
    {
      "epoch": 0.24303867786959238,
      "grad_norm": 0.033935546875,
      "learning_rate": 0.0018330941881540914,
      "loss": 0.0728,
      "step": 3500
    },
    {
      "epoch": 0.24998264009443788,
      "grad_norm": 0.031005859375,
      "learning_rate": 0.0018198984236734246,
      "loss": 0.0728,
      "step": 3600
    },
    {
      "epoch": 0.2569266023192834,
      "grad_norm": 0.030029296875,
      "learning_rate": 0.0018062520186022297,
      "loss": 0.0714,
      "step": 3700
    },
    {
      "epoch": 0.2638705645441289,
      "grad_norm": 0.02734375,
      "learning_rate": 0.0017921624734111292,
      "loss": 0.071,
      "step": 3800
    },
    {
      "epoch": 0.2708145267689744,
      "grad_norm": 0.03271484375,
      "learning_rate": 0.001777637532133752,
      "loss": 0.0705,
      "step": 3900
    },
    {
      "epoch": 0.2777584889938199,
      "grad_norm": 0.035400390625,
      "learning_rate": 0.0017626851781103819,
      "loss": 0.0714,
      "step": 4000
    },
    {
      "epoch": 0.2777584889938199,
      "eval_covost2-en-de_loss": 1.3778624534606934,
      "eval_covost2-en-de_runtime": 32.6181,
      "eval_covost2-en-de_samples_per_second": 1.962,
      "eval_covost2-en-de_steps_per_second": 0.061,
      "step": 4000
    },
    {
      "epoch": 0.2777584889938199,
      "eval_covost2-zh-en_loss": 2.6438870429992676,
      "eval_covost2-zh-en_runtime": 31.4603,
      "eval_covost2-zh-en_samples_per_second": 2.034,
      "eval_covost2-zh-en_steps_per_second": 0.064,
      "step": 4000
    },
    {
      "epoch": 0.2777584889938199,
      "eval_peoplespeech-clean-transcription_loss": 1.7361584901809692,
      "eval_peoplespeech-clean-transcription_runtime": 32.442,
      "eval_peoplespeech-clean-transcription_samples_per_second": 1.973,
      "eval_peoplespeech-clean-transcription_steps_per_second": 0.062,
      "step": 4000
    },
    {
      "epoch": 0.2847024512186654,
      "grad_norm": 0.0281982421875,
      "learning_rate": 0.001747313629600077,
      "loss": 0.0713,
      "step": 4100
    },
    {
      "epoch": 0.2916464134435109,
      "grad_norm": 0.028076171875,
      "learning_rate": 0.001731531335263669,
      "loss": 0.0699,
      "step": 4200
    },
    {
      "epoch": 0.2985903756683564,
      "grad_norm": 0.0277099609375,
      "learning_rate": 0.0017153469695201276,
      "loss": 0.0702,
      "step": 4300
    },
    {
      "epoch": 0.30553433789320184,
      "grad_norm": 0.031982421875,
      "learning_rate": 0.0016987694277788418,
      "loss": 0.0692,
      "step": 4400
    },
    {
      "epoch": 0.31247830011804734,
      "grad_norm": 0.02880859375,
      "learning_rate": 0.001681807821550438,
      "loss": 0.0686,
      "step": 4500
    },
    {
      "epoch": 0.31942226234289284,
      "grad_norm": 0.0289306640625,
      "learning_rate": 0.0016644714734388218,
      "loss": 0.0698,
      "step": 4600
    },
    {
      "epoch": 0.32636622456773834,
      "grad_norm": 0.029541015625,
      "learning_rate": 0.0016467699120171987,
      "loss": 0.0683,
      "step": 4700
    },
    {
      "epoch": 0.33331018679258384,
      "grad_norm": 0.034423828125,
      "learning_rate": 0.001628712866590885,
      "loss": 0.0687,
      "step": 4800
    },
    {
      "epoch": 0.34025414901742934,
      "grad_norm": 0.0289306640625,
      "learning_rate": 0.0016103102618497923,
      "loss": 0.0684,
      "step": 4900
    },
    {
      "epoch": 0.34719811124227484,
      "grad_norm": 0.0263671875,
      "learning_rate": 0.0015915722124135226,
      "loss": 0.0681,
      "step": 5000
    },
    {
      "epoch": 0.34719811124227484,
      "eval_covost2-en-de_loss": 1.3711879253387451,
      "eval_covost2-en-de_runtime": 32.6293,
      "eval_covost2-en-de_samples_per_second": 1.961,
      "eval_covost2-en-de_steps_per_second": 0.061,
      "step": 5000
    },
    {
      "epoch": 0.34719811124227484,
      "eval_covost2-zh-en_loss": 2.6346511840820312,
      "eval_covost2-zh-en_runtime": 32.1513,
      "eval_covost2-zh-en_samples_per_second": 1.991,
      "eval_covost2-zh-en_steps_per_second": 0.062,
      "step": 5000
    },
    {
      "epoch": 0.34719811124227484,
      "eval_peoplespeech-clean-transcription_loss": 1.7350472211837769,
      "eval_peoplespeech-clean-transcription_runtime": 32.5813,
      "eval_peoplespeech-clean-transcription_samples_per_second": 1.964,
      "eval_peoplespeech-clean-transcription_steps_per_second": 0.061,
      "step": 5000
    },
    {
      "epoch": 0.35414207346712034,
      "grad_norm": 0.028076171875,
      "learning_rate": 0.001572509017272072,
      "loss": 0.0693,
      "step": 5100
    },
    {
      "epoch": 0.36108603569196585,
      "grad_norm": 0.030517578125,
      "learning_rate": 0.0015531311541251993,
      "loss": 0.0683,
      "step": 5200
    },
    {
      "epoch": 0.36802999791681135,
      "grad_norm": 0.031494140625,
      "learning_rate": 0.0015334492736235703,
      "loss": 0.0677,
      "step": 5300
    },
    {
      "epoch": 0.37497396014165685,
      "grad_norm": 0.0284423828125,
      "learning_rate": 0.0015134741935148419,
      "loss": 0.0669,
      "step": 5400
    },
    {
      "epoch": 0.38191792236650235,
      "grad_norm": 0.0302734375,
      "learning_rate": 0.0014932168926979072,
      "loss": 0.0669,
      "step": 5500
    },
    {
      "epoch": 0.38886188459134785,
      "grad_norm": 0.0260009765625,
      "learning_rate": 0.0014726885051885652,
      "loss": 0.0666,
      "step": 5600
    },
    {
      "epoch": 0.3958058468161933,
      "grad_norm": 0.033447265625,
      "learning_rate": 0.0014519003139999338,
      "loss": 0.0659,
      "step": 5700
    },
    {
      "epoch": 0.4027498090410388,
      "grad_norm": 0.0283203125,
      "learning_rate": 0.0014308637449409706,
      "loss": 0.0653,
      "step": 5800
    },
    {
      "epoch": 0.4096937712658843,
      "grad_norm": 0.0283203125,
      "learning_rate": 0.0014095903603365066,
      "loss": 0.0662,
      "step": 5900
    },
    {
      "epoch": 0.4166377334907298,
      "grad_norm": 0.0267333984375,
      "learning_rate": 0.0013880918526722496,
      "loss": 0.0665,
      "step": 6000
    },
    {
      "epoch": 0.4166377334907298,
      "eval_covost2-en-de_loss": 1.3651559352874756,
      "eval_covost2-en-de_runtime": 32.5621,
      "eval_covost2-en-de_samples_per_second": 1.965,
      "eval_covost2-en-de_steps_per_second": 0.061,
      "step": 6000
    },
    {
      "epoch": 0.4166377334907298,
      "eval_covost2-zh-en_loss": 2.6372551918029785,
      "eval_covost2-zh-en_runtime": 31.028,
      "eval_covost2-zh-en_samples_per_second": 2.063,
      "eval_covost2-zh-en_steps_per_second": 0.064,
      "step": 6000
    },
    {
      "epoch": 0.4166377334907298,
      "eval_peoplespeech-clean-transcription_loss": 1.7209596633911133,
      "eval_peoplespeech-clean-transcription_runtime": 32.6773,
      "eval_peoplespeech-clean-transcription_samples_per_second": 1.959,
      "eval_peoplespeech-clean-transcription_steps_per_second": 0.061,
      "step": 6000
    },
    {
      "epoch": 0.4235816957155753,
      "grad_norm": 0.028564453125,
      "learning_rate": 0.0013663800381682463,
      "loss": 0.0658,
      "step": 6100
    },
    {
      "epoch": 0.4305256579404208,
      "grad_norm": 0.0299072265625,
      "learning_rate": 0.0013444668502843329,
      "loss": 0.0657,
      "step": 6200
    },
    {
      "epoch": 0.4374696201652663,
      "grad_norm": 0.0296630859375,
      "learning_rate": 0.0013223643331611537,
      "loss": 0.0655,
      "step": 6300
    },
    {
      "epoch": 0.4444135823901118,
      "grad_norm": 0.0286865234375,
      "learning_rate": 0.001300084635000341,
      "loss": 0.0654,
      "step": 6400
    },
    {
      "epoch": 0.4513575446149573,
      "grad_norm": 0.028564453125,
      "learning_rate": 0.0012776400013875004,
      "loss": 0.0655,
      "step": 6500
    },
    {
      "epoch": 0.4583015068398028,
      "grad_norm": 0.030029296875,
      "learning_rate": 0.0012550427685616766,
      "loss": 0.0648,
      "step": 6600
    },
    {
      "epoch": 0.4652454690646483,
      "grad_norm": 0.037109375,
      "learning_rate": 0.0012323053566349834,
      "loss": 0.0654,
      "step": 6700
    },
    {
      "epoch": 0.4721894312894938,
      "grad_norm": 0.029296875,
      "learning_rate": 0.0012094402627661448,
      "loss": 0.0643,
      "step": 6800
    },
    {
      "epoch": 0.47913339351433926,
      "grad_norm": 0.030517578125,
      "learning_rate": 0.0011864600542916813,
      "loss": 0.0646,
      "step": 6900
    },
    {
      "epoch": 0.48607735573918476,
      "grad_norm": 0.037353515625,
      "learning_rate": 0.0011633773618185302,
      "loss": 0.0642,
      "step": 7000
    },
    {
      "epoch": 0.48607735573918476,
      "eval_covost2-en-de_loss": 1.3594859838485718,
      "eval_covost2-en-de_runtime": 32.6807,
      "eval_covost2-en-de_samples_per_second": 1.958,
      "eval_covost2-en-de_steps_per_second": 0.061,
      "step": 7000
    },
    {
      "epoch": 0.48607735573918476,
      "eval_covost2-zh-en_loss": 2.626713514328003,
      "eval_covost2-zh-en_runtime": 31.0228,
      "eval_covost2-zh-en_samples_per_second": 2.063,
      "eval_covost2-zh-en_steps_per_second": 0.064,
      "step": 7000
    },
    {
      "epoch": 0.48607735573918476,
      "eval_peoplespeech-clean-transcription_loss": 1.693739652633667,
      "eval_peoplespeech-clean-transcription_runtime": 31.9776,
      "eval_peoplespeech-clean-transcription_samples_per_second": 2.001,
      "eval_peoplespeech-clean-transcription_steps_per_second": 0.063,
      "step": 7000
    },
    {
      "epoch": 0.49302131796403026,
      "grad_norm": 0.02978515625,
      "learning_rate": 0.0011402048722818862,
      "loss": 0.0656,
      "step": 7100
    },
    {
      "epoch": 0.49996528018887576,
      "grad_norm": 0.0281982421875,
      "learning_rate": 0.0011169553219720827,
      "loss": 0.064,
      "step": 7200
    }
  ],
  "logging_steps": 100,
  "max_steps": 14400,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 3600,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.8942929465072026e+18,
  "train_batch_size": 96,
  "trial_name": null,
  "trial_params": null
}
|