{
  "best_metric": 0.3763063217406202,
  "best_model_checkpoint": "d:\\DataTicon\\Whisper-Khmer-Small\\whisper-khmer\\outputs\\whisper-small-khmer\\checkpoint-5000",
  "epoch": 2.995805871779509,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005991611743559017,
      "grad_norm": 22.960840225219727,
      "learning_rate": 7.000000000000001e-06,
      "loss": 3.0494,
      "step": 10
    },
    {
      "epoch": 0.011983223487118035,
      "grad_norm": 15.023651123046875,
      "learning_rate": 1.7000000000000003e-05,
      "loss": 2.1354,
      "step": 20
    },
    {
      "epoch": 0.017974835230677052,
      "grad_norm": 20.588504791259766,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 1.7606,
      "step": 30
    },
    {
      "epoch": 0.02396644697423607,
      "grad_norm": 12.844827651977539,
      "learning_rate": 3.7e-05,
      "loss": 1.6323,
      "step": 40
    },
    {
      "epoch": 0.029958058717795086,
      "grad_norm": 21.74739646911621,
      "learning_rate": 4.7e-05,
      "loss": 1.5285,
      "step": 50
    },
    {
      "epoch": 0.035949670461354104,
      "grad_norm": 14.430631637573242,
      "learning_rate": 4.992939277788985e-05,
      "loss": 1.4681,
      "step": 60
    },
    {
      "epoch": 0.041941282204913125,
      "grad_norm": 16.150493621826172,
      "learning_rate": 4.98285253177325e-05,
      "loss": 1.4109,
      "step": 70
    },
    {
      "epoch": 0.04793289394847214,
      "grad_norm": 14.220664024353027,
      "learning_rate": 4.972765785757515e-05,
      "loss": 1.3271,
      "step": 80
    },
    {
      "epoch": 0.05392450569203116,
      "grad_norm": 10.489856719970703,
      "learning_rate": 4.962679039741779e-05,
      "loss": 1.2089,
      "step": 90
    },
    {
      "epoch": 0.05991611743559017,
      "grad_norm": 15.242402076721191,
      "learning_rate": 4.952592293726044e-05,
      "loss": 1.2212,
      "step": 100
    },
    {
      "epoch": 0.0659077291791492,
      "grad_norm": 8.778143882751465,
      "learning_rate": 4.942505547710309e-05,
      "loss": 1.0803,
      "step": 110
    },
    {
      "epoch": 0.07189934092270821,
      "grad_norm": 8.006437301635742,
      "learning_rate": 4.932418801694574e-05,
      "loss": 1.0374,
      "step": 120
    },
    {
      "epoch": 0.07789095266626722,
      "grad_norm": 10.208110809326172,
      "learning_rate": 4.922332055678838e-05,
      "loss": 0.9244,
      "step": 130
    },
    {
      "epoch": 0.08388256440982625,
      "grad_norm": 11.258834838867188,
      "learning_rate": 4.9122453096631024e-05,
      "loss": 0.7758,
      "step": 140
    },
    {
      "epoch": 0.08987417615338526,
      "grad_norm": 11.1365327835083,
      "learning_rate": 4.902158563647368e-05,
      "loss": 0.7568,
      "step": 150
    },
    {
      "epoch": 0.09586578789694428,
      "grad_norm": 9.645950317382812,
      "learning_rate": 4.892071817631632e-05,
      "loss": 0.666,
      "step": 160
    },
    {
      "epoch": 0.10185739964050329,
      "grad_norm": 7.856279373168945,
      "learning_rate": 4.881985071615897e-05,
      "loss": 0.6093,
      "step": 170
    },
    {
      "epoch": 0.10784901138406232,
      "grad_norm": 8.624720573425293,
      "learning_rate": 4.871898325600161e-05,
      "loss": 0.611,
      "step": 180
    },
    {
      "epoch": 0.11384062312762133,
      "grad_norm": 7.039414882659912,
      "learning_rate": 4.861811579584427e-05,
      "loss": 0.5455,
      "step": 190
    },
    {
      "epoch": 0.11983223487118035,
      "grad_norm": 8.36270523071289,
      "learning_rate": 4.851724833568691e-05,
      "loss": 0.5192,
      "step": 200
    },
    {
      "epoch": 0.12582384661473936,
      "grad_norm": 6.983541488647461,
      "learning_rate": 4.841638087552955e-05,
      "loss": 0.5046,
      "step": 210
    },
    {
      "epoch": 0.1318154583582984,
      "grad_norm": 6.642160415649414,
      "learning_rate": 4.83155134153722e-05,
      "loss": 0.5473,
      "step": 220
    },
    {
      "epoch": 0.1378070701018574,
      "grad_norm": 8.442938804626465,
      "learning_rate": 4.821464595521485e-05,
      "loss": 0.5307,
      "step": 230
    },
    {
      "epoch": 0.14379868184541642,
      "grad_norm": 5.670754432678223,
      "learning_rate": 4.81137784950575e-05,
      "loss": 0.4801,
      "step": 240
    },
    {
      "epoch": 0.14979029358897544,
      "grad_norm": 7.541809558868408,
      "learning_rate": 4.801291103490014e-05,
      "loss": 0.4827,
      "step": 250
    },
    {
      "epoch": 0.15578190533253444,
      "grad_norm": 4.786922454833984,
      "learning_rate": 4.791204357474279e-05,
      "loss": 0.4322,
      "step": 260
    },
    {
      "epoch": 0.16177351707609347,
      "grad_norm": 6.316529750823975,
      "learning_rate": 4.781117611458544e-05,
      "loss": 0.4823,
      "step": 270
    },
    {
      "epoch": 0.1677651288196525,
      "grad_norm": 6.494561195373535,
      "learning_rate": 4.771030865442808e-05,
      "loss": 0.4177,
      "step": 280
    },
    {
      "epoch": 0.1737567405632115,
      "grad_norm": 6.1072998046875,
      "learning_rate": 4.760944119427073e-05,
      "loss": 0.4238,
      "step": 290
    },
    {
      "epoch": 0.17974835230677053,
      "grad_norm": 7.656487464904785,
      "learning_rate": 4.750857373411338e-05,
      "loss": 0.3656,
      "step": 300
    },
    {
      "epoch": 0.18573996405032953,
      "grad_norm": 4.906504154205322,
      "learning_rate": 4.740770627395603e-05,
      "loss": 0.4198,
      "step": 310
    },
    {
      "epoch": 0.19173157579388855,
      "grad_norm": 5.329479694366455,
      "learning_rate": 4.730683881379867e-05,
      "loss": 0.3753,
      "step": 320
    },
    {
      "epoch": 0.19772318753744758,
      "grad_norm": 4.617819309234619,
      "learning_rate": 4.720597135364131e-05,
      "loss": 0.3791,
      "step": 330
    },
    {
      "epoch": 0.20371479928100658,
      "grad_norm": 6.305775165557861,
      "learning_rate": 4.710510389348396e-05,
      "loss": 0.3902,
      "step": 340
    },
    {
      "epoch": 0.2097064110245656,
      "grad_norm": 3.827413320541382,
      "learning_rate": 4.700423643332661e-05,
      "loss": 0.3879,
      "step": 350
    },
    {
      "epoch": 0.21569802276812464,
      "grad_norm": 5.891330242156982,
      "learning_rate": 4.690336897316926e-05,
      "loss": 0.3855,
      "step": 360
    },
    {
      "epoch": 0.22168963451168364,
      "grad_norm": 4.497730731964111,
      "learning_rate": 4.68025015130119e-05,
      "loss": 0.3275,
      "step": 370
    },
    {
      "epoch": 0.22768124625524266,
      "grad_norm": 3.898587703704834,
      "learning_rate": 4.670163405285455e-05,
      "loss": 0.3645,
      "step": 380
    },
    {
      "epoch": 0.23367285799880166,
      "grad_norm": 4.55585241317749,
      "learning_rate": 4.66007665926972e-05,
      "loss": 0.3683,
      "step": 390
    },
    {
      "epoch": 0.2396644697423607,
      "grad_norm": 6.788579940795898,
      "learning_rate": 4.649989913253984e-05,
      "loss": 0.3637,
      "step": 400
    },
    {
      "epoch": 0.24565608148591972,
      "grad_norm": 5.430655002593994,
      "learning_rate": 4.639903167238249e-05,
      "loss": 0.3766,
      "step": 410
    },
    {
      "epoch": 0.2516476932294787,
      "grad_norm": 6.993104934692383,
      "learning_rate": 4.629816421222514e-05,
      "loss": 0.3482,
      "step": 420
    },
    {
      "epoch": 0.2576393049730378,
      "grad_norm": 4.983759880065918,
      "learning_rate": 4.619729675206779e-05,
      "loss": 0.3489,
      "step": 430
    },
    {
      "epoch": 0.2636309167165968,
      "grad_norm": 4.883998394012451,
      "learning_rate": 4.609642929191043e-05,
      "loss": 0.3228,
      "step": 440
    },
    {
      "epoch": 0.2696225284601558,
      "grad_norm": 4.701789855957031,
      "learning_rate": 4.599556183175308e-05,
      "loss": 0.3075,
      "step": 450
    },
    {
      "epoch": 0.2756141402037148,
      "grad_norm": 6.912998676300049,
      "learning_rate": 4.589469437159573e-05,
      "loss": 0.3267,
      "step": 460
    },
    {
      "epoch": 0.28160575194727383,
      "grad_norm": 5.111138343811035,
      "learning_rate": 4.579382691143837e-05,
      "loss": 0.3471,
      "step": 470
    },
    {
      "epoch": 0.28759736369083283,
      "grad_norm": 4.039860725402832,
      "learning_rate": 4.569295945128102e-05,
      "loss": 0.2972,
      "step": 480
    },
    {
      "epoch": 0.29358897543439183,
      "grad_norm": 4.554994106292725,
      "learning_rate": 4.559209199112366e-05,
      "loss": 0.3344,
      "step": 490
    },
    {
      "epoch": 0.2995805871779509,
      "grad_norm": 5.8619184494018555,
      "learning_rate": 4.549122453096631e-05,
      "loss": 0.3375,
      "step": 500
    },
    {
      "epoch": 0.3055721989215099,
      "grad_norm": 4.201863765716553,
      "learning_rate": 4.539035707080896e-05,
      "loss": 0.2924,
      "step": 510
    },
    {
      "epoch": 0.3115638106650689,
      "grad_norm": 4.574302673339844,
      "learning_rate": 4.528948961065161e-05,
      "loss": 0.3193,
      "step": 520
    },
    {
      "epoch": 0.31755542240862794,
      "grad_norm": 5.951114654541016,
      "learning_rate": 4.518862215049425e-05,
      "loss": 0.3522,
      "step": 530
    },
    {
      "epoch": 0.32354703415218694,
      "grad_norm": 5.023397922515869,
      "learning_rate": 4.50877546903369e-05,
      "loss": 0.3144,
      "step": 540
    },
    {
      "epoch": 0.32953864589574594,
      "grad_norm": 4.05376672744751,
      "learning_rate": 4.498688723017955e-05,
      "loss": 0.3183,
      "step": 550
    },
    {
      "epoch": 0.335530257639305,
      "grad_norm": 3.973388910293579,
      "learning_rate": 4.488601977002219e-05,
      "loss": 0.289,
      "step": 560
    },
    {
      "epoch": 0.341521869382864,
      "grad_norm": 4.413971424102783,
      "learning_rate": 4.478515230986484e-05,
      "loss": 0.3045,
      "step": 570
    },
    {
      "epoch": 0.347513481126423,
      "grad_norm": 4.1166791915893555,
      "learning_rate": 4.468428484970749e-05,
      "loss": 0.2692,
      "step": 580
    },
    {
      "epoch": 0.35350509286998205,
      "grad_norm": 4.370257377624512,
      "learning_rate": 4.458341738955013e-05,
      "loss": 0.3231,
      "step": 590
    },
    {
      "epoch": 0.35949670461354105,
      "grad_norm": 2.8509249687194824,
      "learning_rate": 4.448254992939278e-05,
      "loss": 0.2658,
      "step": 600
    },
    {
      "epoch": 0.36548831635710005,
      "grad_norm": 4.245826244354248,
      "learning_rate": 4.438168246923542e-05,
      "loss": 0.2796,
      "step": 610
    },
    {
      "epoch": 0.37147992810065905,
      "grad_norm": 2.2622575759887695,
      "learning_rate": 4.428081500907808e-05,
      "loss": 0.2793,
      "step": 620
    },
    {
      "epoch": 0.3774715398442181,
      "grad_norm": 5.839344024658203,
      "learning_rate": 4.417994754892072e-05,
      "loss": 0.2948,
      "step": 630
    },
    {
      "epoch": 0.3834631515877771,
      "grad_norm": 4.588991165161133,
      "learning_rate": 4.407908008876337e-05,
      "loss": 0.2899,
      "step": 640
    },
    {
      "epoch": 0.3894547633313361,
      "grad_norm": 4.319995880126953,
      "learning_rate": 4.397821262860601e-05,
      "loss": 0.2889,
      "step": 650
    },
    {
      "epoch": 0.39544637507489516,
      "grad_norm": 4.287087440490723,
      "learning_rate": 4.387734516844866e-05,
      "loss": 0.2961,
      "step": 660
    },
    {
      "epoch": 0.40143798681845416,
      "grad_norm": 2.974928855895996,
      "learning_rate": 4.377647770829131e-05,
      "loss": 0.287,
      "step": 670
    },
    {
      "epoch": 0.40742959856201316,
      "grad_norm": 3.6927688121795654,
      "learning_rate": 4.367561024813395e-05,
      "loss": 0.3006,
      "step": 680
    },
    {
      "epoch": 0.4134212103055722,
      "grad_norm": 2.942836284637451,
      "learning_rate": 4.35747427879766e-05,
      "loss": 0.2678,
      "step": 690
    },
    {
      "epoch": 0.4194128220491312,
      "grad_norm": 3.953315019607544,
      "learning_rate": 4.347387532781925e-05,
      "loss": 0.3063,
      "step": 700
    },
    {
      "epoch": 0.4254044337926902,
      "grad_norm": 3.223515510559082,
      "learning_rate": 4.33730078676619e-05,
      "loss": 0.2575,
      "step": 710
    },
    {
      "epoch": 0.4313960455362493,
      "grad_norm": 4.100767612457275,
      "learning_rate": 4.327214040750454e-05,
      "loss": 0.2581,
      "step": 720
    },
    {
      "epoch": 0.4373876572798083,
      "grad_norm": 3.5469157695770264,
      "learning_rate": 4.317127294734718e-05,
      "loss": 0.2511,
      "step": 730
    },
    {
      "epoch": 0.4433792690233673,
      "grad_norm": 3.4238710403442383,
      "learning_rate": 4.307040548718984e-05,
      "loss": 0.2835,
      "step": 740
    },
    {
      "epoch": 0.44937088076692633,
      "grad_norm": 3.2563302516937256,
      "learning_rate": 4.296953802703248e-05,
      "loss": 0.2559,
      "step": 750
    },
    {
      "epoch": 0.45536249251048533,
      "grad_norm": 3.1551856994628906,
      "learning_rate": 4.286867056687513e-05,
      "loss": 0.2367,
      "step": 760
    },
    {
      "epoch": 0.46135410425404433,
      "grad_norm": 3.8980541229248047,
      "learning_rate": 4.276780310671777e-05,
      "loss": 0.2376,
      "step": 770
    },
    {
      "epoch": 0.46734571599760333,
      "grad_norm": 5.483028888702393,
      "learning_rate": 4.266693564656043e-05,
      "loss": 0.2515,
      "step": 780
    },
    {
      "epoch": 0.4733373277411624,
      "grad_norm": 5.038168907165527,
      "learning_rate": 4.256606818640307e-05,
      "loss": 0.258,
      "step": 790
    },
    {
      "epoch": 0.4793289394847214,
      "grad_norm": 4.069836616516113,
      "learning_rate": 4.246520072624571e-05,
      "loss": 0.2717,
      "step": 800
    },
    {
      "epoch": 0.4853205512282804,
      "grad_norm": 3.8578696250915527,
      "learning_rate": 4.236433326608836e-05,
      "loss": 0.2353,
      "step": 810
    },
    {
      "epoch": 0.49131216297183944,
      "grad_norm": 3.4313695430755615,
      "learning_rate": 4.226346580593101e-05,
      "loss": 0.25,
      "step": 820
    },
    {
      "epoch": 0.49730377471539844,
      "grad_norm": 4.114861488342285,
      "learning_rate": 4.216259834577366e-05,
      "loss": 0.2516,
      "step": 830
    },
    {
      "epoch": 0.5032953864589574,
      "grad_norm": 3.718224048614502,
      "learning_rate": 4.20617308856163e-05,
      "loss": 0.2438,
      "step": 840
    },
    {
      "epoch": 0.5092869982025164,
      "grad_norm": 4.2352070808410645,
      "learning_rate": 4.196086342545895e-05,
      "loss": 0.2427,
      "step": 850
    },
    {
      "epoch": 0.5152786099460755,
      "grad_norm": 3.1277523040771484,
      "learning_rate": 4.18599959653016e-05,
      "loss": 0.2577,
      "step": 860
    },
    {
      "epoch": 0.5212702216896345,
      "grad_norm": 3.432342767715454,
      "learning_rate": 4.175912850514424e-05,
      "loss": 0.2632,
      "step": 870
    },
    {
      "epoch": 0.5272618334331935,
      "grad_norm": 2.9332733154296875,
      "learning_rate": 4.165826104498689e-05,
      "loss": 0.2466,
      "step": 880
    },
    {
      "epoch": 0.5332534451767525,
      "grad_norm": 3.802907705307007,
      "learning_rate": 4.155739358482953e-05,
      "loss": 0.2335,
      "step": 890
    },
    {
      "epoch": 0.5392450569203115,
      "grad_norm": 2.909348726272583,
      "learning_rate": 4.145652612467219e-05,
      "loss": 0.249,
      "step": 900
    },
    {
      "epoch": 0.5452366686638705,
      "grad_norm": 3.496734142303467,
      "learning_rate": 4.135565866451483e-05,
      "loss": 0.2347,
      "step": 910
    },
    {
      "epoch": 0.5512282804074295,
      "grad_norm": 3.264517307281494,
      "learning_rate": 4.125479120435747e-05,
      "loss": 0.2367,
      "step": 920
    },
    {
      "epoch": 0.5572198921509887,
      "grad_norm": 3.3015294075012207,
      "learning_rate": 4.115392374420012e-05,
      "loss": 0.2285,
      "step": 930
    },
    {
      "epoch": 0.5632115038945477,
      "grad_norm": 3.386230945587158,
      "learning_rate": 4.105305628404277e-05,
      "loss": 0.2373,
      "step": 940
    },
    {
      "epoch": 0.5692031156381067,
      "grad_norm": 4.057402610778809,
      "learning_rate": 4.095218882388542e-05,
      "loss": 0.2336,
      "step": 950
    },
    {
      "epoch": 0.5751947273816657,
      "grad_norm": 3.4949593544006348,
      "learning_rate": 4.085132136372806e-05,
      "loss": 0.214,
      "step": 960
    },
    {
      "epoch": 0.5811863391252247,
      "grad_norm": 2.452944278717041,
      "learning_rate": 4.075045390357071e-05,
      "loss": 0.2142,
      "step": 970
    },
    {
      "epoch": 0.5871779508687837,
      "grad_norm": 4.477965831756592,
      "learning_rate": 4.064958644341336e-05,
      "loss": 0.2443,
      "step": 980
    },
    {
      "epoch": 0.5931695626123428,
      "grad_norm": 3.8095345497131348,
      "learning_rate": 4.0548718983256e-05,
      "loss": 0.2074,
      "step": 990
    },
    {
      "epoch": 0.5991611743559018,
      "grad_norm": 3.586312770843506,
      "learning_rate": 4.044785152309865e-05,
      "loss": 0.2352,
      "step": 1000
    },
    {
      "epoch": 0.5991611743559018,
      "eval_loss": 0.3941349983215332,
      "eval_runtime": 1145.4733,
      "eval_samples_per_second": 1.238,
      "eval_steps_per_second": 0.078,
      "eval_wer": 0.47164639369539146,
      "step": 1000
    },
    {
      "epoch": 0.6051527860994608,
      "grad_norm": 3.0073084831237793,
      "learning_rate": 4.03469840629413e-05,
      "loss": 0.1915,
      "step": 1010
    },
    {
      "epoch": 0.6111443978430198,
      "grad_norm": 3.476414680480957,
      "learning_rate": 4.024611660278395e-05,
      "loss": 0.2131,
      "step": 1020
    },
    {
      "epoch": 0.6171360095865788,
      "grad_norm": 3.182587146759033,
      "learning_rate": 4.015533588864232e-05,
      "loss": 0.2186,
      "step": 1030
    },
    {
      "epoch": 0.6231276213301378,
      "grad_norm": 2.873277187347412,
      "learning_rate": 4.005446842848497e-05,
      "loss": 0.2182,
      "step": 1040
    },
    {
      "epoch": 0.6291192330736968,
      "grad_norm": 3.587416172027588,
      "learning_rate": 3.995360096832762e-05,
      "loss": 0.2358,
      "step": 1050
    },
    {
      "epoch": 0.6351108448172559,
      "grad_norm": 2.8519115447998047,
      "learning_rate": 3.9852733508170263e-05,
      "loss": 0.202,
      "step": 1060
    },
    {
      "epoch": 0.6411024565608149,
      "grad_norm": 4.239020824432373,
      "learning_rate": 3.975186604801291e-05,
      "loss": 0.252,
      "step": 1070
    },
    {
      "epoch": 0.6470940683043739,
      "grad_norm": 3.040431499481201,
      "learning_rate": 3.965099858785556e-05,
      "loss": 0.2191,
      "step": 1080
    },
    {
      "epoch": 0.6530856800479329,
      "grad_norm": 4.314202785491943,
      "learning_rate": 3.955013112769821e-05,
      "loss": 0.2341,
      "step": 1090
    },
    {
      "epoch": 0.6590772917914919,
      "grad_norm": 4.510042190551758,
      "learning_rate": 3.944926366754085e-05,
      "loss": 0.2149,
      "step": 1100
    },
    {
      "epoch": 0.6650689035350509,
      "grad_norm": 3.784174919128418,
      "learning_rate": 3.9348396207383495e-05,
      "loss": 0.2413,
      "step": 1110
    },
    {
      "epoch": 0.67106051527861,
      "grad_norm": 3.5287516117095947,
      "learning_rate": 3.924752874722615e-05,
      "loss": 0.2268,
      "step": 1120
    },
    {
      "epoch": 0.677052127022169,
      "grad_norm": 3.4882707595825195,
      "learning_rate": 3.914666128706879e-05,
      "loss": 0.2451,
      "step": 1130
    },
    {
      "epoch": 0.683043738765728,
      "grad_norm": 2.9120218753814697,
      "learning_rate": 3.904579382691144e-05,
      "loss": 0.2256,
      "step": 1140
    },
    {
      "epoch": 0.689035350509287,
      "grad_norm": 4.245069980621338,
      "learning_rate": 3.8944926366754084e-05,
      "loss": 0.2168,
      "step": 1150
    },
    {
      "epoch": 0.695026962252846,
      "grad_norm": 2.571373701095581,
      "learning_rate": 3.884405890659674e-05,
      "loss": 0.2323,
      "step": 1160
    },
    {
      "epoch": 0.701018573996405,
      "grad_norm": 3.3030152320861816,
      "learning_rate": 3.874319144643938e-05,
      "loss": 0.2123,
      "step": 1170
    },
    {
      "epoch": 0.7070101857399641,
      "grad_norm": 3.8424742221832275,
      "learning_rate": 3.8642323986282024e-05,
      "loss": 0.2239,
      "step": 1180
    },
    {
      "epoch": 0.7130017974835231,
      "grad_norm": 2.93353271484375,
      "learning_rate": 3.854145652612467e-05,
      "loss": 0.2153,
      "step": 1190
    },
    {
      "epoch": 0.7189934092270821,
      "grad_norm": 5.56205940246582,
      "learning_rate": 3.844058906596732e-05,
      "loss": 0.2362,
      "step": 1200
    },
    {
      "epoch": 0.7249850209706411,
      "grad_norm": 3.792433023452759,
      "learning_rate": 3.833972160580997e-05,
      "loss": 0.2338,
      "step": 1210
    },
    {
      "epoch": 0.7309766327142001,
      "grad_norm": 3.8626973628997803,
      "learning_rate": 3.823885414565261e-05,
      "loss": 0.2245,
      "step": 1220
    },
    {
      "epoch": 0.7369682444577591,
      "grad_norm": 4.392445087432861,
      "learning_rate": 3.813798668549526e-05,
      "loss": 0.2469,
      "step": 1230
    },
    {
      "epoch": 0.7429598562013181,
      "grad_norm": 2.464857578277588,
      "learning_rate": 3.803711922533791e-05,
      "loss": 0.2249,
      "step": 1240
    },
    {
      "epoch": 0.7489514679448772,
      "grad_norm": 4.464147567749023,
      "learning_rate": 3.793625176518055e-05,
      "loss": 0.239,
      "step": 1250
    },
    {
      "epoch": 0.7549430796884362,
      "grad_norm": 3.0969793796539307,
      "learning_rate": 3.78353843050232e-05,
      "loss": 0.2422,
      "step": 1260
    },
    {
      "epoch": 0.7609346914319952,
      "grad_norm": 2.4737939834594727,
      "learning_rate": 3.7734516844865844e-05,
      "loss": 0.2013,
      "step": 1270
    },
    {
      "epoch": 0.7669263031755542,
      "grad_norm": 3.358137845993042,
      "learning_rate": 3.76336493847085e-05,
      "loss": 0.1985,
      "step": 1280
    },
    {
      "epoch": 0.7729179149191132,
      "grad_norm": 2.7187469005584717,
      "learning_rate": 3.753278192455114e-05,
      "loss": 0.2036,
      "step": 1290
    },
    {
      "epoch": 0.7789095266626722,
      "grad_norm": 2.8770973682403564,
      "learning_rate": 3.7431914464393784e-05,
      "loss": 0.1973,
      "step": 1300
    },
    {
      "epoch": 0.7849011384062313,
      "grad_norm": 3.322476387023926,
      "learning_rate": 3.733104700423643e-05,
      "loss": 0.2165,
      "step": 1310
    },
    {
      "epoch": 0.7908927501497903,
      "grad_norm": 3.7058589458465576,
      "learning_rate": 3.723017954407908e-05,
      "loss": 0.2101,
      "step": 1320
    },
    {
      "epoch": 0.7968843618933493,
      "grad_norm": 3.665559768676758,
      "learning_rate": 3.712931208392173e-05,
      "loss": 0.2365,
      "step": 1330
    },
    {
      "epoch": 0.8028759736369083,
      "grad_norm": 2.0677387714385986,
      "learning_rate": 3.702844462376437e-05,
      "loss": 0.2019,
      "step": 1340
    },
    {
      "epoch": 0.8088675853804673,
      "grad_norm": 3.7458584308624268,
      "learning_rate": 3.692757716360702e-05,
      "loss": 0.1937,
      "step": 1350
    },
    {
      "epoch": 0.8148591971240263,
      "grad_norm": 3.561161994934082,
      "learning_rate": 3.682670970344967e-05,
      "loss": 0.2034,
      "step": 1360
    },
    {
      "epoch": 0.8208508088675854,
      "grad_norm": 3.4886398315429688,
      "learning_rate": 3.672584224329231e-05,
      "loss": 0.2074,
      "step": 1370
    },
    {
      "epoch": 0.8268424206111444,
      "grad_norm": 2.348358631134033,
      "learning_rate": 3.662497478313496e-05,
      "loss": 0.2011,
      "step": 1380
    },
    {
      "epoch": 0.8328340323547034,
      "grad_norm": 4.720375061035156,
      "learning_rate": 3.652410732297761e-05,
      "loss": 0.2075,
      "step": 1390
    },
    {
      "epoch": 0.8388256440982624,
      "grad_norm": 2.795454740524292,
      "learning_rate": 3.642323986282026e-05,
      "loss": 0.2136,
      "step": 1400
    },
    {
      "epoch": 0.8448172558418214,
      "grad_norm": 1.7271804809570312,
      "learning_rate": 3.63223724026629e-05,
      "loss": 0.2004,
      "step": 1410
    },
    {
      "epoch": 0.8508088675853804,
      "grad_norm": 3.5545125007629395,
      "learning_rate": 3.622150494250555e-05,
      "loss": 0.1925,
      "step": 1420
    },
    {
      "epoch": 0.8568004793289394,
      "grad_norm": 2.8007822036743164,
      "learning_rate": 3.61206374823482e-05,
      "loss": 0.2042,
      "step": 1430
    },
    {
      "epoch": 0.8627920910724985,
      "grad_norm": 2.51857852935791,
      "learning_rate": 3.601977002219084e-05,
      "loss": 0.1843,
      "step": 1440
    },
    {
      "epoch": 0.8687837028160575,
      "grad_norm": 2.5232300758361816,
      "learning_rate": 3.591890256203349e-05,
      "loss": 0.1921,
      "step": 1450
    },
    {
      "epoch": 0.8747753145596165,
      "grad_norm": 3.585514783859253,
      "learning_rate": 3.581803510187613e-05,
      "loss": 0.2168,
      "step": 1460
    },
    {
      "epoch": 0.8807669263031755,
      "grad_norm": 3.1116299629211426,
      "learning_rate": 3.571716764171878e-05,
      "loss": 0.2088,
      "step": 1470
    },
    {
      "epoch": 0.8867585380467345,
      "grad_norm": 2.461395263671875,
      "learning_rate": 3.561630018156143e-05,
      "loss": 0.1727,
      "step": 1480
    },
    {
      "epoch": 0.8927501497902935,
      "grad_norm": 3.6343369483947754,
      "learning_rate": 3.551543272140408e-05,
      "loss": 0.2092,
      "step": 1490
    },
    {
      "epoch": 0.8987417615338527,
      "grad_norm": 3.2808003425598145,
      "learning_rate": 3.541456526124672e-05,
      "loss": 0.2121,
      "step": 1500
    },
    {
      "epoch": 0.9047333732774117,
      "grad_norm": 2.9965360164642334,
      "learning_rate": 3.531369780108937e-05,
      "loss": 0.1769,
      "step": 1510
    },
    {
      "epoch": 0.9107249850209707,
      "grad_norm": 3.1718783378601074,
      "learning_rate": 3.521283034093202e-05,
      "loss": 0.2018,
      "step": 1520
    },
    {
      "epoch": 0.9167165967645297,
      "grad_norm": 3.313483238220215,
      "learning_rate": 3.511196288077466e-05,
      "loss": 0.2089,
      "step": 1530
    },
    {
      "epoch": 0.9227082085080887,
      "grad_norm": 4.256625652313232,
      "learning_rate": 3.501109542061731e-05,
      "loss": 0.2107,
      "step": 1540
    },
    {
      "epoch": 0.9286998202516477,
      "grad_norm": 4.17952299118042,
      "learning_rate": 3.491022796045996e-05,
      "loss": 0.1871,
      "step": 1550
    },
    {
      "epoch": 0.9346914319952067,
      "grad_norm": 2.3444721698760986,
      "learning_rate": 3.48093605003026e-05,
      "loss": 0.1679,
      "step": 1560
    },
    {
      "epoch": 0.9406830437387658,
      "grad_norm": 3.664823293685913,
      "learning_rate": 3.470849304014525e-05,
      "loss": 0.1886,
      "step": 1570
    },
    {
      "epoch": 0.9466746554823248,
      "grad_norm": 1.821067214012146,
      "learning_rate": 3.460762557998789e-05,
      "loss": 0.1583,
      "step": 1580
    },
    {
      "epoch": 0.9526662672258838,
      "grad_norm": 4.162514686584473,
      "learning_rate": 3.450675811983055e-05,
      "loss": 0.1845,
      "step": 1590
    },
    {
      "epoch": 0.9586578789694428,
      "grad_norm": 3.205587387084961,
      "learning_rate": 3.440589065967319e-05,
      "loss": 0.2166,
      "step": 1600
    },
    {
      "epoch": 0.9646494907130018,
      "grad_norm": 2.805971622467041,
      "learning_rate": 3.430502319951584e-05,
      "loss": 0.1621,
      "step": 1610
    },
    {
      "epoch": 0.9706411024565608,
      "grad_norm": 2.5269038677215576,
      "learning_rate": 3.420415573935848e-05,
      "loss": 0.1698,
      "step": 1620
    },
    {
      "epoch": 0.9766327142001199,
      "grad_norm": 2.486302375793457,
      "learning_rate": 3.410328827920113e-05,
      "loss": 0.2071,
      "step": 1630
    },
    {
      "epoch": 0.9826243259436789,
      "grad_norm": 3.5762979984283447,
      "learning_rate": 3.400242081904378e-05,
      "loss": 0.182,
      "step": 1640
    },
    {
      "epoch": 0.9886159376872379,
      "grad_norm": 2.278040885925293,
      "learning_rate": 3.390155335888642e-05,
      "loss": 0.1826,
      "step": 1650
    },
    {
      "epoch": 0.9946075494307969,
      "grad_norm": 2.813933849334717,
      "learning_rate": 3.380068589872907e-05,
      "loss": 0.1842,
      "step": 1660
    },
    {
      "epoch": 1.0005991611743559,
      "grad_norm": 2.1713950634002686,
      "learning_rate": 3.369981843857172e-05,
      "loss": 0.1858,
      "step": 1670
    },
    {
      "epoch": 1.0065907729179149,
      "grad_norm": 2.670079469680786,
      "learning_rate": 3.359895097841437e-05,
      "loss": 0.1146,
      "step": 1680
    },
    {
      "epoch": 1.0125823846614739,
      "grad_norm": 1.8872461318969727,
      "learning_rate": 3.349808351825701e-05,
      "loss": 0.1229,
      "step": 1690
    },
    {
      "epoch": 1.0185739964050329,
      "grad_norm": 2.9682705402374268,
      "learning_rate": 3.339721605809965e-05,
      "loss": 0.1212,
      "step": 1700
    },
    {
      "epoch": 1.0245656081485919,
      "grad_norm": 2.689821720123291,
      "learning_rate": 3.329634859794231e-05,
      "loss": 0.1404,
      "step": 1710
    },
    {
      "epoch": 1.030557219892151,
      "grad_norm": 2.6816999912261963,
      "learning_rate": 3.319548113778495e-05,
      "loss": 0.144,
      "step": 1720
    },
    {
      "epoch": 1.03654883163571,
      "grad_norm": 2.6932919025421143,
      "learning_rate": 3.30946136776276e-05,
      "loss": 0.135,
      "step": 1730
    },
    {
      "epoch": 1.042540443379269,
      "grad_norm": 2.344484567642212,
      "learning_rate": 3.299374621747024e-05,
      "loss": 0.1101,
      "step": 1740
    },
    {
      "epoch": 1.048532055122828,
      "grad_norm": 3.399644613265991,
      "learning_rate": 3.28928787573129e-05,
      "loss": 0.1328,
      "step": 1750
    },
    {
      "epoch": 1.054523666866387,
      "grad_norm": 2.168858289718628,
      "learning_rate": 3.279201129715554e-05,
      "loss": 0.1184,
      "step": 1760
    },
    {
      "epoch": 1.060515278609946,
      "grad_norm": 2.3398423194885254,
      "learning_rate": 3.269114383699818e-05,
      "loss": 0.1464,
      "step": 1770
    },
    {
      "epoch": 1.066506890353505,
      "grad_norm": 2.287614107131958,
      "learning_rate": 3.259027637684083e-05,
      "loss": 0.1323,
      "step": 1780
    },
    {
      "epoch": 1.072498502097064,
      "grad_norm": 3.1191556453704834,
      "learning_rate": 3.248940891668348e-05,
      "loss": 0.1314,
      "step": 1790
    },
    {
      "epoch": 1.078490113840623,
      "grad_norm": 2.123906135559082,
      "learning_rate": 3.238854145652613e-05,
      "loss": 0.1254,
      "step": 1800
    },
    {
      "epoch": 1.084481725584182,
      "grad_norm": 2.0637898445129395,
      "learning_rate": 3.228767399636877e-05,
      "loss": 0.1296,
      "step": 1810
    },
    {
      "epoch": 1.090473337327741,
      "grad_norm": 3.5648674964904785,
      "learning_rate": 3.218680653621142e-05,
      "loss": 0.1445,
      "step": 1820
    },
    {
      "epoch": 1.0964649490713,
      "grad_norm": 3.08638334274292,
      "learning_rate": 3.208593907605407e-05,
      "loss": 0.122,
      "step": 1830
    },
    {
      "epoch": 1.102456560814859,
      "grad_norm": 2.7976272106170654,
      "learning_rate": 3.198507161589671e-05,
      "loss": 0.1176,
      "step": 1840
    },
    {
      "epoch": 1.1084481725584183,
      "grad_norm": 2.4950814247131348,
      "learning_rate": 3.188420415573936e-05,
      "loss": 0.1516,
      "step": 1850
    },
    {
      "epoch": 1.1144397843019773,
      "grad_norm": 3.1080756187438965,
      "learning_rate": 3.1783336695582e-05,
      "loss": 0.1309,
      "step": 1860
    },
    {
      "epoch": 1.1204313960455363,
      "grad_norm": 1.801734209060669,
      "learning_rate": 3.168246923542466e-05,
      "loss": 0.1276,
      "step": 1870
    },
    {
      "epoch": 1.1264230077890953,
      "grad_norm": 1.831494927406311,
      "learning_rate": 3.15816017752673e-05,
      "loss": 0.1353,
      "step": 1880
    },
    {
      "epoch": 1.1324146195326543,
      "grad_norm": 2.9599852561950684,
      "learning_rate": 3.148073431510994e-05,
      "loss": 0.1241,
      "step": 1890
    },
    {
      "epoch": 1.1384062312762133,
      "grad_norm": 1.5874921083450317,
      "learning_rate": 3.137986685495259e-05,
      "loss": 0.1259,
      "step": 1900
    },
    {
      "epoch": 1.1443978430197723,
      "grad_norm": 2.19362211227417,
      "learning_rate": 3.127899939479524e-05,
      "loss": 0.1177,
      "step": 1910
    },
    {
      "epoch": 1.1503894547633313,
      "grad_norm": 1.6142268180847168,
      "learning_rate": 3.117813193463789e-05,
      "loss": 0.1196,
      "step": 1920
    },
    {
      "epoch": 1.1563810665068903,
      "grad_norm": 3.0195200443267822,
      "learning_rate": 3.107726447448053e-05,
      "loss": 0.1235,
      "step": 1930
    },
    {
      "epoch": 1.1623726782504493,
      "grad_norm": 2.401179075241089,
      "learning_rate": 3.097639701432318e-05,
      "loss": 0.1349,
      "step": 1940
    },
    {
      "epoch": 1.1683642899940083,
      "grad_norm": 3.15936279296875,
      "learning_rate": 3.087552955416583e-05,
      "loss": 0.1273,
      "step": 1950
    },
    {
      "epoch": 1.1743559017375673,
      "grad_norm": 2.6735379695892334,
      "learning_rate": 3.077466209400847e-05,
      "loss": 0.1003,
      "step": 1960
    },
    {
      "epoch": 1.1803475134811263,
      "grad_norm": 2.687624216079712,
      "learning_rate": 3.067379463385112e-05,
      "loss": 0.1053,
      "step": 1970
    },
    {
      "epoch": 1.1863391252246855,
      "grad_norm": 1.7122023105621338,
      "learning_rate": 3.057292717369377e-05,
      "loss": 0.1237,
      "step": 1980
    },
    {
      "epoch": 1.1923307369682445,
      "grad_norm": 2.6045424938201904,
      "learning_rate": 3.0472059713536415e-05,
      "loss": 0.135,
      "step": 1990
    },
    {
      "epoch": 1.1983223487118035,
      "grad_norm": 2.4615840911865234,
      "learning_rate": 3.037119225337906e-05,
      "loss": 0.1086,
      "step": 2000
    },
    {
      "epoch": 1.1983223487118035,
      "eval_loss": 0.3957676291465759,
      "eval_runtime": 1529.837,
      "eval_samples_per_second": 0.927,
      "eval_steps_per_second": 0.058,
      "eval_wer": 0.42359088572897036,
      "step": 2000
    },
    {
      "epoch": 1.2043139604553625,
      "grad_norm": 2.78117036819458,
      "learning_rate": 3.027032479322171e-05,
      "loss": 0.1399,
      "step": 2010
    },
    {
      "epoch": 1.2103055721989215,
      "grad_norm": 3.1339714527130127,
      "learning_rate": 3.0169457333064355e-05,
      "loss": 0.1489,
      "step": 2020
    },
    {
      "epoch": 1.2162971839424805,
      "grad_norm": 2.7637593746185303,
      "learning_rate": 3.0068589872907e-05,
      "loss": 0.1129,
      "step": 2030
    },
    {
      "epoch": 1.2222887956860395,
      "grad_norm": 2.9713125228881836,
      "learning_rate": 2.996772241274965e-05,
      "loss": 0.1118,
      "step": 2040
    },
    {
      "epoch": 1.2282804074295985,
      "grad_norm": 3.4637749195098877,
      "learning_rate": 2.9866854952592295e-05,
      "loss": 0.1173,
      "step": 2050
    },
    {
      "epoch": 1.2342720191731575,
      "grad_norm": 3.3388004302978516,
      "learning_rate": 2.9765987492434944e-05,
      "loss": 0.1347,
      "step": 2060
    },
    {
      "epoch": 1.2402636309167165,
      "grad_norm": 1.4394806623458862,
      "learning_rate": 2.966512003227759e-05,
      "loss": 0.1301,
      "step": 2070
    },
    {
      "epoch": 1.2462552426602755,
      "grad_norm": 2.6756908893585205,
      "learning_rate": 2.956425257212024e-05,
      "loss": 0.1267,
      "step": 2080
    },
    {
      "epoch": 1.2522468544038348,
      "grad_norm": 3.740405559539795,
      "learning_rate": 2.9463385111962884e-05,
      "loss": 0.1339,
      "step": 2090
    },
    {
      "epoch": 1.2582384661473935,
      "grad_norm": 2.376574993133545,
      "learning_rate": 2.9362517651805526e-05,
      "loss": 0.1471,
      "step": 2100
    },
    {
      "epoch": 1.2642300778909528,
      "grad_norm": 2.841569662094116,
      "learning_rate": 2.9261650191648175e-05,
      "loss": 0.107,
      "step": 2110
    },
    {
      "epoch": 1.2702216896345118,
      "grad_norm": 2.9141831398010254,
      "learning_rate": 2.916078273149082e-05,
      "loss": 0.1216,
      "step": 2120
    },
    {
      "epoch": 1.2762133013780708,
      "grad_norm": 2.759247303009033,
      "learning_rate": 2.905991527133347e-05,
      "loss": 0.1326,
      "step": 2130
    },
    {
      "epoch": 1.2822049131216298,
      "grad_norm": 2.4907028675079346,
      "learning_rate": 2.8959047811176115e-05,
      "loss": 0.1193,
      "step": 2140
    },
    {
      "epoch": 1.2881965248651888,
      "grad_norm": 3.2932450771331787,
      "learning_rate": 2.8858180351018764e-05,
      "loss": 0.1218,
      "step": 2150
    },
    {
      "epoch": 1.2941881366087478,
      "grad_norm": 2.4969570636749268,
      "learning_rate": 2.875731289086141e-05,
      "loss": 0.1111,
      "step": 2160
    },
    {
      "epoch": 1.3001797483523068,
      "grad_norm": 2.1168346405029297,
      "learning_rate": 2.8656445430704055e-05,
      "loss": 0.1267,
      "step": 2170
    },
    {
      "epoch": 1.3061713600958658,
      "grad_norm": 2.1791086196899414,
      "learning_rate": 2.8555577970546704e-05,
      "loss": 0.1269,
      "step": 2180
    },
    {
      "epoch": 1.3121629718394248,
      "grad_norm": 2.5179903507232666,
      "learning_rate": 2.845471051038935e-05,
      "loss": 0.1088,
      "step": 2190
    },
    {
      "epoch": 1.3181545835829838,
      "grad_norm": 2.5732688903808594,
      "learning_rate": 2.8353843050232e-05,
      "loss": 0.1181,
      "step": 2200
    },
    {
      "epoch": 1.3241461953265428,
      "grad_norm": 2.7886123657226562,
      "learning_rate": 2.8252975590074644e-05,
      "loss": 0.1242,
      "step": 2210
    },
    {
      "epoch": 1.330137807070102,
      "grad_norm": 3.418480396270752,
      "learning_rate": 2.8152108129917286e-05,
      "loss": 0.1219,
      "step": 2220
    },
    {
      "epoch": 1.3361294188136608,
      "grad_norm": 1.7312183380126953,
      "learning_rate": 2.805124066975994e-05,
      "loss": 0.1096,
      "step": 2230
    },
    {
      "epoch": 1.34212103055722,
      "grad_norm": 1.4882137775421143,
      "learning_rate": 2.795037320960258e-05,
      "loss": 0.1034,
      "step": 2240
    },
    {
      "epoch": 1.348112642300779,
      "grad_norm": 3.208034038543701,
      "learning_rate": 2.7849505749445233e-05,
      "loss": 0.1288,
      "step": 2250
    },
    {
      "epoch": 1.354104254044338,
      "grad_norm": 2.543548583984375,
      "learning_rate": 2.7748638289287875e-05,
      "loss": 0.0991,
      "step": 2260
    },
    {
      "epoch": 1.360095865787897,
      "grad_norm": 2.1029109954833984,
      "learning_rate": 2.7647770829130528e-05,
      "loss": 0.12,
      "step": 2270
    },
    {
      "epoch": 1.366087477531456,
      "grad_norm": 1.8528416156768799,
      "learning_rate": 2.754690336897317e-05,
      "loss": 0.1116,
      "step": 2280
    },
    {
      "epoch": 1.372079089275015,
      "grad_norm": 2.3777570724487305,
      "learning_rate": 2.7446035908815815e-05,
      "loss": 0.143,
      "step": 2290
    },
    {
      "epoch": 1.378070701018574,
      "grad_norm": 2.3329262733459473,
      "learning_rate": 2.7345168448658464e-05,
      "loss": 0.1067,
      "step": 2300
    },
    {
      "epoch": 1.384062312762133,
      "grad_norm": 1.7974488735198975,
      "learning_rate": 2.724430098850111e-05,
      "loss": 0.1287,
      "step": 2310
    },
    {
      "epoch": 1.390053924505692,
      "grad_norm": 2.3591809272766113,
      "learning_rate": 2.714343352834376e-05,
      "loss": 0.1142,
      "step": 2320
    },
    {
      "epoch": 1.396045536249251,
      "grad_norm": 1.9029614925384521,
      "learning_rate": 2.7042566068186404e-05,
      "loss": 0.1204,
      "step": 2330
    },
    {
      "epoch": 1.40203714799281,
      "grad_norm": 2.7638425827026367,
      "learning_rate": 2.6941698608029053e-05,
      "loss": 0.1188,
      "step": 2340
    },
    {
      "epoch": 1.4080287597363692,
      "grad_norm": 2.830389976501465,
      "learning_rate": 2.68408311478717e-05,
      "loss": 0.1167,
      "step": 2350
    },
    {
      "epoch": 1.414020371479928,
      "grad_norm": 2.2718679904937744,
      "learning_rate": 2.673996368771434e-05,
      "loss": 0.1286,
      "step": 2360
    },
    {
      "epoch": 1.4200119832234872,
      "grad_norm": 2.5862531661987305,
      "learning_rate": 2.6639096227556993e-05,
      "loss": 0.122,
      "step": 2370
    },
    {
      "epoch": 1.4260035949670462,
      "grad_norm": 2.345581531524658,
      "learning_rate": 2.6538228767399635e-05,
      "loss": 0.1102,
      "step": 2380
    },
    {
      "epoch": 1.4319952067106052,
      "grad_norm": 1.8267598152160645,
      "learning_rate": 2.6437361307242288e-05,
      "loss": 0.1139,
      "step": 2390
    },
    {
      "epoch": 1.4379868184541642,
      "grad_norm": 2.5517992973327637,
      "learning_rate": 2.633649384708493e-05,
      "loss": 0.1173,
      "step": 2400
    },
    {
      "epoch": 1.4439784301977232,
      "grad_norm": 2.2720143795013428,
      "learning_rate": 2.6235626386927582e-05,
      "loss": 0.1129,
      "step": 2410
    },
    {
      "epoch": 1.4499700419412822,
      "grad_norm": 2.7546370029449463,
      "learning_rate": 2.6134758926770224e-05,
      "loss": 0.1394,
      "step": 2420
    },
    {
      "epoch": 1.4559616536848412,
      "grad_norm": 3.8197317123413086,
      "learning_rate": 2.603389146661287e-05,
      "loss": 0.1276,
      "step": 2430
    },
    {
      "epoch": 1.4619532654284002,
      "grad_norm": 1.800384283065796,
      "learning_rate": 2.593302400645552e-05,
      "loss": 0.0952,
      "step": 2440
    },
    {
      "epoch": 1.4679448771719592,
      "grad_norm": 1.8722529411315918,
      "learning_rate": 2.5832156546298164e-05,
      "loss": 0.1015,
      "step": 2450
    },
    {
      "epoch": 1.4739364889155182,
      "grad_norm": 2.2280287742614746,
      "learning_rate": 2.5731289086140813e-05,
      "loss": 0.109,
      "step": 2460
    },
    {
      "epoch": 1.4799281006590772,
      "grad_norm": 2.547365665435791,
      "learning_rate": 2.563042162598346e-05,
      "loss": 0.1164,
      "step": 2470
    },
    {
      "epoch": 1.4859197124026364,
      "grad_norm": 2.6080679893493652,
      "learning_rate": 2.5529554165826108e-05,
      "loss": 0.1213,
      "step": 2480
    },
    {
      "epoch": 1.4919113241461952,
      "grad_norm": 2.250347852706909,
      "learning_rate": 2.5428686705668753e-05,
      "loss": 0.1107,
      "step": 2490
    },
    {
      "epoch": 1.4979029358897544,
      "grad_norm": 2.15417742729187,
      "learning_rate": 2.5327819245511396e-05,
      "loss": 0.132,
      "step": 2500
    },
    {
      "epoch": 1.5038945476333132,
      "grad_norm": 2.5671651363372803,
      "learning_rate": 2.5226951785354048e-05,
      "loss": 0.1218,
      "step": 2510
    },
    {
      "epoch": 1.5098861593768724,
      "grad_norm": 2.304866313934326,
      "learning_rate": 2.512608432519669e-05,
      "loss": 0.1236,
      "step": 2520
    },
    {
      "epoch": 1.5158777711204314,
      "grad_norm": 2.581188201904297,
      "learning_rate": 2.5025216865039342e-05,
      "loss": 0.1155,
      "step": 2530
    },
    {
      "epoch": 1.5218693828639904,
      "grad_norm": 2.099909782409668,
      "learning_rate": 2.4924349404881985e-05,
      "loss": 0.1271,
      "step": 2540
    },
    {
      "epoch": 1.5278609946075494,
      "grad_norm": 2.0890612602233887,
      "learning_rate": 2.4823481944724633e-05,
      "loss": 0.1107,
      "step": 2550
    },
    {
      "epoch": 1.5338526063511084,
      "grad_norm": 2.7176012992858887,
      "learning_rate": 2.472261448456728e-05,
      "loss": 0.1244,
      "step": 2560
    },
    {
      "epoch": 1.5398442180946674,
      "grad_norm": 2.306342840194702,
      "learning_rate": 2.4621747024409928e-05,
      "loss": 0.1197,
      "step": 2570
    },
    {
      "epoch": 1.5458358298382264,
      "grad_norm": 1.9637123346328735,
      "learning_rate": 2.4520879564252573e-05,
      "loss": 0.121,
      "step": 2580
    },
    {
      "epoch": 1.5518274415817856,
      "grad_norm": 2.171283006668091,
      "learning_rate": 2.442001210409522e-05,
      "loss": 0.1272,
      "step": 2590
    },
    {
      "epoch": 1.5578190533253444,
      "grad_norm": 2.2798995971679688,
      "learning_rate": 2.4319144643937865e-05,
      "loss": 0.1181,
      "step": 2600
    },
    {
      "epoch": 1.5638106650689036,
      "grad_norm": 2.048116683959961,
      "learning_rate": 2.4218277183780514e-05,
      "loss": 0.1296,
      "step": 2610
    },
    {
      "epoch": 1.5698022768124624,
      "grad_norm": 2.062462091445923,
      "learning_rate": 2.411740972362316e-05,
      "loss": 0.1019,
      "step": 2620
    },
    {
      "epoch": 1.5757938885560216,
      "grad_norm": 3.318964958190918,
      "learning_rate": 2.4016542263465808e-05,
      "loss": 0.1156,
      "step": 2630
    },
    {
      "epoch": 1.5817855002995804,
      "grad_norm": 1.8189563751220703,
      "learning_rate": 2.3915674803308454e-05,
      "loss": 0.1131,
      "step": 2640
    },
    {
      "epoch": 1.5877771120431396,
      "grad_norm": 2.640641689300537,
      "learning_rate": 2.38148073431511e-05,
      "loss": 0.1153,
      "step": 2650
    },
    {
      "epoch": 1.5937687237866986,
      "grad_norm": 2.267590045928955,
      "learning_rate": 2.3713939882993748e-05,
      "loss": 0.1276,
      "step": 2660
    },
    {
      "epoch": 1.5997603355302576,
      "grad_norm": 2.572810649871826,
      "learning_rate": 2.3613072422836394e-05,
      "loss": 0.1042,
      "step": 2670
    },
    {
      "epoch": 1.6057519472738166,
      "grad_norm": 2.604325771331787,
      "learning_rate": 2.351220496267904e-05,
      "loss": 0.1387,
      "step": 2680
    },
    {
      "epoch": 1.6117435590173756,
      "grad_norm": 1.9912638664245605,
      "learning_rate": 2.3411337502521688e-05,
      "loss": 0.1066,
      "step": 2690
    },
    {
      "epoch": 1.6177351707609346,
      "grad_norm": 2.712186336517334,
      "learning_rate": 2.3310470042364334e-05,
      "loss": 0.105,
      "step": 2700
    },
    {
      "epoch": 1.6237267825044936,
      "grad_norm": 3.0537188053131104,
      "learning_rate": 2.3209602582206983e-05,
      "loss": 0.1257,
      "step": 2710
    },
    {
      "epoch": 1.6297183942480529,
      "grad_norm": 2.5793514251708984,
      "learning_rate": 2.3108735122049628e-05,
      "loss": 0.13,
      "step": 2720
    },
    {
      "epoch": 1.6357100059916116,
      "grad_norm": 2.015709400177002,
      "learning_rate": 2.3007867661892274e-05,
      "loss": 0.1185,
      "step": 2730
    },
    {
      "epoch": 1.6417016177351709,
      "grad_norm": 2.600104331970215,
      "learning_rate": 2.2907000201734923e-05,
      "loss": 0.1208,
      "step": 2740
    },
    {
      "epoch": 1.6476932294787296,
      "grad_norm": 1.7748316526412964,
      "learning_rate": 2.2806132741577568e-05,
      "loss": 0.0968,
      "step": 2750
    },
    {
      "epoch": 1.6536848412222889,
      "grad_norm": 3.2551629543304443,
      "learning_rate": 2.2705265281420217e-05,
      "loss": 0.105,
      "step": 2760
    },
    {
      "epoch": 1.6596764529658476,
      "grad_norm": 2.5319693088531494,
      "learning_rate": 2.2604397821262863e-05,
      "loss": 0.1035,
      "step": 2770
    },
    {
      "epoch": 1.6656680647094069,
      "grad_norm": 2.2642369270324707,
      "learning_rate": 2.2503530361105508e-05,
      "loss": 0.1082,
      "step": 2780
    },
    {
      "epoch": 1.6716596764529659,
      "grad_norm": 2.3224713802337646,
      "learning_rate": 2.2402662900948154e-05,
      "loss": 0.1113,
      "step": 2790
    },
    {
      "epoch": 1.6776512881965249,
      "grad_norm": 1.8430181741714478,
      "learning_rate": 2.2301795440790803e-05,
      "loss": 0.0963,
      "step": 2800
    },
    {
      "epoch": 1.6836428999400839,
      "grad_norm": 2.5304131507873535,
      "learning_rate": 2.2200927980633448e-05,
      "loss": 0.1057,
      "step": 2810
    },
    {
      "epoch": 1.6896345116836429,
      "grad_norm": 1.7761938571929932,
      "learning_rate": 2.2100060520476097e-05,
      "loss": 0.1158,
      "step": 2820
    },
    {
      "epoch": 1.6956261234272019,
      "grad_norm": 2.3258302211761475,
      "learning_rate": 2.1999193060318743e-05,
      "loss": 0.1345,
      "step": 2830
    },
    {
      "epoch": 1.7016177351707609,
      "grad_norm": 1.5975005626678467,
      "learning_rate": 2.1898325600161392e-05,
      "loss": 0.0934,
      "step": 2840
    },
    {
      "epoch": 1.70760934691432,
      "grad_norm": 2.0967276096343994,
      "learning_rate": 2.1797458140004034e-05,
      "loss": 0.1222,
      "step": 2850
    },
    {
      "epoch": 1.7136009586578789,
      "grad_norm": 2.689154863357544,
      "learning_rate": 2.1696590679846683e-05,
      "loss": 0.1021,
      "step": 2860
    },
    {
      "epoch": 1.719592570401438,
      "grad_norm": 2.287780523300171,
      "learning_rate": 2.159572321968933e-05,
      "loss": 0.1163,
      "step": 2870
    },
    {
      "epoch": 1.7255841821449969,
      "grad_norm": 2.54907488822937,
      "learning_rate": 2.1494855759531977e-05,
      "loss": 0.1093,
      "step": 2880
    },
    {
      "epoch": 1.731575793888556,
      "grad_norm": 2.6872503757476807,
      "learning_rate": 2.1393988299374623e-05,
      "loss": 0.1195,
      "step": 2890
    },
    {
      "epoch": 1.737567405632115,
      "grad_norm": 2.690882682800293,
      "learning_rate": 2.1293120839217272e-05,
      "loss": 0.1036,
      "step": 2900
    },
    {
      "epoch": 1.743559017375674,
      "grad_norm": 1.8093252182006836,
      "learning_rate": 2.1192253379059917e-05,
      "loss": 0.0956,
      "step": 2910
    },
    {
      "epoch": 1.749550629119233,
      "grad_norm": 2.9592061042785645,
      "learning_rate": 2.1091385918902563e-05,
      "loss": 0.1186,
      "step": 2920
    },
    {
      "epoch": 1.755542240862792,
      "grad_norm": 2.5291833877563477,
      "learning_rate": 2.099051845874521e-05,
      "loss": 0.1039,
      "step": 2930
    },
    {
      "epoch": 1.761533852606351,
      "grad_norm": 1.820418119430542,
      "learning_rate": 2.0889650998587857e-05,
      "loss": 0.0998,
      "step": 2940
    },
    {
      "epoch": 1.76752546434991,
      "grad_norm": 2.9867782592773438,
      "learning_rate": 2.0788783538430503e-05,
      "loss": 0.1226,
      "step": 2950
    },
    {
      "epoch": 1.773517076093469,
      "grad_norm": 3.5829522609710693,
      "learning_rate": 2.0687916078273152e-05,
      "loss": 0.1139,
      "step": 2960
    },
    {
      "epoch": 1.779508687837028,
      "grad_norm": 1.5877478122711182,
      "learning_rate": 2.0587048618115797e-05,
      "loss": 0.0974,
      "step": 2970
    },
    {
      "epoch": 1.7855002995805873,
      "grad_norm": 2.036705255508423,
      "learning_rate": 2.0486181157958443e-05,
      "loss": 0.1058,
      "step": 2980
    },
    {
      "epoch": 1.791491911324146,
      "grad_norm": 2.361468553543091,
      "learning_rate": 2.038531369780109e-05,
      "loss": 0.1031,
      "step": 2990
    },
    {
      "epoch": 1.7974835230677053,
      "grad_norm": 3.4837324619293213,
      "learning_rate": 2.0284446237643737e-05,
      "loss": 0.1181,
      "step": 3000
    },
    {
      "epoch": 1.7974835230677053,
      "eval_loss": 0.39589497447013855,
      "eval_runtime": 1534.4493,
      "eval_samples_per_second": 0.924,
      "eval_steps_per_second": 0.058,
      "eval_wer": 0.39158243389869224,
      "step": 3000
    },
    {
      "epoch": 1.803475134811264,
      "grad_norm": 2.0314831733703613,
      "learning_rate": 2.0183578777486383e-05,
      "loss": 0.0952,
      "step": 3010
    },
    {
      "epoch": 1.8094667465548233,
      "grad_norm": 2.6841511726379395,
      "learning_rate": 2.0082711317329032e-05,
      "loss": 0.1172,
      "step": 3020
    },
    {
      "epoch": 1.8154583582983823,
      "grad_norm": 2.687436103820801,
      "learning_rate": 1.9981843857171677e-05,
      "loss": 0.0986,
      "step": 3030
    },
    {
      "epoch": 1.8214499700419413,
      "grad_norm": 2.0085389614105225,
      "learning_rate": 1.9880976397014326e-05,
      "loss": 0.1105,
      "step": 3040
    },
    {
      "epoch": 1.8274415817855003,
      "grad_norm": 1.7776066064834595,
      "learning_rate": 1.9790195682872706e-05,
      "loss": 0.1068,
      "step": 3050
    },
    {
      "epoch": 1.8334331935290593,
      "grad_norm": 2.3934295177459717,
      "learning_rate": 1.9689328222715355e-05,
      "loss": 0.1042,
      "step": 3060
    },
    {
      "epoch": 1.8394248052726183,
      "grad_norm": 1.7487627267837524,
      "learning_rate": 1.9588460762558e-05,
      "loss": 0.0973,
      "step": 3070
    },
    {
      "epoch": 1.8454164170161773,
      "grad_norm": 1.7244535684585571,
      "learning_rate": 1.9487593302400646e-05,
      "loss": 0.1118,
      "step": 3080
    },
    {
      "epoch": 1.8514080287597365,
      "grad_norm": 3.1214139461517334,
      "learning_rate": 1.9386725842243295e-05,
      "loss": 0.1077,
      "step": 3090
    },
    {
      "epoch": 1.8573996405032953,
      "grad_norm": 2.187126398086548,
      "learning_rate": 1.928585838208594e-05,
      "loss": 0.1208,
      "step": 3100
    },
    {
      "epoch": 1.8633912522468545,
      "grad_norm": 1.8465856313705444,
      "learning_rate": 1.9184990921928586e-05,
      "loss": 0.098,
      "step": 3110
    },
    {
      "epoch": 1.8693828639904133,
      "grad_norm": 3.0469441413879395,
      "learning_rate": 1.9084123461771235e-05,
      "loss": 0.0905,
      "step": 3120
    },
    {
      "epoch": 1.8753744757339725,
      "grad_norm": 1.5280126333236694,
      "learning_rate": 1.898325600161388e-05,
      "loss": 0.0998,
      "step": 3130
    },
    {
      "epoch": 1.8813660874775313,
      "grad_norm": 1.8420058488845825,
      "learning_rate": 1.888238854145653e-05,
      "loss": 0.1108,
      "step": 3140
    },
    {
      "epoch": 1.8873576992210905,
      "grad_norm": 1.4494051933288574,
      "learning_rate": 1.8781521081299175e-05,
      "loss": 0.095,
      "step": 3150
    },
    {
      "epoch": 1.8933493109646495,
      "grad_norm": 2.12457013130188,
      "learning_rate": 1.868065362114182e-05,
      "loss": 0.1015,
      "step": 3160
    },
    {
      "epoch": 1.8993409227082085,
      "grad_norm": 1.672364592552185,
      "learning_rate": 1.8579786160984466e-05,
      "loss": 0.1107,
      "step": 3170
    },
    {
      "epoch": 1.9053325344517675,
      "grad_norm": 1.6993036270141602,
      "learning_rate": 1.8478918700827115e-05,
      "loss": 0.0929,
      "step": 3180
    },
    {
      "epoch": 1.9113241461953265,
      "grad_norm": 2.326103448867798,
      "learning_rate": 1.837805124066976e-05,
      "loss": 0.1141,
      "step": 3190
    },
    {
      "epoch": 1.9173157579388855,
      "grad_norm": 2.1768336296081543,
      "learning_rate": 1.827718378051241e-05,
      "loss": 0.0883,
      "step": 3200
    },
    {
      "epoch": 1.9233073696824445,
      "grad_norm": 1.8029934167861938,
      "learning_rate": 1.8176316320355055e-05,
      "loss": 0.0974,
      "step": 3210
    },
    {
      "epoch": 1.9292989814260038,
      "grad_norm": 1.255569338798523,
      "learning_rate": 1.8075448860197704e-05,
      "loss": 0.1032,
      "step": 3220
    },
    {
      "epoch": 1.9352905931695625,
      "grad_norm": 1.9688622951507568,
      "learning_rate": 1.7974581400040346e-05,
      "loss": 0.1133,
      "step": 3230
    },
    {
      "epoch": 1.9412822049131218,
      "grad_norm": 1.9603787660598755,
      "learning_rate": 1.7873713939882995e-05,
      "loss": 0.0965,
      "step": 3240
    },
    {
      "epoch": 1.9472738166566805,
      "grad_norm": 2.000797748565674,
      "learning_rate": 1.777284647972564e-05,
      "loss": 0.0958,
      "step": 3250
    },
    {
      "epoch": 1.9532654284002398,
      "grad_norm": 2.369920015335083,
      "learning_rate": 1.767197901956829e-05,
      "loss": 0.0887,
      "step": 3260
    },
    {
      "epoch": 1.9592570401437985,
      "grad_norm": 2.7224764823913574,
      "learning_rate": 1.7571111559410935e-05,
      "loss": 0.0886,
      "step": 3270
    },
    {
      "epoch": 1.9652486518873578,
      "grad_norm": 2.3583805561065674,
      "learning_rate": 1.7470244099253584e-05,
      "loss": 0.1129,
      "step": 3280
    },
    {
      "epoch": 1.9712402636309168,
      "grad_norm": 1.8538249731063843,
      "learning_rate": 1.7369376639096226e-05,
      "loss": 0.1154,
      "step": 3290
    },
    {
      "epoch": 1.9772318753744758,
      "grad_norm": 3.2503836154937744,
      "learning_rate": 1.7268509178938875e-05,
      "loss": 0.0976,
      "step": 3300
    },
    {
      "epoch": 1.9832234871180348,
      "grad_norm": 2.481663703918457,
      "learning_rate": 1.716764171878152e-05,
      "loss": 0.1121,
      "step": 3310
    },
    {
      "epoch": 1.9892150988615938,
      "grad_norm": 1.6555776596069336,
      "learning_rate": 1.706677425862417e-05,
      "loss": 0.0901,
      "step": 3320
    },
    {
      "epoch": 1.9952067106051528,
      "grad_norm": 2.6977438926696777,
      "learning_rate": 1.6965906798466815e-05,
      "loss": 0.1068,
      "step": 3330
    },
    {
      "epoch": 2.0011983223487118,
      "grad_norm": 1.913468599319458,
      "learning_rate": 1.6865039338309464e-05,
      "loss": 0.086,
      "step": 3340
    },
    {
      "epoch": 2.007189934092271,
      "grad_norm": 1.3262066841125488,
      "learning_rate": 1.676417187815211e-05,
      "loss": 0.0658,
      "step": 3350
    },
    {
      "epoch": 2.0131815458358298,
      "grad_norm": 1.445493459701538,
      "learning_rate": 1.6663304417994755e-05,
      "loss": 0.0605,
      "step": 3360
    },
    {
      "epoch": 2.019173157579389,
      "grad_norm": 1.2603121995925903,
      "learning_rate": 1.65624369578374e-05,
      "loss": 0.0566,
      "step": 3370
    },
    {
      "epoch": 2.0251647693229478,
      "grad_norm": 1.7415363788604736,
      "learning_rate": 1.646156949768005e-05,
      "loss": 0.0543,
      "step": 3380
    },
    {
      "epoch": 2.031156381066507,
      "grad_norm": 1.135882019996643,
      "learning_rate": 1.6360702037522695e-05,
      "loss": 0.0547,
      "step": 3390
    },
    {
      "epoch": 2.0371479928100658,
      "grad_norm": 1.364913821220398,
      "learning_rate": 1.6259834577365344e-05,
      "loss": 0.0494,
      "step": 3400
    },
    {
      "epoch": 2.043139604553625,
      "grad_norm": 2.1429061889648438,
      "learning_rate": 1.615896711720799e-05,
      "loss": 0.0599,
      "step": 3410
    },
    {
      "epoch": 2.0491312162971838,
      "grad_norm": 2.314098596572876,
      "learning_rate": 1.605809965705064e-05,
      "loss": 0.0562,
      "step": 3420
    },
    {
      "epoch": 2.055122828040743,
      "grad_norm": 1.4434194564819336,
      "learning_rate": 1.595723219689328e-05,
      "loss": 0.0568,
      "step": 3430
    },
    {
      "epoch": 2.061114439784302,
      "grad_norm": 2.2602288722991943,
      "learning_rate": 1.585636473673593e-05,
      "loss": 0.0539,
      "step": 3440
    },
    {
      "epoch": 2.067106051527861,
      "grad_norm": 1.6304532289505005,
      "learning_rate": 1.5755497276578575e-05,
      "loss": 0.0532,
      "step": 3450
    },
    {
      "epoch": 2.07309766327142,
      "grad_norm": 2.1285336017608643,
      "learning_rate": 1.5654629816421224e-05,
      "loss": 0.0608,
      "step": 3460
    },
    {
      "epoch": 2.079089275014979,
      "grad_norm": 1.5324342250823975,
      "learning_rate": 1.555376235626387e-05,
      "loss": 0.056,
      "step": 3470
    },
    {
      "epoch": 2.085080886758538,
      "grad_norm": 2.044532060623169,
      "learning_rate": 1.545289489610652e-05,
      "loss": 0.0544,
      "step": 3480
    },
    {
      "epoch": 2.091072498502097,
      "grad_norm": 1.922879934310913,
      "learning_rate": 1.535202743594916e-05,
      "loss": 0.0624,
      "step": 3490
    },
    {
      "epoch": 2.097064110245656,
      "grad_norm": 2.054161310195923,
      "learning_rate": 1.525115997579181e-05,
      "loss": 0.0573,
      "step": 3500
    },
    {
      "epoch": 2.103055721989215,
      "grad_norm": 1.7563127279281616,
      "learning_rate": 1.5150292515634457e-05,
      "loss": 0.0689,
      "step": 3510
    },
    {
      "epoch": 2.109047333732774,
      "grad_norm": 1.2950539588928223,
      "learning_rate": 1.5049425055477104e-05,
      "loss": 0.0572,
      "step": 3520
    },
    {
      "epoch": 2.115038945476333,
      "grad_norm": 2.48503041267395,
      "learning_rate": 1.4948557595319751e-05,
      "loss": 0.0584,
      "step": 3530
    },
    {
      "epoch": 2.121030557219892,
      "grad_norm": 3.5458927154541016,
      "learning_rate": 1.4847690135162399e-05,
      "loss": 0.0591,
      "step": 3540
    },
    {
      "epoch": 2.127022168963451,
      "grad_norm": 1.8503930568695068,
      "learning_rate": 1.4746822675005046e-05,
      "loss": 0.0525,
      "step": 3550
    },
    {
      "epoch": 2.13301378070701,
      "grad_norm": 1.5099945068359375,
      "learning_rate": 1.464595521484769e-05,
      "loss": 0.059,
      "step": 3560
    },
    {
      "epoch": 2.139005392450569,
      "grad_norm": 1.6359002590179443,
      "learning_rate": 1.4545087754690337e-05,
      "loss": 0.0482,
      "step": 3570
    },
    {
      "epoch": 2.144997004194128,
      "grad_norm": 1.9500666856765747,
      "learning_rate": 1.4444220294532984e-05,
      "loss": 0.0596,
      "step": 3580
    },
    {
      "epoch": 2.1509886159376874,
      "grad_norm": 1.7838146686553955,
      "learning_rate": 1.4343352834375632e-05,
      "loss": 0.0574,
      "step": 3590
    },
    {
      "epoch": 2.156980227681246,
      "grad_norm": 1.5071059465408325,
      "learning_rate": 1.4242485374218279e-05,
      "loss": 0.0424,
      "step": 3600
    },
    {
      "epoch": 2.1629718394248054,
      "grad_norm": 2.2160496711730957,
      "learning_rate": 1.4141617914060926e-05,
      "loss": 0.0608,
      "step": 3610
    },
    {
      "epoch": 2.168963451168364,
      "grad_norm": 1.6579121351242065,
      "learning_rate": 1.404075045390357e-05,
      "loss": 0.0547,
      "step": 3620
    },
    {
      "epoch": 2.1749550629119234,
      "grad_norm": 2.632821798324585,
      "learning_rate": 1.3939882993746217e-05,
      "loss": 0.0779,
      "step": 3630
    },
    {
      "epoch": 2.180946674655482,
      "grad_norm": 1.9470138549804688,
      "learning_rate": 1.3839015533588864e-05,
|
|
"loss": 0.0596,
|
|
"step": 3640
|
|
},
|
|
{
|
|
"epoch": 2.1869382863990414,
|
|
"grad_norm": 1.6838595867156982,
|
|
"learning_rate": 1.3738148073431512e-05,
|
|
"loss": 0.051,
|
|
"step": 3650
|
|
},
|
|
{
|
|
"epoch": 2.1929298981426,
|
|
"grad_norm": 1.5005348920822144,
|
|
"learning_rate": 1.3637280613274159e-05,
|
|
"loss": 0.061,
|
|
"step": 3660
|
|
},
|
|
{
|
|
"epoch": 2.1989215098861594,
|
|
"grad_norm": 1.6386892795562744,
|
|
"learning_rate": 1.3536413153116806e-05,
|
|
"loss": 0.0482,
|
|
"step": 3670
|
|
},
|
|
{
|
|
"epoch": 2.204913121629718,
|
|
"grad_norm": 1.9598783254623413,
|
|
"learning_rate": 1.3435545692959453e-05,
|
|
"loss": 0.0551,
|
|
"step": 3680
|
|
},
|
|
{
|
|
"epoch": 2.2109047333732774,
|
|
"grad_norm": 2.5483479499816895,
|
|
"learning_rate": 1.3334678232802097e-05,
|
|
"loss": 0.0664,
|
|
"step": 3690
|
|
},
|
|
{
|
|
"epoch": 2.2168963451168366,
|
|
"grad_norm": 1.7083418369293213,
|
|
"learning_rate": 1.3233810772644744e-05,
|
|
"loss": 0.08,
|
|
"step": 3700
|
|
},
|
|
{
|
|
"epoch": 2.2228879568603954,
|
|
"grad_norm": 1.9843679666519165,
|
|
"learning_rate": 1.3132943312487392e-05,
|
|
"loss": 0.0531,
|
|
"step": 3710
|
|
},
|
|
{
|
|
"epoch": 2.2288795686039546,
|
|
"grad_norm": 1.2589327096939087,
|
|
"learning_rate": 1.3032075852330039e-05,
|
|
"loss": 0.0569,
|
|
"step": 3720
|
|
},
|
|
{
|
|
"epoch": 2.2348711803475134,
|
|
"grad_norm": 2.206603527069092,
|
|
"learning_rate": 1.2931208392172686e-05,
|
|
"loss": 0.052,
|
|
"step": 3730
|
|
},
|
|
{
|
|
"epoch": 2.2408627920910726,
|
|
"grad_norm": 1.1368589401245117,
|
|
"learning_rate": 1.2830340932015333e-05,
|
|
"loss": 0.0607,
|
|
"step": 3740
|
|
},
|
|
{
|
|
"epoch": 2.2468544038346314,
|
|
"grad_norm": 1.757006049156189,
|
|
"learning_rate": 1.272947347185798e-05,
|
|
"loss": 0.0376,
|
|
"step": 3750
|
|
},
|
|
{
|
|
"epoch": 2.2528460155781906,
|
|
"grad_norm": 1.6406621932983398,
|
|
"learning_rate": 1.2628606011700625e-05,
|
|
"loss": 0.0696,
|
|
"step": 3760
|
|
},
|
|
{
|
|
"epoch": 2.2588376273217494,
|
|
"grad_norm": 1.9685550928115845,
|
|
"learning_rate": 1.2527738551543272e-05,
|
|
"loss": 0.0531,
|
|
"step": 3770
|
|
},
|
|
{
|
|
"epoch": 2.2648292390653086,
|
|
"grad_norm": 1.26189386844635,
|
|
"learning_rate": 1.2426871091385919e-05,
|
|
"loss": 0.0542,
|
|
"step": 3780
|
|
},
|
|
{
|
|
"epoch": 2.2708208508088674,
|
|
"grad_norm": 2.2154734134674072,
|
|
"learning_rate": 1.2326003631228566e-05,
|
|
"loss": 0.0647,
|
|
"step": 3790
|
|
},
|
|
{
|
|
"epoch": 2.2768124625524266,
|
|
"grad_norm": 1.3167383670806885,
|
|
"learning_rate": 1.2225136171071214e-05,
|
|
"loss": 0.0536,
|
|
"step": 3800
|
|
},
|
|
{
|
|
"epoch": 2.282804074295986,
|
|
"grad_norm": 3.2605197429656982,
|
|
"learning_rate": 1.2124268710913859e-05,
|
|
"loss": 0.0607,
|
|
"step": 3810
|
|
},
|
|
{
|
|
"epoch": 2.2887956860395446,
|
|
"grad_norm": 1.6796486377716064,
|
|
"learning_rate": 1.2023401250756506e-05,
|
|
"loss": 0.0556,
|
|
"step": 3820
|
|
},
|
|
{
|
|
"epoch": 2.2947872977831034,
|
|
"grad_norm": 1.8123384714126587,
|
|
"learning_rate": 1.1922533790599154e-05,
|
|
"loss": 0.0492,
|
|
"step": 3830
|
|
},
|
|
{
|
|
"epoch": 2.3007789095266626,
|
|
"grad_norm": 2.183927059173584,
|
|
"learning_rate": 1.1821666330441799e-05,
|
|
"loss": 0.0555,
|
|
"step": 3840
|
|
},
|
|
{
|
|
"epoch": 2.306770521270222,
|
|
"grad_norm": 1.6841498613357544,
|
|
"learning_rate": 1.1720798870284446e-05,
|
|
"loss": 0.0521,
|
|
"step": 3850
|
|
},
|
|
{
|
|
"epoch": 2.3127621330137806,
|
|
"grad_norm": 1.5228015184402466,
|
|
"learning_rate": 1.1619931410127094e-05,
|
|
"loss": 0.0518,
|
|
"step": 3860
|
|
},
|
|
{
|
|
"epoch": 2.31875374475734,
|
|
"grad_norm": 1.1061369180679321,
|
|
"learning_rate": 1.151906394996974e-05,
|
|
"loss": 0.0467,
|
|
"step": 3870
|
|
},
|
|
{
|
|
"epoch": 2.3247453565008986,
|
|
"grad_norm": 1.0257222652435303,
|
|
"learning_rate": 1.1418196489812386e-05,
|
|
"loss": 0.0602,
|
|
"step": 3880
|
|
},
|
|
{
|
|
"epoch": 2.330736968244458,
|
|
"grad_norm": 1.333539366722107,
|
|
"learning_rate": 1.1317329029655034e-05,
|
|
"loss": 0.065,
|
|
"step": 3890
|
|
},
|
|
{
|
|
"epoch": 2.3367285799880166,
|
|
"grad_norm": 1.477871298789978,
|
|
"learning_rate": 1.121646156949768e-05,
|
|
"loss": 0.0578,
|
|
"step": 3900
|
|
},
|
|
{
|
|
"epoch": 2.342720191731576,
|
|
"grad_norm": 1.7921220064163208,
|
|
"learning_rate": 1.1115594109340326e-05,
|
|
"loss": 0.0475,
|
|
"step": 3910
|
|
},
|
|
{
|
|
"epoch": 2.3487118034751346,
|
|
"grad_norm": 1.7656147480010986,
|
|
"learning_rate": 1.1014726649182974e-05,
|
|
"loss": 0.0602,
|
|
"step": 3920
|
|
},
|
|
{
|
|
"epoch": 2.354703415218694,
|
|
"grad_norm": 1.8412047624588013,
|
|
"learning_rate": 1.0913859189025621e-05,
|
|
"loss": 0.0641,
|
|
"step": 3930
|
|
},
|
|
{
|
|
"epoch": 2.3606950269622526,
|
|
"grad_norm": 1.4820233583450317,
|
|
"learning_rate": 1.0812991728868266e-05,
|
|
"loss": 0.0677,
|
|
"step": 3940
|
|
},
|
|
{
|
|
"epoch": 2.366686638705812,
|
|
"grad_norm": 1.366188883781433,
|
|
"learning_rate": 1.0712124268710914e-05,
|
|
"loss": 0.0587,
|
|
"step": 3950
|
|
},
|
|
{
|
|
"epoch": 2.372678250449371,
|
|
"grad_norm": 1.3289388418197632,
|
|
"learning_rate": 1.0611256808553561e-05,
|
|
"loss": 0.0573,
|
|
"step": 3960
|
|
},
|
|
{
|
|
"epoch": 2.37866986219293,
|
|
"grad_norm": 1.1990536451339722,
|
|
"learning_rate": 1.0510389348396207e-05,
|
|
"loss": 0.0515,
|
|
"step": 3970
|
|
},
|
|
{
|
|
"epoch": 2.384661473936489,
|
|
"grad_norm": 2.262007236480713,
|
|
"learning_rate": 1.0409521888238854e-05,
|
|
"loss": 0.0573,
|
|
"step": 3980
|
|
},
|
|
{
|
|
"epoch": 2.390653085680048,
|
|
"grad_norm": 1.7814624309539795,
|
|
"learning_rate": 1.0308654428081501e-05,
|
|
"loss": 0.0511,
|
|
"step": 3990
|
|
},
|
|
{
|
|
"epoch": 2.396644697423607,
|
|
"grad_norm": 1.9160945415496826,
|
|
"learning_rate": 1.0207786967924148e-05,
|
|
"loss": 0.054,
|
|
"step": 4000
|
|
},
|
|
{
|
|
"epoch": 2.396644697423607,
|
|
"eval_loss": 0.38849014043807983,
|
|
"eval_runtime": 1731.1558,
|
|
"eval_samples_per_second": 0.819,
|
|
"eval_steps_per_second": 0.051,
|
|
"eval_wer": 0.38532922163211697,
|
|
"step": 4000
|
|
},
|
|
{
|
|
"epoch": 2.402636309167166,
|
|
"grad_norm": 1.8122146129608154,
|
|
"learning_rate": 1.0106919507766796e-05,
|
|
"loss": 0.0453,
|
|
"step": 4010
|
|
},
|
|
{
|
|
"epoch": 2.408627920910725,
|
|
"grad_norm": 1.0477360486984253,
|
|
"learning_rate": 1.0006052047609441e-05,
|
|
"loss": 0.053,
|
|
"step": 4020
|
|
},
|
|
{
|
|
"epoch": 2.414619532654284,
|
|
"grad_norm": 1.326602578163147,
|
|
"learning_rate": 9.905184587452088e-06,
|
|
"loss": 0.0499,
|
|
"step": 4030
|
|
},
|
|
{
|
|
"epoch": 2.420611144397843,
|
|
"grad_norm": 1.8951810598373413,
|
|
"learning_rate": 9.804317127294736e-06,
|
|
"loss": 0.052,
|
|
"step": 4040
|
|
},
|
|
{
|
|
"epoch": 2.426602756141402,
|
|
"grad_norm": 3.2080581188201904,
|
|
"learning_rate": 9.703449667137383e-06,
|
|
"loss": 0.058,
|
|
"step": 4050
|
|
},
|
|
{
|
|
"epoch": 2.432594367884961,
|
|
"grad_norm": 2.5068373680114746,
|
|
"learning_rate": 9.602582206980028e-06,
|
|
"loss": 0.0499,
|
|
"step": 4060
|
|
},
|
|
{
|
|
"epoch": 2.4385859796285203,
|
|
"grad_norm": 1.7612242698669434,
|
|
"learning_rate": 9.501714746822676e-06,
|
|
"loss": 0.0533,
|
|
"step": 4070
|
|
},
|
|
{
|
|
"epoch": 2.444577591372079,
|
|
"grad_norm": 1.7996723651885986,
|
|
"learning_rate": 9.400847286665323e-06,
|
|
"loss": 0.0564,
|
|
"step": 4080
|
|
},
|
|
{
|
|
"epoch": 2.450569203115638,
|
|
"grad_norm": 1.415356993675232,
|
|
"learning_rate": 9.29997982650797e-06,
|
|
"loss": 0.0508,
|
|
"step": 4090
|
|
},
|
|
{
|
|
"epoch": 2.456560814859197,
|
|
"grad_norm": 1.7406309843063354,
|
|
"learning_rate": 9.199112366350616e-06,
|
|
"loss": 0.0509,
|
|
"step": 4100
|
|
},
|
|
{
|
|
"epoch": 2.4625524266027563,
|
|
"grad_norm": 1.6858899593353271,
|
|
"learning_rate": 9.098244906193263e-06,
|
|
"loss": 0.0597,
|
|
"step": 4110
|
|
},
|
|
{
|
|
"epoch": 2.468544038346315,
|
|
"grad_norm": 1.3853732347488403,
|
|
"learning_rate": 8.99737744603591e-06,
|
|
"loss": 0.0642,
|
|
"step": 4120
|
|
},
|
|
{
|
|
"epoch": 2.4745356500898743,
|
|
"grad_norm": 1.2157524824142456,
|
|
"learning_rate": 8.896509985878557e-06,
|
|
"loss": 0.0428,
|
|
"step": 4130
|
|
},
|
|
{
|
|
"epoch": 2.480527261833433,
|
|
"grad_norm": 1.7540271282196045,
|
|
"learning_rate": 8.795642525721203e-06,
|
|
"loss": 0.0466,
|
|
"step": 4140
|
|
},
|
|
{
|
|
"epoch": 2.4865188735769923,
|
|
"grad_norm": 2.2730844020843506,
|
|
"learning_rate": 8.69477506556385e-06,
|
|
"loss": 0.0554,
|
|
"step": 4150
|
|
},
|
|
{
|
|
"epoch": 2.492510485320551,
|
|
"grad_norm": 1.7424499988555908,
|
|
"learning_rate": 8.593907605406497e-06,
|
|
"loss": 0.054,
|
|
"step": 4160
|
|
},
|
|
{
|
|
"epoch": 2.4985020970641103,
|
|
"grad_norm": 2.139974594116211,
|
|
"learning_rate": 8.493040145249143e-06,
|
|
"loss": 0.0567,
|
|
"step": 4170
|
|
},
|
|
{
|
|
"epoch": 2.5044937088076695,
|
|
"grad_norm": 1.306476354598999,
|
|
"learning_rate": 8.39217268509179e-06,
|
|
"loss": 0.0482,
|
|
"step": 4180
|
|
},
|
|
{
|
|
"epoch": 2.5104853205512283,
|
|
"grad_norm": 1.921081304550171,
|
|
"learning_rate": 8.291305224934437e-06,
|
|
"loss": 0.0417,
|
|
"step": 4190
|
|
},
|
|
{
|
|
"epoch": 2.516476932294787,
|
|
"grad_norm": 2.030853271484375,
|
|
"learning_rate": 8.190437764777083e-06,
|
|
"loss": 0.0489,
|
|
"step": 4200
|
|
},
|
|
{
|
|
"epoch": 2.5224685440383463,
|
|
"grad_norm": 1.5279465913772583,
|
|
"learning_rate": 8.08957030461973e-06,
|
|
"loss": 0.0455,
|
|
"step": 4210
|
|
},
|
|
{
|
|
"epoch": 2.5284601557819055,
|
|
"grad_norm": 1.7720401287078857,
|
|
"learning_rate": 7.988702844462377e-06,
|
|
"loss": 0.0684,
|
|
"step": 4220
|
|
},
|
|
{
|
|
"epoch": 2.5344517675254643,
|
|
"grad_norm": 1.1482131481170654,
|
|
"learning_rate": 7.887835384305023e-06,
|
|
"loss": 0.0544,
|
|
"step": 4230
|
|
},
|
|
{
|
|
"epoch": 2.5404433792690235,
|
|
"grad_norm": 2.373537302017212,
|
|
"learning_rate": 7.78696792414767e-06,
|
|
"loss": 0.0709,
|
|
"step": 4240
|
|
},
|
|
{
|
|
"epoch": 2.5464349910125823,
|
|
"grad_norm": 1.1651583909988403,
|
|
"learning_rate": 7.686100463990318e-06,
|
|
"loss": 0.0465,
|
|
"step": 4250
|
|
},
|
|
{
|
|
"epoch": 2.5524266027561415,
|
|
"grad_norm": 2.0321195125579834,
|
|
"learning_rate": 7.585233003832965e-06,
|
|
"loss": 0.0465,
|
|
"step": 4260
|
|
},
|
|
{
|
|
"epoch": 2.5584182144997003,
|
|
"grad_norm": 1.7529507875442505,
|
|
"learning_rate": 7.48436554367561e-06,
|
|
"loss": 0.0487,
|
|
"step": 4270
|
|
},
|
|
{
|
|
"epoch": 2.5644098262432595,
|
|
"grad_norm": 1.5366095304489136,
|
|
"learning_rate": 7.383498083518258e-06,
|
|
"loss": 0.0592,
|
|
"step": 4280
|
|
},
|
|
{
|
|
"epoch": 2.5704014379868183,
|
|
"grad_norm": 1.4841970205307007,
|
|
"learning_rate": 7.282630623360905e-06,
|
|
"loss": 0.0469,
|
|
"step": 4290
|
|
},
|
|
{
|
|
"epoch": 2.5763930497303775,
|
|
"grad_norm": 1.1961781978607178,
|
|
"learning_rate": 7.18176316320355e-06,
|
|
"loss": 0.036,
|
|
"step": 4300
|
|
},
|
|
{
|
|
"epoch": 2.5823846614739363,
|
|
"grad_norm": 0.8614912629127502,
|
|
"learning_rate": 7.080895703046198e-06,
|
|
"loss": 0.0426,
|
|
"step": 4310
|
|
},
|
|
{
|
|
"epoch": 2.5883762732174955,
|
|
"grad_norm": 2.814953327178955,
|
|
"learning_rate": 6.980028242888845e-06,
|
|
"loss": 0.0566,
|
|
"step": 4320
|
|
},
|
|
{
|
|
"epoch": 2.5943678849610547,
|
|
"grad_norm": 2.4379405975341797,
|
|
"learning_rate": 6.87916078273149e-06,
|
|
"loss": 0.0594,
|
|
"step": 4330
|
|
},
|
|
{
|
|
"epoch": 2.6003594967046135,
|
|
"grad_norm": 1.8499358892440796,
|
|
"learning_rate": 6.778293322574138e-06,
|
|
"loss": 0.0512,
|
|
"step": 4340
|
|
},
|
|
{
|
|
"epoch": 2.6063511084481723,
|
|
"grad_norm": 1.0436636209487915,
|
|
"learning_rate": 6.677425862416785e-06,
|
|
"loss": 0.0597,
|
|
"step": 4350
|
|
},
|
|
{
|
|
"epoch": 2.6123427201917315,
|
|
"grad_norm": 1.7677953243255615,
|
|
"learning_rate": 6.576558402259432e-06,
|
|
"loss": 0.0546,
|
|
"step": 4360
|
|
},
|
|
{
|
|
"epoch": 2.6183343319352907,
|
|
"grad_norm": 2.6549458503723145,
|
|
"learning_rate": 6.475690942102078e-06,
|
|
"loss": 0.056,
|
|
"step": 4370
|
|
},
|
|
{
|
|
"epoch": 2.6243259436788495,
|
|
"grad_norm": 0.9973770976066589,
|
|
"learning_rate": 6.374823481944725e-06,
|
|
"loss": 0.0497,
|
|
"step": 4380
|
|
},
|
|
{
|
|
"epoch": 2.6303175554224087,
|
|
"grad_norm": 2.5903773307800293,
|
|
"learning_rate": 6.273956021787372e-06,
|
|
"loss": 0.0522,
|
|
"step": 4390
|
|
},
|
|
{
|
|
"epoch": 2.6363091671659675,
|
|
"grad_norm": 1.0804693698883057,
|
|
"learning_rate": 6.173088561630019e-06,
|
|
"loss": 0.0356,
|
|
"step": 4400
|
|
},
|
|
{
|
|
"epoch": 2.6423007789095267,
|
|
"grad_norm": 1.8099851608276367,
|
|
"learning_rate": 6.072221101472665e-06,
|
|
"loss": 0.051,
|
|
"step": 4410
|
|
},
|
|
{
|
|
"epoch": 2.6482923906530855,
|
|
"grad_norm": 1.1087106466293335,
|
|
"learning_rate": 5.971353641315311e-06,
|
|
"loss": 0.0587,
|
|
"step": 4420
|
|
},
|
|
{
|
|
"epoch": 2.6542840023966447,
|
|
"grad_norm": 1.498772144317627,
|
|
"learning_rate": 5.870486181157959e-06,
|
|
"loss": 0.0497,
|
|
"step": 4430
|
|
},
|
|
{
|
|
"epoch": 2.660275614140204,
|
|
"grad_norm": 1.4975790977478027,
|
|
"learning_rate": 5.769618721000605e-06,
|
|
"loss": 0.0544,
|
|
"step": 4440
|
|
},
|
|
{
|
|
"epoch": 2.6662672258837627,
|
|
"grad_norm": 1.380449652671814,
|
|
"learning_rate": 5.668751260843252e-06,
|
|
"loss": 0.0511,
|
|
"step": 4450
|
|
},
|
|
{
|
|
"epoch": 2.6722588376273215,
|
|
"grad_norm": 1.132257103919983,
|
|
"learning_rate": 5.567883800685899e-06,
|
|
"loss": 0.0397,
|
|
"step": 4460
|
|
},
|
|
{
|
|
"epoch": 2.6782504493708807,
|
|
"grad_norm": 1.5480509996414185,
|
|
"learning_rate": 5.467016340528545e-06,
|
|
"loss": 0.0461,
|
|
"step": 4470
|
|
},
|
|
{
|
|
"epoch": 2.68424206111444,
|
|
"grad_norm": 1.469515085220337,
|
|
"learning_rate": 5.366148880371192e-06,
|
|
"loss": 0.0418,
|
|
"step": 4480
|
|
},
|
|
{
|
|
"epoch": 2.6902336728579987,
|
|
"grad_norm": 1.5454354286193848,
|
|
"learning_rate": 5.265281420213839e-06,
|
|
"loss": 0.0492,
|
|
"step": 4490
|
|
},
|
|
{
|
|
"epoch": 2.696225284601558,
|
|
"grad_norm": 1.7315654754638672,
|
|
"learning_rate": 5.164413960056486e-06,
|
|
"loss": 0.0575,
|
|
"step": 4500
|
|
},
|
|
{
|
|
"epoch": 2.7022168963451167,
|
|
"grad_norm": 1.5216388702392578,
|
|
"learning_rate": 5.063546499899132e-06,
|
|
"loss": 0.0491,
|
|
"step": 4510
|
|
},
|
|
{
|
|
"epoch": 2.708208508088676,
|
|
"grad_norm": 1.5422757863998413,
|
|
"learning_rate": 4.96267903974178e-06,
|
|
"loss": 0.0541,
|
|
"step": 4520
|
|
},
|
|
{
|
|
"epoch": 2.7142001198322347,
|
|
"grad_norm": 2.1533663272857666,
|
|
"learning_rate": 4.861811579584427e-06,
|
|
"loss": 0.0492,
|
|
"step": 4530
|
|
},
|
|
{
|
|
"epoch": 2.720191731575794,
|
|
"grad_norm": 1.8315476179122925,
|
|
"learning_rate": 4.760944119427073e-06,
|
|
"loss": 0.0519,
|
|
"step": 4540
|
|
},
|
|
{
|
|
"epoch": 2.7261833433193527,
|
|
"grad_norm": 1.1926990747451782,
|
|
"learning_rate": 4.6600766592697205e-06,
|
|
"loss": 0.0417,
|
|
"step": 4550
|
|
},
|
|
{
|
|
"epoch": 2.732174955062912,
|
|
"grad_norm": 1.0338667631149292,
|
|
"learning_rate": 4.559209199112367e-06,
|
|
"loss": 0.0544,
|
|
"step": 4560
|
|
},
|
|
{
|
|
"epoch": 2.7381665668064707,
|
|
"grad_norm": 1.5781855583190918,
|
|
"learning_rate": 4.458341738955013e-06,
|
|
"loss": 0.043,
|
|
"step": 4570
|
|
},
|
|
{
|
|
"epoch": 2.74415817855003,
|
|
"grad_norm": 1.4570903778076172,
|
|
"learning_rate": 4.3574742787976605e-06,
|
|
"loss": 0.0471,
|
|
"step": 4580
|
|
},
|
|
{
|
|
"epoch": 2.750149790293589,
|
|
"grad_norm": 1.8981298208236694,
|
|
"learning_rate": 4.256606818640307e-06,
|
|
"loss": 0.0445,
|
|
"step": 4590
|
|
},
|
|
{
|
|
"epoch": 2.756141402037148,
|
|
"grad_norm": 1.8429211378097534,
|
|
"learning_rate": 4.155739358482954e-06,
|
|
"loss": 0.0247,
|
|
"step": 4600
|
|
},
|
|
{
|
|
"epoch": 2.7621330137807067,
|
|
"grad_norm": 1.007271409034729,
|
|
"learning_rate": 4.0548718983256006e-06,
|
|
"loss": 0.0408,
|
|
"step": 4610
|
|
},
|
|
{
|
|
"epoch": 2.768124625524266,
|
|
"grad_norm": 1.6639927625656128,
|
|
"learning_rate": 3.954004438168247e-06,
|
|
"loss": 0.0572,
|
|
"step": 4620
|
|
},
|
|
{
|
|
"epoch": 2.774116237267825,
|
|
"grad_norm": 2.048002243041992,
|
|
"learning_rate": 3.853136978010894e-06,
|
|
"loss": 0.0535,
|
|
"step": 4630
|
|
},
|
|
{
|
|
"epoch": 2.780107849011384,
|
|
"grad_norm": 1.2363957166671753,
|
|
"learning_rate": 3.7522695178535406e-06,
|
|
"loss": 0.0381,
|
|
"step": 4640
|
|
},
|
|
{
|
|
"epoch": 2.786099460754943,
|
|
"grad_norm": 1.5503149032592773,
|
|
"learning_rate": 3.651402057696188e-06,
|
|
"loss": 0.0606,
|
|
"step": 4650
|
|
},
|
|
{
|
|
"epoch": 2.792091072498502,
|
|
"grad_norm": 0.9487207531929016,
|
|
"learning_rate": 3.5505345975388343e-06,
|
|
"loss": 0.0441,
|
|
"step": 4660
|
|
},
|
|
{
|
|
"epoch": 2.798082684242061,
|
|
"grad_norm": 1.1120935678482056,
|
|
"learning_rate": 3.4496671373814807e-06,
|
|
"loss": 0.0504,
|
|
"step": 4670
|
|
},
|
|
{
|
|
"epoch": 2.80407429598562,
|
|
"grad_norm": 1.4742374420166016,
|
|
"learning_rate": 3.348799677224128e-06,
|
|
"loss": 0.0482,
|
|
"step": 4680
|
|
},
|
|
{
|
|
"epoch": 2.810065907729179,
|
|
"grad_norm": 1.9465519189834595,
|
|
"learning_rate": 3.2479322170667743e-06,
|
|
"loss": 0.0376,
|
|
"step": 4690
|
|
},
|
|
{
|
|
"epoch": 2.8160575194727384,
|
|
"grad_norm": 1.6899336576461792,
|
|
"learning_rate": 3.1470647569094215e-06,
|
|
"loss": 0.0459,
|
|
"step": 4700
|
|
},
|
|
{
|
|
"epoch": 2.822049131216297,
|
|
"grad_norm": 1.3006277084350586,
|
|
"learning_rate": 3.046197296752068e-06,
|
|
"loss": 0.0426,
|
|
"step": 4710
|
|
},
|
|
{
|
|
"epoch": 2.828040742959856,
|
|
"grad_norm": 1.4417216777801514,
|
|
"learning_rate": 2.9453298365947148e-06,
|
|
"loss": 0.047,
|
|
"step": 4720
|
|
},
|
|
{
|
|
"epoch": 2.834032354703415,
|
|
"grad_norm": 1.2588374614715576,
|
|
"learning_rate": 2.844462376437361e-06,
|
|
"loss": 0.0483,
|
|
"step": 4730
|
|
},
|
|
{
|
|
"epoch": 2.8400239664469744,
|
|
"grad_norm": 1.2371790409088135,
|
|
"learning_rate": 2.743594916280008e-06,
|
|
"loss": 0.0506,
|
|
"step": 4740
|
|
},
|
|
{
|
|
"epoch": 2.846015578190533,
|
|
"grad_norm": 1.0114707946777344,
|
|
"learning_rate": 2.642727456122655e-06,
|
|
"loss": 0.0378,
|
|
"step": 4750
|
|
},
|
|
{
|
|
"epoch": 2.8520071899340924,
|
|
"grad_norm": 2.299523115158081,
|
|
"learning_rate": 2.5418599959653016e-06,
|
|
"loss": 0.0602,
|
|
"step": 4760
|
|
},
|
|
{
|
|
"epoch": 2.857998801677651,
|
|
"grad_norm": 1.0991910696029663,
|
|
"learning_rate": 2.4409925358079484e-06,
|
|
"loss": 0.0536,
|
|
"step": 4770
|
|
},
|
|
{
|
|
"epoch": 2.8639904134212104,
|
|
"grad_norm": 1.8296887874603271,
|
|
"learning_rate": 2.3401250756505953e-06,
|
|
"loss": 0.0381,
|
|
"step": 4780
|
|
},
|
|
{
|
|
"epoch": 2.869982025164769,
|
|
"grad_norm": 1.669256329536438,
|
|
"learning_rate": 2.239257615493242e-06,
|
|
"loss": 0.039,
|
|
"step": 4790
|
|
},
|
|
{
|
|
"epoch": 2.8759736369083284,
|
|
"grad_norm": 1.4977699518203735,
|
|
"learning_rate": 2.138390155335889e-06,
|
|
"loss": 0.0461,
|
|
"step": 4800
|
|
},
|
|
{
|
|
"epoch": 2.8819652486518876,
|
|
"grad_norm": 1.5286149978637695,
|
|
"learning_rate": 2.0375226951785357e-06,
|
|
"loss": 0.0541,
|
|
"step": 4810
|
|
},
|
|
{
|
|
"epoch": 2.8879568603954464,
|
|
"grad_norm": 1.9209306240081787,
|
|
"learning_rate": 1.936655235021182e-06,
|
|
"loss": 0.0584,
|
|
"step": 4820
|
|
},
|
|
{
|
|
"epoch": 2.893948472139005,
|
|
"grad_norm": 2.085172653198242,
|
|
"learning_rate": 1.835787774863829e-06,
|
|
"loss": 0.0417,
|
|
"step": 4830
|
|
},
|
|
{
|
|
"epoch": 2.8999400838825644,
|
|
"grad_norm": 0.9194634556770325,
|
|
"learning_rate": 1.7349203147064758e-06,
|
|
"loss": 0.0412,
|
|
"step": 4840
|
|
},
|
|
{
|
|
"epoch": 2.9059316956261236,
|
|
"grad_norm": 0.8978023529052734,
|
|
"learning_rate": 1.6340528545491226e-06,
|
|
"loss": 0.0475,
|
|
"step": 4850
|
|
},
|
|
{
|
|
"epoch": 2.9119233073696824,
|
|
"grad_norm": 1.5397992134094238,
|
|
"learning_rate": 1.5331853943917692e-06,
|
|
"loss": 0.0411,
|
|
"step": 4860
|
|
},
|
|
{
|
|
"epoch": 2.917914919113241,
|
|
"grad_norm": 2.272191047668457,
|
|
"learning_rate": 1.432317934234416e-06,
|
|
"loss": 0.0442,
|
|
"step": 4870
|
|
},
|
|
{
|
|
"epoch": 2.9239065308568004,
|
|
"grad_norm": 1.2417062520980835,
|
|
"learning_rate": 1.3314504740770626e-06,
|
|
"loss": 0.048,
|
|
"step": 4880
|
|
},
|
|
{
|
|
"epoch": 2.9298981426003596,
|
|
"grad_norm": 0.9558309316635132,
|
|
"learning_rate": 1.2305830139197097e-06,
|
|
"loss": 0.0411,
|
|
"step": 4890
|
|
},
|
|
{
|
|
"epoch": 2.9358897543439184,
|
|
"grad_norm": 1.373119592666626,
|
|
"learning_rate": 1.1297155537623565e-06,
|
|
"loss": 0.055,
|
|
"step": 4900
|
|
},
|
|
{
|
|
"epoch": 2.9418813660874776,
|
|
"grad_norm": 1.019445538520813,
|
|
"learning_rate": 1.028848093605003e-06,
|
|
"loss": 0.0464,
|
|
"step": 4910
|
|
},
|
|
{
|
|
"epoch": 2.9478729778310364,
|
|
"grad_norm": 2.2195796966552734,
|
|
"learning_rate": 9.279806334476499e-07,
|
|
"loss": 0.0584,
|
|
"step": 4920
|
|
},
|
|
{
|
|
"epoch": 2.9538645895745956,
|
|
"grad_norm": 1.278170108795166,
|
|
"learning_rate": 8.271131732902965e-07,
|
|
"loss": 0.0334,
|
|
"step": 4930
|
|
},
|
|
{
|
|
"epoch": 2.9598562013181544,
|
|
"grad_norm": 2.264543294906616,
|
|
"learning_rate": 7.262457131329434e-07,
|
|
"loss": 0.0549,
|
|
"step": 4940
|
|
},
|
|
{
|
|
"epoch": 2.9658478130617136,
|
|
"grad_norm": 1.332661509513855,
|
|
"learning_rate": 6.253782529755901e-07,
|
|
"loss": 0.0483,
|
|
"step": 4950
|
|
},
|
|
{
|
|
"epoch": 2.971839424805273,
|
|
"grad_norm": 0.6639212965965271,
|
|
"learning_rate": 5.245107928182369e-07,
|
|
"loss": 0.0399,
|
|
"step": 4960
|
|
},
|
|
{
|
|
"epoch": 2.9778310365488316,
|
|
"grad_norm": 1.3559752702713013,
|
|
"learning_rate": 4.236433326608836e-07,
|
|
"loss": 0.0456,
|
|
"step": 4970
|
|
},
|
|
{
|
|
"epoch": 2.9838226482923904,
|
|
"grad_norm": 2.040262222290039,
|
|
"learning_rate": 3.2277587250353037e-07,
|
|
"loss": 0.0604,
|
|
"step": 4980
|
|
},
|
|
{
|
|
"epoch": 2.9898142600359496,
|
|
"grad_norm": 1.7835615873336792,
|
|
"learning_rate": 2.2190841234617714e-07,
|
|
"loss": 0.048,
|
|
"step": 4990
|
|
},
|
|
{
|
|
"epoch": 2.995805871779509,
|
|
"grad_norm": 1.4808944463729858,
|
|
"learning_rate": 1.2104095218882388e-07,
|
|
"loss": 0.05,
|
|
"step": 5000
|
|
},
|
|
{
|
|
"epoch": 2.995805871779509,
|
|
"eval_loss": 0.3848415017127991,
|
|
"eval_runtime": 2452.1783,
|
|
"eval_samples_per_second": 0.578,
|
|
"eval_steps_per_second": 0.036,
|
|
"eval_wer": 0.3763063217406202,
|
|
"step": 5000
|
|
}
|
|
],
|
|
"logging_steps": 10,
|
|
"max_steps": 5007,
|
|
"num_input_tokens_seen": 0,
|
|
"num_train_epochs": 3,
|
|
"save_steps": 1000,
|
|
"stateful_callbacks": {
|
|
"TrainerControl": {
|
|
"args": {
|
|
"should_epoch_stop": false,
|
|
"should_evaluate": false,
|
|
"should_log": false,
|
|
"should_save": true,
|
|
"should_training_stop": false
|
|
},
|
|
"attributes": {}
|
|
}
|
|
},
|
|
"total_flos": 2.30868320256e+19,
|
|
"train_batch_size": 16,
|
|
"trial_name": null,
|
|
"trial_params": null
|
|
}
|
|
|