{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 16.913319238900634,
  "eval_steps": 1000,
  "global_step": 20000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.021141649048625793,
      "grad_norm": 18.116064071655273,
      "learning_rate": 4.6000000000000004e-07,
      "loss": 1.1425,
      "step": 25
    },
    {
      "epoch": 0.042283298097251586,
      "grad_norm": 12.63290023803711,
      "learning_rate": 9.600000000000001e-07,
      "loss": 0.9953,
      "step": 50
    },
    {
      "epoch": 0.06342494714587738,
      "grad_norm": 8.746077537536621,
      "learning_rate": 1.46e-06,
      "loss": 0.7269,
      "step": 75
    },
    {
      "epoch": 0.08456659619450317,
      "grad_norm": 6.509521484375,
      "learning_rate": 1.9600000000000003e-06,
      "loss": 0.4322,
      "step": 100
    },
    {
      "epoch": 0.10570824524312897,
      "grad_norm": 6.591165542602539,
      "learning_rate": 2.46e-06,
      "loss": 0.3318,
      "step": 125
    },
    {
      "epoch": 0.12684989429175475,
      "grad_norm": 5.082007884979248,
      "learning_rate": 2.96e-06,
      "loss": 0.3381,
      "step": 150
    },
    {
      "epoch": 0.14799154334038056,
      "grad_norm": 6.309211254119873,
      "learning_rate": 3.46e-06,
      "loss": 0.314,
      "step": 175
    },
    {
      "epoch": 0.16913319238900634,
      "grad_norm": 5.363051414489746,
      "learning_rate": 3.96e-06,
      "loss": 0.3026,
      "step": 200
    },
    {
      "epoch": 0.19027484143763213,
      "grad_norm": 6.52091121673584,
      "learning_rate": 4.4600000000000005e-06,
      "loss": 0.3055,
      "step": 225
    },
    {
      "epoch": 0.21141649048625794,
      "grad_norm": 4.084770679473877,
      "learning_rate": 4.960000000000001e-06,
      "loss": 0.2737,
      "step": 250
    },
    {
      "epoch": 0.23255813953488372,
      "grad_norm": 5.420108795166016,
      "learning_rate": 5.460000000000001e-06,
      "loss": 0.2853,
      "step": 275
    },
    {
      "epoch": 0.2536997885835095,
      "grad_norm": 4.882087230682373,
      "learning_rate": 5.9600000000000005e-06,
      "loss": 0.2598,
      "step": 300
    },
    {
      "epoch": 0.2748414376321353,
      "grad_norm": 5.636622428894043,
      "learning_rate": 6.460000000000001e-06,
      "loss": 0.2711,
      "step": 325
    },
    {
      "epoch": 0.2959830866807611,
      "grad_norm": 6.7103190422058105,
      "learning_rate": 6.96e-06,
      "loss": 0.238,
      "step": 350
    },
    {
      "epoch": 0.3171247357293869,
      "grad_norm": 5.882842540740967,
      "learning_rate": 7.4600000000000006e-06,
      "loss": 0.2454,
      "step": 375
    },
    {
      "epoch": 0.3382663847780127,
      "grad_norm": 5.11025857925415,
      "learning_rate": 7.960000000000002e-06,
      "loss": 0.2611,
      "step": 400
    },
    {
      "epoch": 0.3594080338266385,
      "grad_norm": 5.972140312194824,
      "learning_rate": 8.46e-06,
      "loss": 0.2546,
      "step": 425
    },
    {
      "epoch": 0.38054968287526425,
      "grad_norm": 4.569517135620117,
      "learning_rate": 8.96e-06,
      "loss": 0.2475,
      "step": 450
    },
    {
      "epoch": 0.40169133192389006,
      "grad_norm": 5.826624393463135,
      "learning_rate": 9.460000000000001e-06,
      "loss": 0.246,
      "step": 475
    },
    {
      "epoch": 0.42283298097251587,
      "grad_norm": 6.217140197753906,
      "learning_rate": 9.960000000000001e-06,
      "loss": 0.2592,
      "step": 500
    },
    {
      "epoch": 0.4439746300211416,
      "grad_norm": 5.610385417938232,
      "learning_rate": 9.988205128205129e-06,
      "loss": 0.2435,
      "step": 525
    },
    {
      "epoch": 0.46511627906976744,
      "grad_norm": 4.2026567459106445,
      "learning_rate": 9.975384615384616e-06,
      "loss": 0.2227,
      "step": 550
    },
    {
      "epoch": 0.48625792811839325,
      "grad_norm": 5.413006782531738,
      "learning_rate": 9.962564102564102e-06,
      "loss": 0.2344,
      "step": 575
    },
    {
      "epoch": 0.507399577167019,
      "grad_norm": 5.172091960906982,
      "learning_rate": 9.94974358974359e-06,
      "loss": 0.207,
      "step": 600
    },
    {
      "epoch": 0.5285412262156448,
      "grad_norm": 3.3823935985565186,
      "learning_rate": 9.936923076923078e-06,
      "loss": 0.2192,
      "step": 625
    },
    {
      "epoch": 0.5496828752642706,
      "grad_norm": 6.060272693634033,
      "learning_rate": 9.924102564102565e-06,
      "loss": 0.2052,
      "step": 650
    },
    {
      "epoch": 0.5708245243128964,
      "grad_norm": 5.090622901916504,
      "learning_rate": 9.911282051282051e-06,
      "loss": 0.2074,
      "step": 675
    },
    {
      "epoch": 0.5919661733615222,
      "grad_norm": 4.698369979858398,
      "learning_rate": 9.898461538461538e-06,
      "loss": 0.2305,
      "step": 700
    },
    {
      "epoch": 0.6131078224101479,
      "grad_norm": 5.381127834320068,
      "learning_rate": 9.885641025641027e-06,
      "loss": 0.224,
      "step": 725
    },
    {
      "epoch": 0.6342494714587738,
      "grad_norm": 4.093654155731201,
      "learning_rate": 9.872820512820514e-06,
      "loss": 0.189,
      "step": 750
    },
    {
      "epoch": 0.6553911205073996,
      "grad_norm": 3.974806070327759,
      "learning_rate": 9.86e-06,
      "loss": 0.2005,
      "step": 775
    },
    {
      "epoch": 0.6765327695560254,
      "grad_norm": 4.721721172332764,
      "learning_rate": 9.847179487179487e-06,
      "loss": 0.2163,
      "step": 800
    },
    {
      "epoch": 0.6976744186046512,
      "grad_norm": 5.002155780792236,
      "learning_rate": 9.834358974358976e-06,
      "loss": 0.1996,
      "step": 825
    },
    {
      "epoch": 0.718816067653277,
      "grad_norm": 5.590403079986572,
      "learning_rate": 9.821538461538463e-06,
      "loss": 0.2142,
      "step": 850
    },
    {
      "epoch": 0.7399577167019028,
      "grad_norm": 4.794340133666992,
      "learning_rate": 9.80871794871795e-06,
      "loss": 0.2069,
      "step": 875
    },
    {
      "epoch": 0.7610993657505285,
      "grad_norm": 3.6653997898101807,
      "learning_rate": 9.795897435897436e-06,
      "loss": 0.1983,
      "step": 900
    },
    {
      "epoch": 0.7822410147991543,
      "grad_norm": 4.958125114440918,
      "learning_rate": 9.783076923076925e-06,
      "loss": 0.213,
      "step": 925
    },
    {
      "epoch": 0.8033826638477801,
      "grad_norm": 4.403783798217773,
      "learning_rate": 9.770256410256412e-06,
      "loss": 0.2215,
      "step": 950
    },
    {
      "epoch": 0.8245243128964059,
      "grad_norm": 5.16312313079834,
      "learning_rate": 9.757435897435898e-06,
      "loss": 0.2096,
      "step": 975
    },
    {
      "epoch": 0.8456659619450317,
      "grad_norm": 4.566516876220703,
      "learning_rate": 9.744615384615385e-06,
      "loss": 0.1875,
      "step": 1000
    },
    {
      "epoch": 0.8456659619450317,
      "eval_loss": 0.13999927043914795,
      "eval_runtime": 419.9778,
      "eval_samples_per_second": 8.67,
      "eval_steps_per_second": 0.543,
      "eval_wer": 0.10993549584667502,
      "step": 1000
    },
    {
      "epoch": 0.8668076109936576,
      "grad_norm": 4.138431072235107,
      "learning_rate": 9.731794871794872e-06,
      "loss": 0.2002,
      "step": 1025
    },
    {
      "epoch": 0.8879492600422833,
      "grad_norm": 5.494874477386475,
      "learning_rate": 9.71897435897436e-06,
      "loss": 0.1716,
      "step": 1050
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 3.6946444511413574,
      "learning_rate": 9.706153846153847e-06,
      "loss": 0.1722,
      "step": 1075
    },
    {
      "epoch": 0.9302325581395349,
      "grad_norm": 4.946837425231934,
      "learning_rate": 9.693333333333334e-06,
      "loss": 0.1735,
      "step": 1100
    },
    {
      "epoch": 0.9513742071881607,
      "grad_norm": 4.48101806640625,
      "learning_rate": 9.680512820512821e-06,
      "loss": 0.1836,
      "step": 1125
    },
    {
      "epoch": 0.9725158562367865,
      "grad_norm": 4.336252689361572,
      "learning_rate": 9.667692307692308e-06,
      "loss": 0.1827,
      "step": 1150
    },
    {
      "epoch": 0.9936575052854123,
      "grad_norm": 5.601895332336426,
      "learning_rate": 9.654871794871795e-06,
      "loss": 0.1775,
      "step": 1175
    },
    {
      "epoch": 1.014799154334038,
      "grad_norm": 2.9764301776885986,
      "learning_rate": 9.642051282051282e-06,
      "loss": 0.136,
      "step": 1200
    },
    {
      "epoch": 1.0359408033826638,
      "grad_norm": 3.8662917613983154,
      "learning_rate": 9.62923076923077e-06,
      "loss": 0.103,
      "step": 1225
    },
    {
      "epoch": 1.0570824524312896,
      "grad_norm": 2.8110098838806152,
      "learning_rate": 9.616410256410257e-06,
      "loss": 0.0959,
      "step": 1250
    },
    {
      "epoch": 1.0782241014799154,
      "grad_norm": 3.119048833847046,
      "learning_rate": 9.603589743589744e-06,
      "loss": 0.1086,
      "step": 1275
    },
    {
      "epoch": 1.0993657505285412,
      "grad_norm": 4.480291366577148,
      "learning_rate": 9.59076923076923e-06,
      "loss": 0.0961,
      "step": 1300
    },
    {
      "epoch": 1.120507399577167,
      "grad_norm": 2.7335333824157715,
      "learning_rate": 9.577948717948719e-06,
      "loss": 0.0915,
      "step": 1325
    },
    {
      "epoch": 1.1416490486257929,
      "grad_norm": 3.7086055278778076,
      "learning_rate": 9.565128205128206e-06,
      "loss": 0.1077,
      "step": 1350
    },
    {
      "epoch": 1.1627906976744187,
      "grad_norm": 3.4772427082061768,
      "learning_rate": 9.552307692307693e-06,
      "loss": 0.0892,
      "step": 1375
    },
    {
      "epoch": 1.1839323467230445,
      "grad_norm": 2.8368217945098877,
      "learning_rate": 9.53948717948718e-06,
      "loss": 0.1025,
      "step": 1400
    },
    {
      "epoch": 1.20507399577167,
      "grad_norm": 4.250027179718018,
      "learning_rate": 9.526666666666668e-06,
      "loss": 0.0903,
      "step": 1425
    },
    {
      "epoch": 1.226215644820296,
      "grad_norm": 3.0660858154296875,
      "learning_rate": 9.513846153846155e-06,
      "loss": 0.0975,
      "step": 1450
    },
    {
      "epoch": 1.2473572938689217,
      "grad_norm": 2.3566482067108154,
      "learning_rate": 9.501025641025642e-06,
      "loss": 0.0993,
      "step": 1475
    },
    {
      "epoch": 1.2684989429175475,
      "grad_norm": 3.3394174575805664,
      "learning_rate": 9.488205128205129e-06,
      "loss": 0.0928,
      "step": 1500
    },
    {
      "epoch": 1.2896405919661733,
      "grad_norm": 3.662484645843506,
      "learning_rate": 9.475384615384617e-06,
      "loss": 0.0999,
      "step": 1525
    },
    {
      "epoch": 1.3107822410147991,
      "grad_norm": 3.5644421577453613,
      "learning_rate": 9.462564102564104e-06,
      "loss": 0.092,
      "step": 1550
    },
    {
      "epoch": 1.331923890063425,
      "grad_norm": 3.3423449993133545,
      "learning_rate": 9.44974358974359e-06,
      "loss": 0.1178,
      "step": 1575
    },
    {
      "epoch": 1.3530655391120507,
      "grad_norm": 3.4482710361480713,
      "learning_rate": 9.436923076923078e-06,
      "loss": 0.1046,
      "step": 1600
    },
    {
      "epoch": 1.3742071881606766,
      "grad_norm": 3.0157036781311035,
      "learning_rate": 9.424102564102564e-06,
      "loss": 0.0977,
      "step": 1625
    },
    {
      "epoch": 1.3953488372093024,
      "grad_norm": 3.4354636669158936,
      "learning_rate": 9.411282051282053e-06,
      "loss": 0.0853,
      "step": 1650
    },
    {
      "epoch": 1.4164904862579282,
      "grad_norm": 3.6379215717315674,
      "learning_rate": 9.39846153846154e-06,
      "loss": 0.1003,
      "step": 1675
    },
    {
      "epoch": 1.437632135306554,
      "grad_norm": 4.067347049713135,
      "learning_rate": 9.385641025641027e-06,
      "loss": 0.0795,
      "step": 1700
    },
    {
      "epoch": 1.4587737843551798,
      "grad_norm": 4.734683513641357,
      "learning_rate": 9.372820512820513e-06,
      "loss": 0.1152,
      "step": 1725
    },
    {
      "epoch": 1.4799154334038054,
      "grad_norm": 2.7462713718414307,
      "learning_rate": 9.360000000000002e-06,
      "loss": 0.0951,
      "step": 1750
    },
    {
      "epoch": 1.5010570824524314,
      "grad_norm": 3.8516879081726074,
      "learning_rate": 9.347179487179487e-06,
      "loss": 0.0895,
      "step": 1775
    },
    {
      "epoch": 1.522198731501057,
      "grad_norm": 4.691688537597656,
      "learning_rate": 9.334358974358974e-06,
      "loss": 0.1046,
      "step": 1800
    },
    {
      "epoch": 1.543340380549683,
      "grad_norm": 3.3214614391326904,
      "learning_rate": 9.321538461538462e-06,
      "loss": 0.0948,
      "step": 1825
    },
    {
      "epoch": 1.5644820295983086,
      "grad_norm": 3.6700942516326904,
      "learning_rate": 9.30871794871795e-06,
      "loss": 0.0933,
      "step": 1850
    },
    {
      "epoch": 1.5856236786469344,
      "grad_norm": 3.1985208988189697,
      "learning_rate": 9.295897435897436e-06,
      "loss": 0.0826,
      "step": 1875
    },
    {
      "epoch": 1.6067653276955602,
      "grad_norm": 4.297844886779785,
      "learning_rate": 9.283076923076923e-06,
      "loss": 0.0886,
      "step": 1900
    },
    {
      "epoch": 1.627906976744186,
      "grad_norm": 1.9990788698196411,
      "learning_rate": 9.270256410256411e-06,
      "loss": 0.0846,
      "step": 1925
    },
    {
      "epoch": 1.6490486257928119,
      "grad_norm": 4.069921970367432,
      "learning_rate": 9.257435897435898e-06,
      "loss": 0.085,
      "step": 1950
    },
    {
      "epoch": 1.6701902748414377,
      "grad_norm": 3.8481404781341553,
      "learning_rate": 9.244615384615385e-06,
      "loss": 0.0806,
      "step": 1975
    },
    {
      "epoch": 1.6913319238900635,
      "grad_norm": 2.80975604057312,
      "learning_rate": 9.231794871794872e-06,
      "loss": 0.0852,
      "step": 2000
    },
    {
      "epoch": 1.6913319238900635,
      "eval_loss": 0.10433211177587509,
      "eval_runtime": 421.4968,
      "eval_samples_per_second": 8.638,
      "eval_steps_per_second": 0.541,
      "eval_wer": 0.08566522808482992,
      "step": 2000
    },
    {
      "epoch": 1.712473572938689,
      "grad_norm": 2.397152900695801,
      "learning_rate": 9.21897435897436e-06,
      "loss": 0.0969,
      "step": 2025
    },
    {
      "epoch": 1.733615221987315,
      "grad_norm": 3.2101306915283203,
      "learning_rate": 9.206153846153847e-06,
      "loss": 0.0975,
      "step": 2050
    },
    {
      "epoch": 1.7547568710359407,
      "grad_norm": 4.012678146362305,
      "learning_rate": 9.193333333333334e-06,
      "loss": 0.0766,
      "step": 2075
    },
    {
      "epoch": 1.7758985200845667,
      "grad_norm": 2.3309662342071533,
      "learning_rate": 9.18051282051282e-06,
      "loss": 0.0871,
      "step": 2100
    },
    {
      "epoch": 1.7970401691331923,
      "grad_norm": 3.0771520137786865,
      "learning_rate": 9.16769230769231e-06,
      "loss": 0.0933,
      "step": 2125
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 3.2476842403411865,
      "learning_rate": 9.154871794871796e-06,
      "loss": 0.0824,
      "step": 2150
    },
    {
      "epoch": 1.839323467230444,
      "grad_norm": 3.9469614028930664,
      "learning_rate": 9.142051282051283e-06,
      "loss": 0.0867,
      "step": 2175
    },
    {
      "epoch": 1.8604651162790697,
      "grad_norm": 3.2466225624084473,
      "learning_rate": 9.12923076923077e-06,
      "loss": 0.0773,
      "step": 2200
    },
    {
      "epoch": 1.8816067653276956,
      "grad_norm": 2.797091245651245,
      "learning_rate": 9.116410256410257e-06,
      "loss": 0.0826,
      "step": 2225
    },
    {
      "epoch": 1.9027484143763214,
      "grad_norm": 2.5921578407287598,
      "learning_rate": 9.103589743589745e-06,
      "loss": 0.0748,
      "step": 2250
    },
    {
      "epoch": 1.9238900634249472,
      "grad_norm": 5.473248481750488,
      "learning_rate": 9.090769230769232e-06,
      "loss": 0.0747,
      "step": 2275
    },
    {
      "epoch": 1.945031712473573,
      "grad_norm": 3.044085741043091,
      "learning_rate": 9.077948717948719e-06,
      "loss": 0.0708,
      "step": 2300
    },
    {
      "epoch": 1.9661733615221988,
      "grad_norm": 3.948669672012329,
      "learning_rate": 9.065128205128206e-06,
      "loss": 0.0895,
      "step": 2325
    },
    {
      "epoch": 1.9873150105708244,
      "grad_norm": 3.0291192531585693,
      "learning_rate": 9.052307692307694e-06,
      "loss": 0.0803,
      "step": 2350
    },
    {
      "epoch": 2.0084566596194504,
      "grad_norm": 1.0824567079544067,
      "learning_rate": 9.03948717948718e-06,
      "loss": 0.06,
      "step": 2375
    },
    {
      "epoch": 2.029598308668076,
      "grad_norm": 2.6235604286193848,
      "learning_rate": 9.026666666666666e-06,
      "loss": 0.0374,
      "step": 2400
    },
    {
      "epoch": 2.050739957716702,
      "grad_norm": 1.5301828384399414,
      "learning_rate": 9.013846153846155e-06,
      "loss": 0.036,
      "step": 2425
    },
    {
      "epoch": 2.0718816067653276,
      "grad_norm": 1.8513456583023071,
      "learning_rate": 9.001025641025641e-06,
      "loss": 0.0304,
      "step": 2450
    },
    {
      "epoch": 2.0930232558139537,
      "grad_norm": 1.9220322370529175,
      "learning_rate": 8.988205128205128e-06,
      "loss": 0.0337,
      "step": 2475
    },
    {
      "epoch": 2.1141649048625792,
      "grad_norm": 1.8854974508285522,
      "learning_rate": 8.975384615384615e-06,
      "loss": 0.0289,
      "step": 2500
    },
    {
      "epoch": 2.1353065539112053,
      "grad_norm": 3.065642833709717,
      "learning_rate": 8.962564102564104e-06,
      "loss": 0.0364,
      "step": 2525
    },
    {
      "epoch": 2.156448202959831,
      "grad_norm": 1.0566080808639526,
      "learning_rate": 8.94974358974359e-06,
      "loss": 0.0339,
      "step": 2550
    },
    {
      "epoch": 2.177589852008457,
      "grad_norm": 1.686890721321106,
      "learning_rate": 8.936923076923077e-06,
      "loss": 0.0328,
      "step": 2575
    },
    {
      "epoch": 2.1987315010570825,
      "grad_norm": 2.0453619956970215,
      "learning_rate": 8.924102564102564e-06,
      "loss": 0.0369,
      "step": 2600
    },
    {
      "epoch": 2.219873150105708,
      "grad_norm": 2.368619680404663,
      "learning_rate": 8.911282051282053e-06,
      "loss": 0.032,
      "step": 2625
    },
    {
      "epoch": 2.241014799154334,
      "grad_norm": 1.775297999382019,
      "learning_rate": 8.89846153846154e-06,
      "loss": 0.0315,
      "step": 2650
    },
    {
      "epoch": 2.2621564482029597,
      "grad_norm": 3.0013630390167236,
      "learning_rate": 8.885641025641026e-06,
      "loss": 0.0339,
      "step": 2675
    },
    {
      "epoch": 2.2832980972515857,
      "grad_norm": 1.0974608659744263,
      "learning_rate": 8.872820512820513e-06,
      "loss": 0.0351,
      "step": 2700
    },
    {
      "epoch": 2.3044397463002113,
      "grad_norm": 3.1183347702026367,
      "learning_rate": 8.860000000000002e-06,
      "loss": 0.0386,
      "step": 2725
    },
    {
      "epoch": 2.3255813953488373,
      "grad_norm": 1.7240360975265503,
      "learning_rate": 8.847179487179488e-06,
      "loss": 0.0323,
      "step": 2750
    },
    {
      "epoch": 2.346723044397463,
      "grad_norm": 3.0883610248565674,
      "learning_rate": 8.834358974358975e-06,
      "loss": 0.0444,
      "step": 2775
    },
    {
      "epoch": 2.367864693446089,
      "grad_norm": 1.8349878787994385,
      "learning_rate": 8.821538461538462e-06,
      "loss": 0.0387,
      "step": 2800
    },
    {
      "epoch": 2.3890063424947146,
      "grad_norm": 3.4535439014434814,
      "learning_rate": 8.80871794871795e-06,
      "loss": 0.0422,
      "step": 2825
    },
    {
      "epoch": 2.41014799154334,
      "grad_norm": 3.0113954544067383,
      "learning_rate": 8.795897435897437e-06,
      "loss": 0.0354,
      "step": 2850
    },
    {
      "epoch": 2.431289640591966,
      "grad_norm": 1.868592619895935,
      "learning_rate": 8.783076923076924e-06,
      "loss": 0.0278,
      "step": 2875
    },
    {
      "epoch": 2.452431289640592,
      "grad_norm": 2.4312257766723633,
      "learning_rate": 8.770256410256411e-06,
      "loss": 0.035,
      "step": 2900
    },
    {
      "epoch": 2.473572938689218,
      "grad_norm": 2.4502596855163574,
      "learning_rate": 8.757435897435898e-06,
      "loss": 0.0314,
      "step": 2925
    },
    {
      "epoch": 2.4947145877378434,
      "grad_norm": 2.0330417156219482,
      "learning_rate": 8.744615384615386e-06,
      "loss": 0.0414,
      "step": 2950
    },
    {
      "epoch": 2.5158562367864694,
      "grad_norm": 1.8342368602752686,
      "learning_rate": 8.731794871794873e-06,
      "loss": 0.0486,
      "step": 2975
    },
    {
      "epoch": 2.536997885835095,
      "grad_norm": 3.516516923904419,
      "learning_rate": 8.718974358974358e-06,
      "loss": 0.0387,
      "step": 3000
    },
    {
      "epoch": 2.536997885835095,
      "eval_loss": 0.09136591106653214,
      "eval_runtime": 424.5707,
      "eval_samples_per_second": 8.576,
      "eval_steps_per_second": 0.537,
      "eval_wer": 0.07568796695902362,
      "step": 3000
    },
    {
      "epoch": 2.558139534883721,
      "grad_norm": 2.264760732650757,
      "learning_rate": 8.706153846153847e-06,
      "loss": 0.0394,
      "step": 3025
    },
    {
      "epoch": 2.5792811839323466,
      "grad_norm": 2.3247904777526855,
      "learning_rate": 8.693333333333334e-06,
      "loss": 0.032,
      "step": 3050
    },
    {
      "epoch": 2.6004228329809727,
      "grad_norm": 2.2518162727355957,
      "learning_rate": 8.68051282051282e-06,
      "loss": 0.032,
      "step": 3075
    },
    {
      "epoch": 2.6215644820295982,
      "grad_norm": 3.336057424545288,
      "learning_rate": 8.667692307692307e-06,
      "loss": 0.0354,
      "step": 3100
    },
    {
      "epoch": 2.6427061310782243,
      "grad_norm": 2.483700752258301,
      "learning_rate": 8.654871794871796e-06,
      "loss": 0.0371,
      "step": 3125
    },
    {
      "epoch": 2.66384778012685,
      "grad_norm": 1.6576588153839111,
      "learning_rate": 8.642051282051283e-06,
      "loss": 0.0335,
      "step": 3150
    },
    {
      "epoch": 2.6849894291754755,
      "grad_norm": 2.4956696033477783,
      "learning_rate": 8.62923076923077e-06,
      "loss": 0.0253,
      "step": 3175
    },
    {
      "epoch": 2.7061310782241015,
      "grad_norm": 2.75431489944458,
      "learning_rate": 8.616410256410256e-06,
      "loss": 0.0318,
      "step": 3200
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 2.210634708404541,
      "learning_rate": 8.603589743589745e-06,
      "loss": 0.0394,
      "step": 3225
    },
    {
      "epoch": 2.748414376321353,
      "grad_norm": 2.500887632369995,
      "learning_rate": 8.590769230769232e-06,
      "loss": 0.0404,
      "step": 3250
    },
    {
      "epoch": 2.7695560253699787,
      "grad_norm": 1.5044671297073364,
      "learning_rate": 8.577948717948718e-06,
      "loss": 0.031,
      "step": 3275
    },
    {
      "epoch": 2.7906976744186047,
      "grad_norm": 2.0523312091827393,
      "learning_rate": 8.565128205128205e-06,
      "loss": 0.0324,
      "step": 3300
    },
    {
      "epoch": 2.8118393234672303,
      "grad_norm": 3.076326608657837,
      "learning_rate": 8.552307692307694e-06,
      "loss": 0.0319,
      "step": 3325
    },
    {
      "epoch": 2.8329809725158563,
      "grad_norm": 0.9975211024284363,
      "learning_rate": 8.53948717948718e-06,
      "loss": 0.0327,
      "step": 3350
    },
    {
      "epoch": 2.854122621564482,
      "grad_norm": 2.296332597732544,
      "learning_rate": 8.526666666666667e-06,
      "loss": 0.0429,
      "step": 3375
    },
    {
      "epoch": 2.875264270613108,
      "grad_norm": 0.4409531354904175,
      "learning_rate": 8.513846153846154e-06,
      "loss": 0.0347,
      "step": 3400
    },
    {
      "epoch": 2.8964059196617336,
      "grad_norm": 1.439736008644104,
      "learning_rate": 8.501025641025643e-06,
      "loss": 0.0337,
      "step": 3425
    },
    {
      "epoch": 2.9175475687103596,
      "grad_norm": 0.9605826735496521,
      "learning_rate": 8.48820512820513e-06,
      "loss": 0.0351,
      "step": 3450
    },
    {
      "epoch": 2.938689217758985,
      "grad_norm": 1.8650295734405518,
      "learning_rate": 8.475384615384616e-06,
      "loss": 0.0403,
      "step": 3475
    },
    {
      "epoch": 2.9598308668076108,
      "grad_norm": 2.824385643005371,
      "learning_rate": 8.462564102564103e-06,
      "loss": 0.0384,
      "step": 3500
    },
    {
      "epoch": 2.980972515856237,
      "grad_norm": 1.5251168012619019,
      "learning_rate": 8.44974358974359e-06,
      "loss": 0.0334,
      "step": 3525
    },
    {
      "epoch": 3.0021141649048624,
      "grad_norm": 0.8180655837059021,
      "learning_rate": 8.436923076923079e-06,
      "loss": 0.0313,
      "step": 3550
    },
    {
      "epoch": 3.0232558139534884,
      "grad_norm": 2.1612069606781006,
      "learning_rate": 8.424102564102565e-06,
      "loss": 0.016,
      "step": 3575
    },
    {
      "epoch": 3.044397463002114,
      "grad_norm": 1.3373942375183105,
      "learning_rate": 8.411282051282052e-06,
      "loss": 0.0144,
      "step": 3600
    },
    {
      "epoch": 3.06553911205074,
      "grad_norm": 0.7444804310798645,
      "learning_rate": 8.398461538461539e-06,
      "loss": 0.0203,
      "step": 3625
    },
    {
      "epoch": 3.0866807610993656,
      "grad_norm": 1.6546038389205933,
      "learning_rate": 8.385641025641026e-06,
      "loss": 0.0109,
      "step": 3650
    },
    {
      "epoch": 3.1078224101479917,
      "grad_norm": 1.1087496280670166,
      "learning_rate": 8.372820512820513e-06,
      "loss": 0.0139,
      "step": 3675
    },
    {
      "epoch": 3.1289640591966172,
      "grad_norm": 1.7939504384994507,
      "learning_rate": 8.36e-06,
      "loss": 0.0185,
      "step": 3700
    },
    {
      "epoch": 3.1501057082452433,
      "grad_norm": 1.848943829536438,
      "learning_rate": 8.347179487179488e-06,
      "loss": 0.0128,
      "step": 3725
    },
    {
      "epoch": 3.171247357293869,
      "grad_norm": 2.3288862705230713,
      "learning_rate": 8.334358974358975e-06,
      "loss": 0.0132,
      "step": 3750
    },
    {
      "epoch": 3.192389006342495,
      "grad_norm": 2.1424756050109863,
      "learning_rate": 8.321538461538462e-06,
      "loss": 0.017,
      "step": 3775
    },
    {
      "epoch": 3.2135306553911205,
      "grad_norm": 1.132733941078186,
      "learning_rate": 8.308717948717949e-06,
      "loss": 0.0181,
      "step": 3800
    },
    {
      "epoch": 3.234672304439746,
      "grad_norm": 1.5112519264221191,
      "learning_rate": 8.295897435897437e-06,
      "loss": 0.012,
      "step": 3825
    },
    {
      "epoch": 3.255813953488372,
      "grad_norm": 1.1612873077392578,
      "learning_rate": 8.283076923076924e-06,
      "loss": 0.0142,
      "step": 3850
    },
    {
      "epoch": 3.276955602536998,
      "grad_norm": 2.4286863803863525,
      "learning_rate": 8.27025641025641e-06,
      "loss": 0.0106,
      "step": 3875
    },
    {
      "epoch": 3.2980972515856237,
      "grad_norm": 2.1390466690063477,
      "learning_rate": 8.257435897435898e-06,
      "loss": 0.0176,
      "step": 3900
    },
    {
      "epoch": 3.3192389006342493,
      "grad_norm": 3.464160680770874,
      "learning_rate": 8.244615384615386e-06,
      "loss": 0.0143,
      "step": 3925
    },
    {
      "epoch": 3.3403805496828753,
      "grad_norm": 1.063372254371643,
      "learning_rate": 8.231794871794873e-06,
      "loss": 0.0196,
      "step": 3950
    },
    {
      "epoch": 3.361522198731501,
      "grad_norm": 0.763058066368103,
      "learning_rate": 8.21897435897436e-06,
      "loss": 0.0152,
      "step": 3975
    },
    {
      "epoch": 3.382663847780127,
      "grad_norm": 0.30352699756622314,
      "learning_rate": 8.206153846153847e-06,
      "loss": 0.0153,
      "step": 4000
    },
    {
      "epoch": 3.382663847780127,
      "eval_loss": 0.08595944941043854,
      "eval_runtime": 425.6418,
      "eval_samples_per_second": 8.554,
      "eval_steps_per_second": 0.536,
      "eval_wer": 0.08181354123161168,
      "step": 4000
    },
    {
      "epoch": 3.4038054968287526,
      "grad_norm": 0.6997278332710266,
      "learning_rate": 8.193333333333335e-06,
      "loss": 0.0121,
      "step": 4025
    },
    {
      "epoch": 3.4249471458773786,
      "grad_norm": 2.2743613719940186,
      "learning_rate": 8.180512820512822e-06,
      "loss": 0.0148,
      "step": 4050
    },
    {
      "epoch": 3.446088794926004,
      "grad_norm": 1.2777513265609741,
      "learning_rate": 8.167692307692309e-06,
      "loss": 0.0142,
      "step": 4075
    },
    {
      "epoch": 3.46723044397463,
      "grad_norm": 1.5009987354278564,
      "learning_rate": 8.154871794871796e-06,
      "loss": 0.0191,
      "step": 4100
    },
    {
      "epoch": 3.488372093023256,
      "grad_norm": 1.3110395669937134,
      "learning_rate": 8.142051282051282e-06,
      "loss": 0.0133,
      "step": 4125
    },
    {
      "epoch": 3.5095137420718814,
      "grad_norm": 1.5313085317611694,
      "learning_rate": 8.129230769230771e-06,
      "loss": 0.0141,
      "step": 4150
    },
    {
      "epoch": 3.5306553911205074,
      "grad_norm": 1.2553045749664307,
      "learning_rate": 8.116410256410258e-06,
      "loss": 0.0169,
      "step": 4175
    },
    {
      "epoch": 3.5517970401691334,
      "grad_norm": 1.313785433769226,
      "learning_rate": 8.103589743589745e-06,
      "loss": 0.0193,
      "step": 4200
    },
    {
      "epoch": 3.572938689217759,
      "grad_norm": 1.203377366065979,
      "learning_rate": 8.090769230769231e-06,
      "loss": 0.0151,
      "step": 4225
    },
    {
      "epoch": 3.5940803382663846,
      "grad_norm": 0.6605008244514465,
      "learning_rate": 8.077948717948718e-06,
      "loss": 0.0181,
      "step": 4250
    },
    {
      "epoch": 3.6152219873150107,
      "grad_norm": 1.213782787322998,
      "learning_rate": 8.065128205128205e-06,
      "loss": 0.019,
      "step": 4275
    },
    {
      "epoch": 3.6363636363636362,
      "grad_norm": 3.021005153656006,
      "learning_rate": 8.052307692307692e-06,
      "loss": 0.0169,
      "step": 4300
    },
    {
      "epoch": 3.6575052854122623,
      "grad_norm": 1.8544044494628906,
      "learning_rate": 8.03948717948718e-06,
      "loss": 0.0131,
      "step": 4325
    },
    {
      "epoch": 3.678646934460888,
      "grad_norm": 1.1160951852798462,
      "learning_rate": 8.026666666666667e-06,
      "loss": 0.0174,
      "step": 4350
    },
    {
      "epoch": 3.699788583509514,
      "grad_norm": 0.6091210246086121,
      "learning_rate": 8.013846153846154e-06,
      "loss": 0.0125,
      "step": 4375
    },
    {
      "epoch": 3.7209302325581395,
      "grad_norm": 1.5282201766967773,
      "learning_rate": 8.00102564102564e-06,
      "loss": 0.0193,
      "step": 4400
    },
    {
      "epoch": 3.7420718816067655,
      "grad_norm": 0.671487033367157,
      "learning_rate": 7.98820512820513e-06,
      "loss": 0.0142,
      "step": 4425
    },
    {
      "epoch": 3.763213530655391,
      "grad_norm": 1.4481194019317627,
      "learning_rate": 7.975384615384616e-06,
      "loss": 0.0205,
      "step": 4450
    },
    {
      "epoch": 3.7843551797040167,
      "grad_norm": 1.552512288093567,
      "learning_rate": 7.962564102564103e-06,
      "loss": 0.0156,
      "step": 4475
    },
    {
      "epoch": 3.8054968287526427,
      "grad_norm": 2.821441411972046,
      "learning_rate": 7.94974358974359e-06,
      "loss": 0.0205,
      "step": 4500
    },
    {
      "epoch": 3.8266384778012688,
      "grad_norm": 1.934550404548645,
      "learning_rate": 7.936923076923078e-06,
      "loss": 0.014,
      "step": 4525
    },
    {
      "epoch": 3.8477801268498943,
      "grad_norm": 1.8110979795455933,
      "learning_rate": 7.924102564102565e-06,
      "loss": 0.015,
      "step": 4550
    },
    {
      "epoch": 3.86892177589852,
      "grad_norm": 1.9725172519683838,
      "learning_rate": 7.911282051282052e-06,
      "loss": 0.0149,
      "step": 4575
    },
    {
      "epoch": 3.890063424947146,
      "grad_norm": 1.2784725427627563,
      "learning_rate": 7.898461538461539e-06,
      "loss": 0.0146,
      "step": 4600
    },
    {
      "epoch": 3.9112050739957716,
      "grad_norm": 2.2306292057037354,
      "learning_rate": 7.885641025641027e-06,
      "loss": 0.0163,
      "step": 4625
    },
    {
      "epoch": 3.9323467230443976,
      "grad_norm": 1.8566030263900757,
      "learning_rate": 7.872820512820514e-06,
      "loss": 0.015,
      "step": 4650
    },
    {
      "epoch": 3.953488372093023,
      "grad_norm": 1.6263699531555176,
      "learning_rate": 7.860000000000001e-06,
      "loss": 0.0145,
      "step": 4675
    },
    {
      "epoch": 3.974630021141649,
      "grad_norm": 1.2644612789154053,
      "learning_rate": 7.847179487179488e-06,
      "loss": 0.0158,
      "step": 4700
    },
    {
      "epoch": 3.995771670190275,
      "grad_norm": 0.5040600299835205,
      "learning_rate": 7.834871794871795e-06,
      "loss": 0.0162,
      "step": 4725
    },
    {
      "epoch": 4.016913319238901,
      "grad_norm": 0.6542588472366333,
      "learning_rate": 7.822051282051282e-06,
      "loss": 0.009,
      "step": 4750
    },
    {
      "epoch": 4.038054968287526,
      "grad_norm": 1.0600502490997314,
      "learning_rate": 7.80923076923077e-06,
      "loss": 0.0125,
      "step": 4775
    },
    {
      "epoch": 4.059196617336152,
      "grad_norm": 0.7481040358543396,
      "learning_rate": 7.796410256410257e-06,
      "loss": 0.0065,
      "step": 4800
    },
    {
      "epoch": 4.080338266384778,
      "grad_norm": 0.9099847078323364,
      "learning_rate": 7.783589743589744e-06,
      "loss": 0.0057,
      "step": 4825
    },
    {
      "epoch": 4.101479915433404,
      "grad_norm": 0.5291013717651367,
      "learning_rate": 7.770769230769231e-06,
      "loss": 0.005,
      "step": 4850
    },
    {
      "epoch": 4.12262156448203,
      "grad_norm": 1.499250888824463,
      "learning_rate": 7.75794871794872e-06,
      "loss": 0.0083,
      "step": 4875
    },
    {
      "epoch": 4.143763213530655,
      "grad_norm": 0.21640999615192413,
      "learning_rate": 7.745128205128206e-06,
      "loss": 0.0075,
      "step": 4900
    },
    {
      "epoch": 4.164904862579281,
      "grad_norm": 0.20196348428726196,
      "learning_rate": 7.732307692307693e-06,
      "loss": 0.0088,
      "step": 4925
    },
    {
      "epoch": 4.186046511627907,
      "grad_norm": 0.6973519921302795,
      "learning_rate": 7.71948717948718e-06,
      "loss": 0.0079,
      "step": 4950
    },
    {
      "epoch": 4.207188160676533,
      "grad_norm": 0.38807210326194763,
      "learning_rate": 7.706666666666669e-06,
      "loss": 0.005,
      "step": 4975
    },
    {
      "epoch": 4.2283298097251585,
      "grad_norm": 0.19292686879634857,
      "learning_rate": 7.693846153846154e-06,
      "loss": 0.008,
      "step": 5000
    },
    {
      "epoch": 4.2283298097251585,
      "eval_loss": 0.08777374029159546,
      "eval_runtime": 416.8384,
      "eval_samples_per_second": 8.735,
      "eval_steps_per_second": 0.547,
      "eval_wer": 0.06984082788064411,
      "step": 5000
    },
    {
      "epoch": 4.249471458773784,
      "grad_norm": 0.18094445765018463,
      "learning_rate": 7.68102564102564e-06,
      "loss": 0.0059,
      "step": 5025
    },
    {
      "epoch": 4.2706131078224105,
      "grad_norm": 1.249588131904602,
      "learning_rate": 7.668205128205129e-06,
      "loss": 0.0063,
      "step": 5050
    },
    {
      "epoch": 4.291754756871036,
      "grad_norm": 1.3055378198623657,
      "learning_rate": 7.655384615384616e-06,
      "loss": 0.0074,
      "step": 5075
    },
    {
      "epoch": 4.312896405919662,
      "grad_norm": 1.6589789390563965,
      "learning_rate": 7.642564102564103e-06,
      "loss": 0.0094,
      "step": 5100
    },
    {
      "epoch": 4.334038054968287,
      "grad_norm": 0.46987977623939514,
      "learning_rate": 7.62974358974359e-06,
      "loss": 0.0117,
      "step": 5125
    },
    {
      "epoch": 4.355179704016914,
      "grad_norm": 0.4591355323791504,
      "learning_rate": 7.616923076923077e-06,
      "loss": 0.0079,
      "step": 5150
    },
    {
      "epoch": 4.376321353065539,
      "grad_norm": 1.6983137130737305,
      "learning_rate": 7.604102564102565e-06,
      "loss": 0.0053,
      "step": 5175
    },
    {
      "epoch": 4.397463002114165,
      "grad_norm": 0.3046073913574219,
      "learning_rate": 7.591282051282052e-06,
      "loss": 0.0104,
      "step": 5200
    },
    {
      "epoch": 4.4186046511627906,
      "grad_norm": 0.7796456217765808,
      "learning_rate": 7.578461538461539e-06,
      "loss": 0.0059,
      "step": 5225
    },
    {
      "epoch": 4.439746300211416,
      "grad_norm": 0.7012127637863159,
      "learning_rate": 7.565641025641026e-06,
      "loss": 0.0076,
      "step": 5250
    },
    {
      "epoch": 4.460887949260043,
      "grad_norm": 0.6566082835197449,
      "learning_rate": 7.552820512820514e-06,
      "loss": 0.0085,
      "step": 5275
    },
    {
      "epoch": 4.482029598308668,
      "grad_norm": 1.1547282934188843,
      "learning_rate": 7.540000000000001e-06,
      "loss": 0.0082,
      "step": 5300
    },
    {
      "epoch": 4.503171247357294,
      "grad_norm": 1.3107972145080566,
      "learning_rate": 7.5271794871794875e-06,
      "loss": 0.007,
      "step": 5325
    },
    {
      "epoch": 4.524312896405919,
      "grad_norm": 1.5770258903503418,
      "learning_rate": 7.514358974358975e-06,
      "loss": 0.0118,
      "step": 5350
    },
    {
      "epoch": 4.545454545454545,
      "grad_norm": 2.0194594860076904,
      "learning_rate": 7.501538461538462e-06,
      "loss": 0.0104,
      "step": 5375
    },
    {
      "epoch": 4.5665961945031714,
      "grad_norm": 1.2425894737243652,
      "learning_rate": 7.48871794871795e-06,
      "loss": 0.009,
      "step": 5400
    },
    {
      "epoch": 4.587737843551797,
      "grad_norm": 1.568264365196228,
      "learning_rate": 7.4758974358974365e-06,
      "loss": 0.0095,
      "step": 5425
    },
    {
      "epoch": 4.608879492600423,
      "grad_norm": 0.7294540405273438,
      "learning_rate": 7.463076923076924e-06,
      "loss": 0.0092,
      "step": 5450
    },
    {
      "epoch": 4.630021141649049,
      "grad_norm": 1.0853856801986694,
      "learning_rate": 7.450256410256411e-06,
      "loss": 0.0078,
      "step": 5475
    },
    {
      "epoch": 4.651162790697675,
      "grad_norm": 1.0923691987991333,
      "learning_rate": 7.437435897435899e-06,
      "loss": 0.0071,
      "step": 5500
    },
    {
      "epoch": 4.6723044397463,
      "grad_norm": 1.096602201461792,
      "learning_rate": 7.4246153846153855e-06,
      "loss": 0.0072,
      "step": 5525
    },
    {
      "epoch": 4.693446088794926,
      "grad_norm": 1.0058438777923584,
      "learning_rate": 7.411794871794873e-06,
      "loss": 0.009,
      "step": 5550
    },
    {
      "epoch": 4.7145877378435515,
      "grad_norm": 0.3397851884365082,
      "learning_rate": 7.39897435897436e-06,
      "loss": 0.006,
      "step": 5575
    },
    {
      "epoch": 4.735729386892178,
      "grad_norm": 0.9336525201797485,
      "learning_rate": 7.386153846153846e-06,
      "loss": 0.0144,
      "step": 5600
    },
    {
      "epoch": 4.7568710359408035,
      "grad_norm": 0.1965409815311432,
      "learning_rate": 7.373333333333334e-06,
      "loss": 0.0057,
      "step": 5625
    },
    {
      "epoch": 4.778012684989429,
      "grad_norm": 0.3345631957054138,
      "learning_rate": 7.3605128205128204e-06,
      "loss": 0.008,
      "step": 5650
    },
    {
      "epoch": 4.799154334038055,
      "grad_norm": 0.8351041674613953,
      "learning_rate": 7.347692307692308e-06,
      "loss": 0.0063,
      "step": 5675
    },
    {
      "epoch": 4.82029598308668,
      "grad_norm": 0.6171631217002869,
      "learning_rate": 7.334871794871795e-06,
      "loss": 0.0056,
      "step": 5700
    },
    {
      "epoch": 4.841437632135307,
      "grad_norm": 0.22757196426391602,
      "learning_rate": 7.322051282051283e-06,
      "loss": 0.0098,
      "step": 5725
    },
    {
      "epoch": 4.862579281183932,
      "grad_norm": 2.954468011856079,
      "learning_rate": 7.309230769230769e-06,
      "loss": 0.0056,
      "step": 5750
    },
    {
      "epoch": 4.883720930232558,
      "grad_norm": 1.6333624124526978,
      "learning_rate": 7.296410256410257e-06,
      "loss": 0.0067,
      "step": 5775
    },
    {
      "epoch": 4.904862579281184,
      "grad_norm": 0.10707265883684158,
      "learning_rate": 7.283589743589744e-06,
      "loss": 0.0055,
      "step": 5800
    },
    {
      "epoch": 4.92600422832981,
      "grad_norm": 0.8286884427070618,
      "learning_rate": 7.270769230769232e-06,
      "loss": 0.0081,
      "step": 5825
    },
    {
      "epoch": 4.947145877378436,
      "grad_norm": 0.42702218890190125,
      "learning_rate": 7.257948717948718e-06,
      "loss": 0.0064,
      "step": 5850
    },
    {
      "epoch": 4.968287526427061,
      "grad_norm": 1.5272870063781738,
      "learning_rate": 7.245128205128206e-06,
      "loss": 0.006,
      "step": 5875
    },
    {
      "epoch": 4.989429175475687,
      "grad_norm": 1.0080416202545166,
      "learning_rate": 7.232307692307693e-06,
      "loss": 0.0064,
      "step": 5900
    },
    {
      "epoch": 5.010570824524313,
      "grad_norm": 1.6429482698440552,
      "learning_rate": 7.21948717948718e-06,
      "loss": 0.004,
      "step": 5925
    },
    {
      "epoch": 5.031712473572939,
      "grad_norm": 0.593721866607666,
      "learning_rate": 7.206666666666667e-06,
      "loss": 0.0033,
      "step": 5950
    },
    {
      "epoch": 5.052854122621564,
      "grad_norm": 0.20310665667057037,
      "learning_rate": 7.193846153846154e-06,
      "loss": 0.0045,
      "step": 5975
    },
    {
      "epoch": 5.07399577167019,
      "grad_norm": 1.1912012100219727,
      "learning_rate": 7.181025641025642e-06,
      "loss": 0.005,
      "step": 6000
    },
    {
      "epoch": 5.07399577167019,
      "eval_loss": 0.08779177069664001,
      "eval_runtime": 422.2642,
      "eval_samples_per_second": 8.623,
      "eval_steps_per_second": 0.54,
      "eval_wer": 0.07452782031648801,
      "step": 6000
    },
    {
      "epoch": 5.0951374207188165,
      "grad_norm": 0.7110195159912109,
      "learning_rate": 7.168205128205129e-06,
      "loss": 0.0077,
      "step": 6025
    },
    {
      "epoch": 5.116279069767442,
      "grad_norm": 0.11154381185770035,
      "learning_rate": 7.155384615384616e-06,
      "loss": 0.004,
      "step": 6050
    },
    {
      "epoch": 5.137420718816068,
      "grad_norm": 0.07287934422492981,
      "learning_rate": 7.142564102564103e-06,
      "loss": 0.003,
      "step": 6075
    },
    {
      "epoch": 5.158562367864693,
      "grad_norm": 0.07207977771759033,
      "learning_rate": 7.129743589743591e-06,
      "loss": 0.0029,
      "step": 6100
    },
    {
      "epoch": 5.179704016913319,
      "grad_norm": 0.09618539363145828,
      "learning_rate": 7.116923076923078e-06,
      "loss": 0.0045,
      "step": 6125
    },
    {
      "epoch": 5.200845665961945,
      "grad_norm": 0.13538002967834473,
      "learning_rate": 7.104102564102565e-06,
      "loss": 0.0036,
      "step": 6150
    },
    {
      "epoch": 5.221987315010571,
      "grad_norm": 0.08731002360582352,
      "learning_rate": 7.091282051282052e-06,
      "loss": 0.0039,
      "step": 6175
    },
    {
      "epoch": 5.2431289640591965,
      "grad_norm": 1.615174412727356,
      "learning_rate": 7.078461538461538e-06,
      "loss": 0.0037,
      "step": 6200
    },
    {
      "epoch": 5.264270613107822,
      "grad_norm": 0.844247579574585,
      "learning_rate": 7.065641025641026e-06,
      "loss": 0.0044,
      "step": 6225
    },
    {
      "epoch": 5.2854122621564485,
      "grad_norm": 0.36915040016174316,
      "learning_rate": 7.052820512820513e-06,
      "loss": 0.0039,
      "step": 6250
    },
    {
      "epoch": 5.306553911205074,
      "grad_norm": 0.6561350226402283,
      "learning_rate": 7.04e-06,
      "loss": 0.0077,
      "step": 6275
    },
    {
      "epoch": 5.3276955602537,
      "grad_norm": 0.10375604033470154,
      "learning_rate": 7.027179487179487e-06,
      "loss": 0.0027,
      "step": 6300
    },
    {
      "epoch": 5.348837209302325,
      "grad_norm": 0.11399765312671661,
      "learning_rate": 7.014358974358975e-06,
      "loss": 0.005,
      "step": 6325
    },
    {
      "epoch": 5.369978858350952,
      "grad_norm": 0.39025041460990906,
      "learning_rate": 7.001538461538462e-06,
      "loss": 0.0032,
      "step": 6350
    },
    {
      "epoch": 5.391120507399577,
      "grad_norm": 0.7788843512535095,
      "learning_rate": 6.988717948717949e-06,
      "loss": 0.0047,
      "step": 6375
    },
    {
      "epoch": 5.412262156448203,
      "grad_norm": 5.279190540313721,
      "learning_rate": 6.975897435897436e-06,
      "loss": 0.0046,
      "step": 6400
    },
    {
      "epoch": 5.4334038054968286,
      "grad_norm": 0.554918646812439,
      "learning_rate": 6.963076923076924e-06,
      "loss": 0.0039,
      "step": 6425
    },
    {
      "epoch": 5.454545454545454,
      "grad_norm": 0.062186941504478455,
      "learning_rate": 6.950256410256411e-06,
      "loss": 0.0027,
      "step": 6450
    },
    {
      "epoch": 5.475687103594081,
      "grad_norm": 0.22088727355003357,
      "learning_rate": 6.937435897435898e-06,
      "loss": 0.004,
      "step": 6475
    },
    {
      "epoch": 5.496828752642706,
      "grad_norm": 1.3965909481048584,
      "learning_rate": 6.924615384615385e-06,
      "loss": 0.0046,
      "step": 6500
    },
    {
      "epoch": 5.517970401691332,
      "grad_norm": 0.13480918109416962,
      "learning_rate": 6.911794871794872e-06,
      "loss": 0.0044,
      "step": 6525
    },
    {
      "epoch": 5.539112050739957,
      "grad_norm": 0.2856655716896057,
      "learning_rate": 6.89897435897436e-06,
      "loss": 0.005,
      "step": 6550
    },
    {
      "epoch": 5.560253699788584,
      "grad_norm": 0.8899367451667786,
      "learning_rate": 6.8861538461538465e-06,
      "loss": 0.0035,
      "step": 6575
    },
    {
      "epoch": 5.5813953488372094,
      "grad_norm": 1.383070707321167,
      "learning_rate": 6.873333333333334e-06,
      "loss": 0.0054,
      "step": 6600
    },
    {
      "epoch": 5.602536997885835,
      "grad_norm": 0.2242678701877594,
      "learning_rate": 6.860512820512821e-06,
      "loss": 0.0074,
      "step": 6625
    },
    {
      "epoch": 5.623678646934461,
      "grad_norm": 0.3287247121334076,
      "learning_rate": 6.847692307692309e-06,
      "loss": 0.0068,
      "step": 6650
    },
    {
      "epoch": 5.644820295983086,
      "grad_norm": 0.47697022557258606,
      "learning_rate": 6.8348717948717955e-06,
      "loss": 0.0036,
      "step": 6675
    },
    {
      "epoch": 5.665961945031713,
      "grad_norm": 0.23843590915203094,
      "learning_rate": 6.822051282051283e-06,
      "loss": 0.0053,
      "step": 6700
    },
    {
      "epoch": 5.687103594080338,
      "grad_norm": 0.14214707911014557,
      "learning_rate": 6.80923076923077e-06,
      "loss": 0.0023,
      "step": 6725
    },
    {
      "epoch": 5.708245243128964,
      "grad_norm": 0.9868163466453552,
      "learning_rate": 6.796410256410258e-06,
      "loss": 0.003,
      "step": 6750
    },
    {
      "epoch": 5.72938689217759,
      "grad_norm": 0.6320108771324158,
      "learning_rate": 6.7835897435897445e-06,
      "loss": 0.0061,
      "step": 6775
    },
    {
      "epoch": 5.750528541226216,
      "grad_norm": 0.08067617565393448,
      "learning_rate": 6.770769230769232e-06,
      "loss": 0.0055,
      "step": 6800
    },
    {
      "epoch": 5.7716701902748415,
      "grad_norm": 1.7972437143325806,
      "learning_rate": 6.757948717948718e-06,
      "loss": 0.0039,
      "step": 6825
    },
    {
      "epoch": 5.792811839323467,
      "grad_norm": 0.9350019693374634,
      "learning_rate": 6.745128205128205e-06,
      "loss": 0.0055,
      "step": 6850
    },
    {
      "epoch": 5.813953488372093,
      "grad_norm": 0.21952944993972778,
      "learning_rate": 6.732307692307693e-06,
      "loss": 0.0037,
      "step": 6875
    },
    {
      "epoch": 5.835095137420719,
      "grad_norm": 2.4713776111602783,
      "learning_rate": 6.7194871794871794e-06,
      "loss": 0.0056,
      "step": 6900
    },
    {
      "epoch": 5.856236786469345,
      "grad_norm": 0.4809707999229431,
      "learning_rate": 6.706666666666667e-06,
      "loss": 0.0046,
      "step": 6925
    },
    {
      "epoch": 5.87737843551797,
      "grad_norm": 1.1874123811721802,
      "learning_rate": 6.693846153846154e-06,
      "loss": 0.0046,
      "step": 6950
    },
    {
      "epoch": 5.898520084566596,
      "grad_norm": 0.13787618279457092,
      "learning_rate": 6.681025641025642e-06,
      "loss": 0.004,
      "step": 6975
    },
    {
      "epoch": 5.9196617336152215,
      "grad_norm": 1.4916746616363525,
      "learning_rate": 6.6682051282051284e-06,
      "loss": 0.0033,
      "step": 7000
    },
    {
      "epoch": 5.9196617336152215,
      "eval_loss": 0.08342708647251129,
      "eval_runtime": 419.2861,
      "eval_samples_per_second": 8.684,
      "eval_steps_per_second": 0.544,
      "eval_wer": 0.0651074295790988,
      "step": 7000
    },
    {
      "epoch": 5.940803382663848,
      "grad_norm": 2.867432117462158,
      "learning_rate": 6.655384615384616e-06,
      "loss": 0.0097,
      "step": 7025
    },
    {
      "epoch": 5.961945031712474,
      "grad_norm": 0.590003490447998,
      "learning_rate": 6.642564102564103e-06,
      "loss": 0.0032,
      "step": 7050
    },
    {
      "epoch": 5.983086680761099,
      "grad_norm": 2.920905351638794,
      "learning_rate": 6.629743589743591e-06,
      "loss": 0.005,
      "step": 7075
    },
    {
      "epoch": 6.004228329809725,
      "grad_norm": 0.9033246040344238,
      "learning_rate": 6.616923076923077e-06,
      "loss": 0.0062,
      "step": 7100
    },
    {
      "epoch": 6.025369978858351,
      "grad_norm": 5.353052139282227,
      "learning_rate": 6.604102564102565e-06,
      "loss": 0.004,
      "step": 7125
    },
    {
      "epoch": 6.046511627906977,
      "grad_norm": 0.0608515739440918,
      "learning_rate": 6.591282051282052e-06,
      "loss": 0.0027,
      "step": 7150
    },
    {
      "epoch": 6.067653276955602,
      "grad_norm": 0.09016130864620209,
      "learning_rate": 6.578461538461539e-06,
      "loss": 0.0046,
      "step": 7175
    },
    {
      "epoch": 6.088794926004228,
      "grad_norm": 1.249212384223938,
      "learning_rate": 6.565641025641026e-06,
      "loss": 0.0044,
      "step": 7200
    },
    {
      "epoch": 6.1099365750528545,
      "grad_norm": 0.2739676237106323,
      "learning_rate": 6.552820512820513e-06,
      "loss": 0.0036,
      "step": 7225
    },
    {
      "epoch": 6.13107822410148,
      "grad_norm": 1.4266902208328247,
      "learning_rate": 6.540000000000001e-06,
      "loss": 0.0056,
      "step": 7250
    },
    {
      "epoch": 6.152219873150106,
      "grad_norm": 1.1306654214859009,
      "learning_rate": 6.527179487179488e-06,
      "loss": 0.0023,
      "step": 7275
    },
    {
      "epoch": 6.173361522198731,
      "grad_norm": 0.24447208642959595,
      "learning_rate": 6.514358974358975e-06,
      "loss": 0.0025,
      "step": 7300
    },
    {
      "epoch": 6.194503171247358,
      "grad_norm": 1.2917050123214722,
      "learning_rate": 6.501538461538462e-06,
      "loss": 0.0033,
      "step": 7325
    },
    {
      "epoch": 6.215644820295983,
      "grad_norm": 0.2974170744419098,
      "learning_rate": 6.48871794871795e-06,
      "loss": 0.0024,
      "step": 7350
    },
    {
      "epoch": 6.236786469344609,
      "grad_norm": 1.0729445219039917,
      "learning_rate": 6.475897435897437e-06,
      "loss": 0.0021,
      "step": 7375
    },
    {
      "epoch": 6.2579281183932345,
      "grad_norm": 0.10066499561071396,
      "learning_rate": 6.463076923076924e-06,
      "loss": 0.0019,
      "step": 7400
    },
    {
      "epoch": 6.27906976744186,
      "grad_norm": 0.07153913378715515,
      "learning_rate": 6.45025641025641e-06,
      "loss": 0.0027,
      "step": 7425
    },
    {
      "epoch": 6.3002114164904865,
      "grad_norm": 0.6892584562301636,
      "learning_rate": 6.437435897435897e-06,
      "loss": 0.0023,
      "step": 7450
    },
    {
      "epoch": 6.321353065539112,
      "grad_norm": 1.0604571104049683,
      "learning_rate": 6.424615384615385e-06,
      "loss": 0.0039,
      "step": 7475
    },
    {
      "epoch": 6.342494714587738,
      "grad_norm": 1.8776297569274902,
      "learning_rate": 6.411794871794872e-06,
      "loss": 0.002,
      "step": 7500
    },
    {
      "epoch": 6.363636363636363,
      "grad_norm": 0.16421526670455933,
      "learning_rate": 6.398974358974359e-06,
      "loss": 0.0027,
      "step": 7525
    },
    {
      "epoch": 6.38477801268499,
      "grad_norm": 0.1744580715894699,
      "learning_rate": 6.386153846153846e-06,
      "loss": 0.0044,
      "step": 7550
    },
    {
      "epoch": 6.405919661733615,
      "grad_norm": 0.06431788951158524,
      "learning_rate": 6.373333333333334e-06,
      "loss": 0.0025,
      "step": 7575
    },
    {
      "epoch": 6.427061310782241,
      "grad_norm": 0.1876668632030487,
      "learning_rate": 6.360512820512821e-06,
      "loss": 0.0032,
      "step": 7600
    },
    {
      "epoch": 6.4482029598308666,
      "grad_norm": 0.1662682741880417,
      "learning_rate": 6.347692307692308e-06,
      "loss": 0.0027,
      "step": 7625
    },
    {
      "epoch": 6.469344608879492,
      "grad_norm": 0.08980126678943634,
      "learning_rate": 6.334871794871795e-06,
      "loss": 0.0035,
      "step": 7650
    },
    {
      "epoch": 6.490486257928119,
      "grad_norm": 0.13998441398143768,
      "learning_rate": 6.322051282051283e-06,
      "loss": 0.0015,
      "step": 7675
    },
    {
      "epoch": 6.511627906976744,
      "grad_norm": 0.08528197556734085,
      "learning_rate": 6.30923076923077e-06,
      "loss": 0.0039,
      "step": 7700
    },
    {
      "epoch": 6.53276955602537,
      "grad_norm": 0.07632896304130554,
      "learning_rate": 6.296410256410257e-06,
      "loss": 0.004,
      "step": 7725
    },
    {
      "epoch": 6.553911205073996,
      "grad_norm": 1.8177125453948975,
      "learning_rate": 6.283589743589744e-06,
      "loss": 0.0045,
      "step": 7750
    },
    {
      "epoch": 6.575052854122622,
      "grad_norm": 0.3318551778793335,
      "learning_rate": 6.270769230769231e-06,
      "loss": 0.003,
      "step": 7775
    },
    {
      "epoch": 6.5961945031712474,
      "grad_norm": 0.18770574033260345,
      "learning_rate": 6.257948717948719e-06,
      "loss": 0.0026,
      "step": 7800
    },
    {
      "epoch": 6.617336152219873,
      "grad_norm": 0.120570108294487,
      "learning_rate": 6.2451282051282055e-06,
      "loss": 0.0053,
      "step": 7825
    },
    {
      "epoch": 6.638477801268499,
      "grad_norm": 8.554120063781738,
      "learning_rate": 6.232307692307693e-06,
      "loss": 0.0038,
      "step": 7850
    },
    {
      "epoch": 6.659619450317125,
      "grad_norm": 0.31921058893203735,
      "learning_rate": 6.21948717948718e-06,
      "loss": 0.0031,
      "step": 7875
    },
    {
      "epoch": 6.680761099365751,
      "grad_norm": 0.34284916520118713,
      "learning_rate": 6.206666666666668e-06,
      "loss": 0.0037,
      "step": 7900
    },
    {
      "epoch": 6.701902748414376,
      "grad_norm": 1.419690489768982,
      "learning_rate": 6.1938461538461545e-06,
      "loss": 0.005,
      "step": 7925
    },
    {
      "epoch": 6.723044397463002,
      "grad_norm": 0.1808280348777771,
      "learning_rate": 6.181025641025642e-06,
      "loss": 0.0036,
      "step": 7950
    },
    {
      "epoch": 6.7441860465116275,
      "grad_norm": 1.7059262990951538,
      "learning_rate": 6.168205128205129e-06,
      "loss": 0.002,
      "step": 7975
    },
    {
      "epoch": 6.765327695560254,
      "grad_norm": 1.1756020784378052,
      "learning_rate": 6.155384615384617e-06,
      "loss": 0.0029,
      "step": 8000
    },
    {
      "epoch": 6.765327695560254,
      "eval_loss": 0.08152683824300766,
      "eval_runtime": 415.4126,
      "eval_samples_per_second": 8.765,
      "eval_steps_per_second": 0.549,
      "eval_wer": 0.06269432456262472,
      "step": 8000
    },
    {
      "epoch": 6.7864693446088795,
      "grad_norm": 0.07017653435468674,
      "learning_rate": 6.142564102564103e-06,
      "loss": 0.002,
      "step": 8025
    },
    {
      "epoch": 6.807610993657505,
      "grad_norm": 0.10346279293298721,
      "learning_rate": 6.1297435897435895e-06,
      "loss": 0.0039,
      "step": 8050
    },
    {
      "epoch": 6.828752642706131,
      "grad_norm": 1.0720205307006836,
      "learning_rate": 6.116923076923077e-06,
      "loss": 0.0028,
      "step": 8075
    },
    {
      "epoch": 6.849894291754757,
      "grad_norm": 0.05146399512887001,
      "learning_rate": 6.104102564102564e-06,
      "loss": 0.0024,
      "step": 8100
    },
    {
      "epoch": 6.871035940803383,
      "grad_norm": 0.5876528024673462,
      "learning_rate": 6.091282051282052e-06,
      "loss": 0.0034,
      "step": 8125
    },
    {
      "epoch": 6.892177589852008,
      "grad_norm": 0.4982081353664398,
      "learning_rate": 6.0784615384615384e-06,
      "loss": 0.0032,
      "step": 8150
    },
    {
      "epoch": 6.913319238900634,
      "grad_norm": 0.23112235963344574,
      "learning_rate": 6.065641025641026e-06,
      "loss": 0.0019,
      "step": 8175
    },
    {
      "epoch": 6.93446088794926,
      "grad_norm": 0.19416943192481995,
      "learning_rate": 6.052820512820513e-06,
      "loss": 0.0045,
      "step": 8200
    },
    {
      "epoch": 6.955602536997886,
      "grad_norm": 1.9558351039886475,
      "learning_rate": 6.040000000000001e-06,
      "loss": 0.0028,
      "step": 8225
    },
    {
      "epoch": 6.976744186046512,
      "grad_norm": 0.15180020034313202,
      "learning_rate": 6.0271794871794874e-06,
      "loss": 0.0026,
      "step": 8250
    },
    {
      "epoch": 6.997885835095137,
      "grad_norm": 0.434987872838974,
      "learning_rate": 6.014358974358975e-06,
      "loss": 0.0039,
      "step": 8275
    },
    {
      "epoch": 7.019027484143764,
      "grad_norm": 0.07390391826629639,
      "learning_rate": 6.001538461538462e-06,
      "loss": 0.0026,
      "step": 8300
    },
    {
      "epoch": 7.040169133192389,
      "grad_norm": 0.3644394874572754,
      "learning_rate": 5.98871794871795e-06,
      "loss": 0.0022,
      "step": 8325
    },
    {
      "epoch": 7.061310782241015,
      "grad_norm": 0.0214530136436224,
      "learning_rate": 5.9758974358974364e-06,
      "loss": 0.0017,
      "step": 8350
    },
    {
      "epoch": 7.08245243128964,
      "grad_norm": 0.05759245902299881,
      "learning_rate": 5.963076923076923e-06,
      "loss": 0.0011,
      "step": 8375
    },
    {
      "epoch": 7.103594080338266,
      "grad_norm": 0.40693360567092896,
      "learning_rate": 5.950256410256411e-06,
      "loss": 0.0015,
      "step": 8400
    },
    {
      "epoch": 7.1247357293868925,
      "grad_norm": 0.03377969190478325,
      "learning_rate": 5.937435897435898e-06,
      "loss": 0.0017,
      "step": 8425
    },
    {
      "epoch": 7.145877378435518,
      "grad_norm": 0.07641027122735977,
      "learning_rate": 5.9246153846153854e-06,
      "loss": 0.0017,
      "step": 8450
    },
    {
      "epoch": 7.167019027484144,
      "grad_norm": 0.03931877389550209,
      "learning_rate": 5.911794871794872e-06,
      "loss": 0.0042,
      "step": 8475
    },
    {
      "epoch": 7.188160676532769,
      "grad_norm": 0.8600488901138306,
      "learning_rate": 5.89897435897436e-06,
      "loss": 0.0012,
      "step": 8500
    },
    {
      "epoch": 7.209302325581396,
      "grad_norm": 0.256000816822052,
      "learning_rate": 5.886153846153847e-06,
      "loss": 0.0023,
      "step": 8525
    },
    {
      "epoch": 7.230443974630021,
      "grad_norm": 0.22403153777122498,
      "learning_rate": 5.873333333333334e-06,
      "loss": 0.0014,
      "step": 8550
    },
    {
      "epoch": 7.251585623678647,
      "grad_norm": 0.7728956341743469,
      "learning_rate": 5.860512820512821e-06,
      "loss": 0.003,
      "step": 8575
    },
    {
      "epoch": 7.2727272727272725,
      "grad_norm": 1.0295345783233643,
      "learning_rate": 5.848205128205129e-06,
      "loss": 0.0045,
      "step": 8600
    },
    {
      "epoch": 7.293868921775898,
      "grad_norm": 0.1343035101890564,
      "learning_rate": 5.835384615384616e-06,
      "loss": 0.0021,
      "step": 8625
    },
    {
      "epoch": 7.3150105708245245,
      "grad_norm": 0.027379866689443588,
      "learning_rate": 5.822564102564103e-06,
      "loss": 0.0016,
      "step": 8650
    },
    {
      "epoch": 7.33615221987315,
      "grad_norm": 0.0675494521856308,
      "learning_rate": 5.809743589743591e-06,
      "loss": 0.0011,
      "step": 8675
    },
    {
      "epoch": 7.357293868921776,
      "grad_norm": 0.05748479440808296,
      "learning_rate": 5.796923076923078e-06,
      "loss": 0.0011,
      "step": 8700
    },
    {
      "epoch": 7.378435517970401,
      "grad_norm": 0.07236622273921967,
      "learning_rate": 5.784102564102565e-06,
      "loss": 0.0012,
      "step": 8725
    },
    {
      "epoch": 7.399577167019028,
      "grad_norm": 0.05686989426612854,
      "learning_rate": 5.771282051282052e-06,
      "loss": 0.0035,
      "step": 8750
    },
    {
      "epoch": 7.420718816067653,
      "grad_norm": 2.922973871231079,
      "learning_rate": 5.75846153846154e-06,
      "loss": 0.0042,
      "step": 8775
    },
    {
      "epoch": 7.441860465116279,
      "grad_norm": 0.06991543620824814,
      "learning_rate": 5.745641025641027e-06,
      "loss": 0.0021,
      "step": 8800
    },
    {
      "epoch": 7.4630021141649046,
      "grad_norm": 0.3402734100818634,
      "learning_rate": 5.732820512820513e-06,
      "loss": 0.0028,
      "step": 8825
    },
    {
      "epoch": 7.484143763213531,
      "grad_norm": 0.06663922965526581,
      "learning_rate": 5.72e-06,
      "loss": 0.0024,
      "step": 8850
    },
    {
      "epoch": 7.505285412262157,
      "grad_norm": 0.0772562250494957,
      "learning_rate": 5.707179487179487e-06,
      "loss": 0.0026,
      "step": 8875
    },
    {
      "epoch": 7.526427061310782,
      "grad_norm": 0.13530157506465912,
      "learning_rate": 5.694358974358975e-06,
      "loss": 0.002,
      "step": 8900
    },
    {
      "epoch": 7.547568710359408,
      "grad_norm": 0.04225620627403259,
      "learning_rate": 5.681538461538462e-06,
      "loss": 0.003,
      "step": 8925
    },
    {
      "epoch": 7.568710359408033,
      "grad_norm": 0.06277480721473694,
      "learning_rate": 5.668717948717949e-06,
      "loss": 0.0041,
      "step": 8950
    },
    {
      "epoch": 7.58985200845666,
      "grad_norm": 0.06269287317991257,
      "learning_rate": 5.655897435897436e-06,
|
"loss": 0.0023, |
|
"step": 8975 |
|
}, |
|
{ |
|
"epoch": 7.6109936575052854, |
|
"grad_norm": 0.06426391750574112, |
|
"learning_rate": 5.643076923076923e-06, |
|
"loss": 0.0014, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 7.6109936575052854, |
|
"eval_loss": 0.08534783869981766, |
|
"eval_runtime": 415.5854, |
|
"eval_samples_per_second": 8.761, |
|
"eval_steps_per_second": 0.549, |
|
"eval_wer": 0.06274073042832615, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 7.632135306553911, |
|
"grad_norm": 0.04878684878349304, |
|
"learning_rate": 5.630256410256411e-06, |
|
"loss": 0.0013, |
|
"step": 9025 |
|
}, |
|
{ |
|
"epoch": 7.653276955602537, |
|
"grad_norm": 0.073283351957798, |
|
"learning_rate": 5.6174358974358974e-06, |
|
"loss": 0.0015, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 7.674418604651163, |
|
"grad_norm": 0.04749315604567528, |
|
"learning_rate": 5.604615384615385e-06, |
|
"loss": 0.0015, |
|
"step": 9075 |
|
}, |
|
{ |
|
"epoch": 7.695560253699789, |
|
"grad_norm": 0.13554809987545013, |
|
"learning_rate": 5.591794871794872e-06, |
|
"loss": 0.0014, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 7.716701902748414, |
|
"grad_norm": 0.03620721027255058, |
|
"learning_rate": 5.57897435897436e-06, |
|
"loss": 0.0023, |
|
"step": 9125 |
|
}, |
|
{ |
|
"epoch": 7.73784355179704, |
|
"grad_norm": 0.9968867301940918, |
|
"learning_rate": 5.566153846153846e-06, |
|
"loss": 0.0026, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 7.758985200845666, |
|
"grad_norm": 0.055259935557842255, |
|
"learning_rate": 5.553333333333334e-06, |
|
"loss": 0.0016, |
|
"step": 9175 |
|
}, |
|
{ |
|
"epoch": 7.780126849894292, |
|
"grad_norm": 1.2477086782455444, |
|
"learning_rate": 5.540512820512821e-06, |
|
"loss": 0.0026, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 7.8012684989429175, |
|
"grad_norm": 0.42810365557670593, |
|
"learning_rate": 5.527692307692309e-06, |
|
"loss": 0.0022, |
|
"step": 9225 |
|
}, |
|
{ |
|
"epoch": 7.822410147991543, |
|
"grad_norm": 0.7914177775382996, |
|
"learning_rate": 5.514871794871795e-06, |
|
"loss": 0.0026, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 7.843551797040169, |
|
"grad_norm": 0.7927113771438599, |
|
"learning_rate": 5.502051282051283e-06, |
|
"loss": 0.0026, |
|
"step": 9275 |
|
}, |
|
{ |
|
"epoch": 7.864693446088795, |
|
"grad_norm": 0.09316157549619675, |
|
"learning_rate": 5.48923076923077e-06, |
|
"loss": 0.0032, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 7.885835095137421, |
|
"grad_norm": 0.21055510640144348, |
|
"learning_rate": 5.476410256410258e-06, |
|
"loss": 0.0023, |
|
"step": 9325 |
|
}, |
|
{ |
|
"epoch": 7.906976744186046, |
|
"grad_norm": 1.5093400478363037, |
|
"learning_rate": 5.463589743589744e-06, |
|
"loss": 0.0028, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 7.928118393234672, |
|
"grad_norm": 0.9146241545677185, |
|
"learning_rate": 5.450769230769232e-06, |
|
"loss": 0.0015, |
|
"step": 9375 |
|
}, |
|
{ |
|
"epoch": 7.949260042283298, |
|
"grad_norm": 0.6534460186958313, |
|
"learning_rate": 5.437948717948719e-06, |
|
"loss": 0.0028, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 7.970401691331924, |
|
"grad_norm": 0.056375931948423386, |
|
"learning_rate": 5.425128205128205e-06, |
|
"loss": 0.0015, |
|
"step": 9425 |
|
}, |
|
{ |
|
"epoch": 7.99154334038055, |
|
"grad_norm": 0.08614702522754669, |
|
"learning_rate": 5.4123076923076925e-06, |
|
"loss": 0.0028, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 8.012684989429175, |
|
"grad_norm": 0.04644578695297241, |
|
"learning_rate": 5.399487179487179e-06, |
|
"loss": 0.0033, |
|
"step": 9475 |
|
}, |
|
{ |
|
"epoch": 8.033826638477802, |
|
"grad_norm": 0.12249679118394852, |
|
"learning_rate": 5.386666666666667e-06, |
|
"loss": 0.002, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 8.054968287526426, |
|
"grad_norm": 0.18292202055454254, |
|
"learning_rate": 5.373846153846154e-06, |
|
"loss": 0.0009, |
|
"step": 9525 |
|
}, |
|
{ |
|
"epoch": 8.076109936575053, |
|
"grad_norm": 0.94189453125, |
|
"learning_rate": 5.3610256410256415e-06, |
|
"loss": 0.0029, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 8.09725158562368, |
|
"grad_norm": 0.05061868205666542, |
|
"learning_rate": 5.348205128205128e-06, |
|
"loss": 0.0011, |
|
"step": 9575 |
|
}, |
|
{ |
|
"epoch": 8.118393234672304, |
|
"grad_norm": 0.6493793725967407, |
|
"learning_rate": 5.335384615384615e-06, |
|
"loss": 0.0006, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 8.13953488372093, |
|
"grad_norm": 0.5912912487983704, |
|
"learning_rate": 5.322564102564103e-06, |
|
"loss": 0.0015, |
|
"step": 9625 |
|
}, |
|
{ |
|
"epoch": 8.160676532769555, |
|
"grad_norm": 0.036978259682655334, |
|
"learning_rate": 5.30974358974359e-06, |
|
"loss": 0.0021, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 8.181818181818182, |
|
"grad_norm": 0.13611486554145813, |
|
"learning_rate": 5.296923076923077e-06, |
|
"loss": 0.002, |
|
"step": 9675 |
|
}, |
|
{ |
|
"epoch": 8.202959830866808, |
|
"grad_norm": 1.0610445737838745, |
|
"learning_rate": 5.284102564102564e-06, |
|
"loss": 0.0016, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 8.224101479915433, |
|
"grad_norm": 0.024419546127319336, |
|
"learning_rate": 5.271282051282052e-06, |
|
"loss": 0.004, |
|
"step": 9725 |
|
}, |
|
{ |
|
"epoch": 8.24524312896406, |
|
"grad_norm": 0.07428953051567078, |
|
"learning_rate": 5.258461538461539e-06, |
|
"loss": 0.0014, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 8.266384778012686, |
|
"grad_norm": 1.252059817314148, |
|
"learning_rate": 5.245641025641026e-06, |
|
"loss": 0.0018, |
|
"step": 9775 |
|
}, |
|
{ |
|
"epoch": 8.28752642706131, |
|
"grad_norm": 0.14112520217895508, |
|
"learning_rate": 5.232820512820513e-06, |
|
"loss": 0.0015, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 8.308668076109937, |
|
"grad_norm": 0.05485629662871361, |
|
"learning_rate": 5.220000000000001e-06, |
|
"loss": 0.0017, |
|
"step": 9825 |
|
}, |
|
{ |
|
"epoch": 8.329809725158562, |
|
"grad_norm": 0.9850168228149414, |
|
"learning_rate": 5.207179487179488e-06, |
|
"loss": 0.0034, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 8.350951374207188, |
|
"grad_norm": 0.2707412838935852, |
|
"learning_rate": 5.194358974358975e-06, |
|
"loss": 0.0024, |
|
"step": 9875 |
|
}, |
|
{ |
|
"epoch": 8.372093023255815, |
|
"grad_norm": 0.7773647308349609, |
|
"learning_rate": 5.181538461538462e-06, |
|
"loss": 0.0023, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 8.39323467230444, |
|
"grad_norm": 0.2679407000541687, |
|
"learning_rate": 5.16871794871795e-06, |
|
"loss": 0.0023, |
|
"step": 9925 |
|
}, |
|
{ |
|
"epoch": 8.414376321353066, |
|
"grad_norm": 0.1327216774225235, |
|
"learning_rate": 5.155897435897437e-06, |
|
"loss": 0.0027, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 8.43551797040169, |
|
"grad_norm": 0.5626333951950073, |
|
"learning_rate": 5.143076923076924e-06, |
|
"loss": 0.0015, |
|
"step": 9975 |
|
}, |
|
{ |
|
"epoch": 8.456659619450317, |
|
"grad_norm": 0.06341422349214554, |
|
"learning_rate": 5.130256410256411e-06, |
|
"loss": 0.0013, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 8.456659619450317, |
|
"eval_loss": 0.08607128262519836, |
|
"eval_runtime": 417.0751, |
|
"eval_samples_per_second": 8.73, |
|
"eval_steps_per_second": 0.547, |
|
"eval_wer": 0.06413290639936887, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 8.477801268498943, |
|
"grad_norm": 0.04358547925949097, |
|
"learning_rate": 5.117435897435897e-06, |
|
"loss": 0.0018, |
|
"step": 10025 |
|
}, |
|
{ |
|
"epoch": 8.498942917547568, |
|
"grad_norm": 0.11723045259714127, |
|
"learning_rate": 5.104615384615385e-06, |
|
"loss": 0.0034, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 8.520084566596195, |
|
"grad_norm": 0.034698598086833954, |
|
"learning_rate": 5.091794871794872e-06, |
|
"loss": 0.001, |
|
"step": 10075 |
|
}, |
|
{ |
|
"epoch": 8.541226215644821, |
|
"grad_norm": 0.2308579683303833, |
|
"learning_rate": 5.078974358974359e-06, |
|
"loss": 0.0015, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 8.562367864693446, |
|
"grad_norm": 0.03924379497766495, |
|
"learning_rate": 5.066153846153846e-06, |
|
"loss": 0.0014, |
|
"step": 10125 |
|
}, |
|
{ |
|
"epoch": 8.583509513742072, |
|
"grad_norm": 0.018929792568087578, |
|
"learning_rate": 5.053333333333334e-06, |
|
"loss": 0.0014, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 8.604651162790697, |
|
"grad_norm": 0.06054197624325752, |
|
"learning_rate": 5.040512820512821e-06, |
|
"loss": 0.0032, |
|
"step": 10175 |
|
}, |
|
{ |
|
"epoch": 8.625792811839323, |
|
"grad_norm": 0.04145154356956482, |
|
"learning_rate": 5.027692307692308e-06, |
|
"loss": 0.002, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 8.64693446088795, |
|
"grad_norm": 0.05452277883887291, |
|
"learning_rate": 5.014871794871795e-06, |
|
"loss": 0.0014, |
|
"step": 10225 |
|
}, |
|
{ |
|
"epoch": 8.668076109936575, |
|
"grad_norm": 0.04759210720658302, |
|
"learning_rate": 5.002051282051282e-06, |
|
"loss": 0.002, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 8.689217758985201, |
|
"grad_norm": 0.029456498101353645, |
|
"learning_rate": 4.98923076923077e-06, |
|
"loss": 0.001, |
|
"step": 10275 |
|
}, |
|
{ |
|
"epoch": 8.710359408033828, |
|
"grad_norm": 0.062097664922475815, |
|
"learning_rate": 4.9764102564102564e-06, |
|
"loss": 0.001, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 8.731501057082452, |
|
"grad_norm": 0.07016895711421967, |
|
"learning_rate": 4.963589743589744e-06, |
|
"loss": 0.0011, |
|
"step": 10325 |
|
}, |
|
{ |
|
"epoch": 8.752642706131079, |
|
"grad_norm": 2.872891426086426, |
|
"learning_rate": 4.950769230769231e-06, |
|
"loss": 0.0018, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 8.773784355179703, |
|
"grad_norm": 0.20757004618644714, |
|
"learning_rate": 4.937948717948719e-06, |
|
"loss": 0.0016, |
|
"step": 10375 |
|
}, |
|
{ |
|
"epoch": 8.79492600422833, |
|
"grad_norm": 0.10146087408065796, |
|
"learning_rate": 4.9251282051282054e-06, |
|
"loss": 0.0032, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 8.816067653276956, |
|
"grad_norm": 0.07840722799301147, |
|
"learning_rate": 4.912307692307693e-06, |
|
"loss": 0.001, |
|
"step": 10425 |
|
}, |
|
{ |
|
"epoch": 8.837209302325581, |
|
"grad_norm": 0.0831177830696106, |
|
"learning_rate": 4.89948717948718e-06, |
|
"loss": 0.0012, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 8.858350951374208, |
|
"grad_norm": 0.6827990412712097, |
|
"learning_rate": 4.886666666666668e-06, |
|
"loss": 0.0016, |
|
"step": 10475 |
|
}, |
|
{ |
|
"epoch": 8.879492600422832, |
|
"grad_norm": 0.25397488474845886, |
|
"learning_rate": 4.873846153846154e-06, |
|
"loss": 0.0017, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 8.900634249471459, |
|
"grad_norm": 0.07114572823047638, |
|
"learning_rate": 4.861025641025641e-06, |
|
"loss": 0.0009, |
|
"step": 10525 |
|
}, |
|
{ |
|
"epoch": 8.921775898520085, |
|
"grad_norm": 0.05897723138332367, |
|
"learning_rate": 4.848205128205128e-06, |
|
"loss": 0.0016, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 8.94291754756871, |
|
"grad_norm": 0.048447199165821075, |
|
"learning_rate": 4.835384615384616e-06, |
|
"loss": 0.0015, |
|
"step": 10575 |
|
}, |
|
{ |
|
"epoch": 8.964059196617336, |
|
"grad_norm": 0.04842350259423256, |
|
"learning_rate": 4.8225641025641026e-06, |
|
"loss": 0.0016, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 8.985200845665961, |
|
"grad_norm": 0.06469196826219559, |
|
"learning_rate": 4.80974358974359e-06, |
|
"loss": 0.0011, |
|
"step": 10625 |
|
}, |
|
{ |
|
"epoch": 9.006342494714588, |
|
"grad_norm": 0.021318404003977776, |
|
"learning_rate": 4.796923076923077e-06, |
|
"loss": 0.0038, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 9.027484143763214, |
|
"grad_norm": 0.03229638934135437, |
|
"learning_rate": 4.784102564102565e-06, |
|
"loss": 0.0014, |
|
"step": 10675 |
|
}, |
|
{ |
|
"epoch": 9.048625792811839, |
|
"grad_norm": 0.08799826353788376, |
|
"learning_rate": 4.7712820512820516e-06, |
|
"loss": 0.0026, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 9.069767441860465, |
|
"grad_norm": 0.11259903013706207, |
|
"learning_rate": 4.758461538461539e-06, |
|
"loss": 0.0015, |
|
"step": 10725 |
|
}, |
|
{ |
|
"epoch": 9.090909090909092, |
|
"grad_norm": 0.14656835794448853, |
|
"learning_rate": 4.745641025641026e-06, |
|
"loss": 0.0019, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 9.112050739957716, |
|
"grad_norm": 0.5436356663703918, |
|
"learning_rate": 4.732820512820514e-06, |
|
"loss": 0.001, |
|
"step": 10775 |
|
}, |
|
{ |
|
"epoch": 9.133192389006343, |
|
"grad_norm": 0.02085413970053196, |
|
"learning_rate": 4.7200000000000005e-06, |
|
"loss": 0.001, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 9.154334038054968, |
|
"grad_norm": 0.04881500452756882, |
|
"learning_rate": 4.707179487179487e-06, |
|
"loss": 0.001, |
|
"step": 10825 |
|
}, |
|
{ |
|
"epoch": 9.175475687103594, |
|
"grad_norm": 0.04903466999530792, |
|
"learning_rate": 4.694358974358974e-06, |
|
"loss": 0.0008, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 9.19661733615222, |
|
"grad_norm": 0.020907558500766754, |
|
"learning_rate": 4.681538461538462e-06, |
|
"loss": 0.0014, |
|
"step": 10875 |
|
}, |
|
{ |
|
"epoch": 9.217758985200845, |
|
"grad_norm": 0.13889507949352264, |
|
"learning_rate": 4.668717948717949e-06, |
|
"loss": 0.0014, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 9.238900634249472, |
|
"grad_norm": 0.050613515079021454, |
|
"learning_rate": 4.655897435897436e-06, |
|
"loss": 0.0032, |
|
"step": 10925 |
|
}, |
|
{ |
|
"epoch": 9.260042283298096, |
|
"grad_norm": 1.744171380996704, |
|
"learning_rate": 4.643076923076923e-06, |
|
"loss": 0.0011, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 9.281183932346723, |
|
"grad_norm": 0.013857314363121986, |
|
"learning_rate": 4.630256410256411e-06, |
|
"loss": 0.0011, |
|
"step": 10975 |
|
}, |
|
{ |
|
"epoch": 9.30232558139535, |
|
"grad_norm": 0.03641614317893982, |
|
"learning_rate": 4.617435897435898e-06, |
|
"loss": 0.0005, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 9.30232558139535, |
|
"eval_loss": 0.08570627868175507, |
|
"eval_runtime": 422.6796, |
|
"eval_samples_per_second": 8.614, |
|
"eval_steps_per_second": 0.539, |
|
"eval_wer": 0.06334400668244466, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 9.323467230443974, |
|
"grad_norm": 0.32879745960235596, |
|
"learning_rate": 4.604615384615385e-06, |
|
"loss": 0.0007, |
|
"step": 11025 |
|
}, |
|
{ |
|
"epoch": 9.3446088794926, |
|
"grad_norm": 0.21095559000968933, |
|
"learning_rate": 4.591794871794872e-06, |
|
"loss": 0.0014, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 9.365750528541227, |
|
"grad_norm": 0.01756209135055542, |
|
"learning_rate": 4.57897435897436e-06, |
|
"loss": 0.0025, |
|
"step": 11075 |
|
}, |
|
{ |
|
"epoch": 9.386892177589852, |
|
"grad_norm": 0.04053978621959686, |
|
"learning_rate": 4.566153846153847e-06, |
|
"loss": 0.0005, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 9.408033826638478, |
|
"grad_norm": 1.196875810623169, |
|
"learning_rate": 4.5533333333333335e-06, |
|
"loss": 0.0015, |
|
"step": 11125 |
|
}, |
|
{ |
|
"epoch": 9.429175475687103, |
|
"grad_norm": 0.027264421805739403, |
|
"learning_rate": 4.54051282051282e-06, |
|
"loss": 0.0005, |
|
"step": 11150 |
|
}, |
|
{ |
|
"epoch": 9.45031712473573, |
|
"grad_norm": 0.021927664056420326, |
|
"learning_rate": 4.527692307692308e-06, |
|
"loss": 0.0013, |
|
"step": 11175 |
|
}, |
|
{ |
|
"epoch": 9.471458773784356, |
|
"grad_norm": 0.04593001306056976, |
|
"learning_rate": 4.514871794871795e-06, |
|
"loss": 0.0013, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 9.49260042283298, |
|
"grad_norm": 0.8032304644584656, |
|
"learning_rate": 4.5020512820512825e-06, |
|
"loss": 0.0015, |
|
"step": 11225 |
|
}, |
|
{ |
|
"epoch": 9.513742071881607, |
|
"grad_norm": 0.6102417707443237, |
|
"learning_rate": 4.489230769230769e-06, |
|
"loss": 0.0009, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 9.534883720930232, |
|
"grad_norm": 0.029139434918761253, |
|
"learning_rate": 4.476410256410257e-06, |
|
"loss": 0.0011, |
|
"step": 11275 |
|
}, |
|
{ |
|
"epoch": 9.556025369978858, |
|
"grad_norm": 0.01795080117881298, |
|
"learning_rate": 4.463589743589744e-06, |
|
"loss": 0.0008, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 9.577167019027485, |
|
"grad_norm": 0.029438558965921402, |
|
"learning_rate": 4.4507692307692315e-06, |
|
"loss": 0.0009, |
|
"step": 11325 |
|
}, |
|
{ |
|
"epoch": 9.59830866807611, |
|
"grad_norm": 0.03739332780241966, |
|
"learning_rate": 4.437948717948718e-06, |
|
"loss": 0.0006, |
|
"step": 11350 |
|
}, |
|
{ |
|
"epoch": 9.619450317124736, |
|
"grad_norm": 0.11360177397727966, |
|
"learning_rate": 4.425128205128206e-06, |
|
"loss": 0.001, |
|
"step": 11375 |
|
}, |
|
{ |
|
"epoch": 9.640591966173362, |
|
"grad_norm": 0.03182990849018097, |
|
"learning_rate": 4.412307692307693e-06, |
|
"loss": 0.0008, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 9.661733615221987, |
|
"grad_norm": 0.023038147017359734, |
|
"learning_rate": 4.39948717948718e-06, |
|
"loss": 0.001, |
|
"step": 11425 |
|
}, |
|
{ |
|
"epoch": 9.682875264270614, |
|
"grad_norm": 0.03209986910223961, |
|
"learning_rate": 4.3866666666666665e-06, |
|
"loss": 0.0007, |
|
"step": 11450 |
|
}, |
|
{ |
|
"epoch": 9.704016913319238, |
|
"grad_norm": 0.028583193197846413, |
|
"learning_rate": 4.373846153846154e-06, |
|
"loss": 0.0004, |
|
"step": 11475 |
|
}, |
|
{ |
|
"epoch": 9.725158562367865, |
|
"grad_norm": 0.023485548794269562, |
|
"learning_rate": 4.361025641025641e-06, |
|
"loss": 0.0007, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 9.746300211416491, |
|
"grad_norm": 0.3672647476196289, |
|
"learning_rate": 4.348205128205129e-06, |
|
"loss": 0.0008, |
|
"step": 11525 |
|
}, |
|
{ |
|
"epoch": 9.767441860465116, |
|
"grad_norm": 0.8596897125244141, |
|
"learning_rate": 4.3353846153846154e-06, |
|
"loss": 0.0012, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 9.788583509513742, |
|
"grad_norm": 1.4410885572433472, |
|
"learning_rate": 4.322564102564103e-06, |
|
"loss": 0.001, |
|
"step": 11575 |
|
}, |
|
{ |
|
"epoch": 9.809725158562367, |
|
"grad_norm": 0.023211924359202385, |
|
"learning_rate": 4.30974358974359e-06, |
|
"loss": 0.0005, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 9.830866807610994, |
|
"grad_norm": 0.032794274389743805, |
|
"learning_rate": 4.296923076923078e-06, |
|
"loss": 0.0008, |
|
"step": 11625 |
|
}, |
|
{ |
|
"epoch": 9.85200845665962, |
|
"grad_norm": 0.037845127284526825, |
|
"learning_rate": 4.2841025641025644e-06, |
|
"loss": 0.0012, |
|
"step": 11650 |
|
}, |
|
{ |
|
"epoch": 9.873150105708245, |
|
"grad_norm": 0.0936095118522644, |
|
"learning_rate": 4.271282051282052e-06, |
|
"loss": 0.0012, |
|
"step": 11675 |
|
}, |
|
{ |
|
"epoch": 9.894291754756871, |
|
"grad_norm": 0.06250233203172684, |
|
"learning_rate": 4.258461538461539e-06, |
|
"loss": 0.0005, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 9.915433403805498, |
|
"grad_norm": 0.040198661386966705, |
|
"learning_rate": 4.245641025641026e-06, |
|
"loss": 0.0009, |
|
"step": 11725 |
|
}, |
|
{ |
|
"epoch": 9.936575052854122, |
|
"grad_norm": 0.02659662999212742, |
|
"learning_rate": 4.232820512820513e-06, |
|
"loss": 0.0004, |
|
"step": 11750 |
|
}, |
|
{ |
|
"epoch": 9.957716701902749, |
|
"grad_norm": 0.021418403834104538, |
|
"learning_rate": 4.22e-06, |
|
"loss": 0.0004, |
|
"step": 11775 |
|
}, |
|
{ |
|
"epoch": 9.978858350951374, |
|
"grad_norm": 0.2249915599822998, |
|
"learning_rate": 4.207179487179487e-06, |
|
"loss": 0.0005, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"grad_norm": 0.8315058350563049, |
|
"learning_rate": 4.194358974358975e-06, |
|
"loss": 0.0011, |
|
"step": 11825 |
|
}, |
|
{ |
|
"epoch": 10.021141649048626, |
|
"grad_norm": 0.1930394470691681, |
|
"learning_rate": 4.1815384615384616e-06, |
|
"loss": 0.001, |
|
"step": 11850 |
|
}, |
|
{ |
|
"epoch": 10.042283298097251, |
|
"grad_norm": 0.01425615418702364, |
|
"learning_rate": 4.168717948717949e-06, |
|
"loss": 0.0012, |
|
"step": 11875 |
|
}, |
|
{ |
|
"epoch": 10.063424947145878, |
|
"grad_norm": 0.023017780855298042, |
|
"learning_rate": 4.155897435897436e-06, |
|
"loss": 0.0011, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 10.084566596194502, |
|
"grad_norm": 0.029264431446790695, |
|
"learning_rate": 4.143076923076924e-06, |
|
"loss": 0.0011, |
|
"step": 11925 |
|
}, |
|
{ |
|
"epoch": 10.105708245243129, |
|
"grad_norm": 0.04874871298670769, |
|
"learning_rate": 4.1302564102564106e-06, |
|
"loss": 0.0005, |
|
"step": 11950 |
|
}, |
|
{ |
|
"epoch": 10.126849894291755, |
|
"grad_norm": 0.745508074760437, |
|
"learning_rate": 4.117435897435898e-06, |
|
"loss": 0.0015, |
|
"step": 11975 |
|
}, |
|
{ |
|
"epoch": 10.14799154334038, |
|
"grad_norm": 0.03195907920598984, |
|
"learning_rate": 4.104615384615385e-06, |
|
"loss": 0.0005, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 10.14799154334038, |
|
"eval_loss": 0.08556105196475983, |
|
"eval_runtime": 420.9801, |
|
"eval_samples_per_second": 8.649, |
|
"eval_steps_per_second": 0.542, |
|
"eval_wer": 0.06195183071140192, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 10.169133192389006, |
|
"grad_norm": 0.029029415920376778, |
|
"learning_rate": 4.091794871794872e-06, |
|
"loss": 0.0004, |
|
"step": 12025 |
|
}, |
|
{ |
|
"epoch": 10.190274841437633, |
|
"grad_norm": 0.029037466272711754, |
|
"learning_rate": 4.078974358974359e-06, |
|
"loss": 0.0004, |
|
"step": 12050 |
|
}, |
|
{ |
|
"epoch": 10.211416490486258, |
|
"grad_norm": 0.1034269854426384, |
|
"learning_rate": 4.066153846153846e-06, |
|
"loss": 0.0009, |
|
"step": 12075 |
|
}, |
|
{ |
|
"epoch": 10.232558139534884, |
|
"grad_norm": 0.025130081921815872, |
|
"learning_rate": 4.053333333333333e-06, |
|
"loss": 0.0009, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 10.253699788583509, |
|
"grad_norm": 0.05509277433156967, |
|
"learning_rate": 4.040512820512821e-06, |
|
"loss": 0.0003, |
|
"step": 12125 |
|
}, |
|
{ |
|
"epoch": 10.274841437632135, |
|
"grad_norm": 0.022796448320150375, |
|
"learning_rate": 4.027692307692308e-06, |
|
"loss": 0.0004, |
|
"step": 12150 |
|
}, |
|
{ |
|
"epoch": 10.295983086680762, |
|
"grad_norm": 0.03756018355488777, |
|
"learning_rate": 4.014871794871795e-06, |
|
"loss": 0.0008, |
|
"step": 12175 |
|
}, |
|
{ |
|
"epoch": 10.317124735729386, |
|
"grad_norm": 0.6810747385025024, |
|
"learning_rate": 4.002051282051282e-06, |
|
"loss": 0.0008, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 10.338266384778013, |
|
"grad_norm": 0.2094302922487259, |
|
"learning_rate": 3.98923076923077e-06, |
|
"loss": 0.0016, |
|
"step": 12225 |
|
}, |
|
{ |
|
"epoch": 10.359408033826638, |
|
"grad_norm": 0.02640734240412712, |
|
"learning_rate": 3.976410256410257e-06, |
|
"loss": 0.0007, |
|
"step": 12250 |
|
}, |
|
{ |
|
"epoch": 10.380549682875264, |
|
"grad_norm": 0.015162879601120949, |
|
"learning_rate": 3.963589743589744e-06, |
|
"loss": 0.0006, |
|
"step": 12275 |
|
}, |
|
{ |
|
"epoch": 10.40169133192389, |
|
"grad_norm": 0.023550117388367653, |
|
"learning_rate": 3.950769230769231e-06, |
|
"loss": 0.0007, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 10.422832980972515, |
|
"grad_norm": 1.2513926029205322, |
|
"learning_rate": 3.937948717948718e-06, |
|
"loss": 0.0012, |
|
"step": 12325 |
|
}, |
|
{ |
|
"epoch": 10.443974630021142, |
|
"grad_norm": 1.0258477926254272, |
|
"learning_rate": 3.925128205128205e-06, |
|
"loss": 0.0012, |
|
"step": 12350 |
|
}, |
|
{ |
|
"epoch": 10.465116279069768, |
|
"grad_norm": 1.8391358852386475, |
|
"learning_rate": 3.9123076923076925e-06, |
|
"loss": 0.0008, |
|
"step": 12375 |
|
}, |
|
{ |
|
"epoch": 10.486257928118393, |
|
"grad_norm": 0.02079303376376629, |
|
"learning_rate": 3.899487179487179e-06, |
|
"loss": 0.001, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 10.50739957716702, |
|
"grad_norm": 0.3406016230583191, |
|
"learning_rate": 3.886666666666667e-06, |
|
"loss": 0.0013, |
|
"step": 12425 |
|
}, |
|
{ |
|
"epoch": 10.528541226215644, |
|
"grad_norm": 0.08087711036205292, |
|
"learning_rate": 3.873846153846154e-06, |
|
"loss": 0.0018, |
|
"step": 12450 |
|
}, |
|
{ |
|
"epoch": 10.54968287526427, |
|
"grad_norm": 0.03195007145404816, |
|
"learning_rate": 3.8610256410256415e-06, |
|
"loss": 0.0007, |
|
"step": 12475 |
|
}, |
|
{ |
|
"epoch": 10.570824524312897, |
|
"grad_norm": 0.0327339693903923, |
|
"learning_rate": 3.848205128205128e-06, |
|
"loss": 0.0005, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 10.591966173361522, |
|
"grad_norm": 0.050105396658182144, |
|
"learning_rate": 3.835384615384616e-06, |
|
"loss": 0.0005, |
|
"step": 12525 |
|
}, |
|
{ |
|
"epoch": 10.613107822410148, |
|
"grad_norm": 0.04684900864958763, |
|
"learning_rate": 3.822564102564103e-06, |
|
"loss": 0.0007, |
|
"step": 12550 |
|
}, |
|
{ |
|
"epoch": 10.634249471458773, |
|
"grad_norm": 0.01622653938829899, |
|
"learning_rate": 3.80974358974359e-06, |
|
"loss": 0.0011, |
|
"step": 12575 |
|
}, |
|
{ |
|
"epoch": 10.6553911205074, |
|
"grad_norm": 0.03434791415929794, |
|
"learning_rate": 3.7969230769230773e-06, |
|
"loss": 0.0007, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 10.676532769556026, |
|
"grad_norm": 0.016083180904388428, |
|
"learning_rate": 3.784102564102564e-06, |
|
"loss": 0.0005, |
|
"step": 12625 |
|
}, |
|
{ |
|
"epoch": 10.69767441860465, |
|
"grad_norm": 0.019794505089521408, |
|
"learning_rate": 3.7712820512820514e-06, |
|
"loss": 0.0005, |
|
"step": 12650 |
|
}, |
|
{ |
|
"epoch": 10.718816067653277, |
|
"grad_norm": 0.019432729110121727, |
|
"learning_rate": 3.7584615384615386e-06, |
|
"loss": 0.0006, |
|
"step": 12675 |
|
}, |
|
{ |
|
"epoch": 10.739957716701904, |
|
"grad_norm": 0.021112432703375816, |
|
"learning_rate": 3.745641025641026e-06, |
|
"loss": 0.0007, |
|
"step": 12700 |
|
}, |
|
{ |
|
"epoch": 10.761099365750528, |
|
"grad_norm": 0.02420271746814251, |
|
"learning_rate": 3.732820512820513e-06, |
|
"loss": 0.0012, |
|
"step": 12725 |
|
}, |
|
{ |
|
"epoch": 10.782241014799155, |
|
"grad_norm": 0.044272564351558685, |
|
"learning_rate": 3.7200000000000004e-06, |
|
"loss": 0.0004, |
|
"step": 12750 |
|
}, |
|
{ |
|
"epoch": 10.80338266384778, |
|
"grad_norm": 0.6784161329269409, |
|
"learning_rate": 3.7071794871794876e-06, |
|
"loss": 0.0013, |
|
"step": 12775 |
|
}, |
|
{ |
|
"epoch": 10.824524312896406, |
|
"grad_norm": 0.110283263027668, |
|
"learning_rate": 3.694358974358975e-06, |
|
"loss": 0.0006, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 10.845665961945032, |
|
"grad_norm": 0.026112297549843788, |
|
"learning_rate": 3.681538461538462e-06, |
|
"loss": 0.0011, |
|
"step": 12825 |
|
}, |
|
{ |
|
"epoch": 10.866807610993657, |
|
"grad_norm": 0.03150051832199097, |
|
"learning_rate": 3.6687179487179494e-06, |
|
"loss": 0.0004, |
|
"step": 12850 |
|
}, |
|
{ |
|
"epoch": 10.887949260042284, |
|
"grad_norm": 0.08290383219718933, |
|
"learning_rate": 3.655897435897436e-06, |
|
"loss": 0.0008, |
|
"step": 12875 |
|
}, |
|
{ |
|
"epoch": 10.909090909090908, |
|
"grad_norm": 0.018157586455345154, |
|
"learning_rate": 3.6430769230769234e-06, |
|
"loss": 0.0008, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 10.930232558139535, |
|
"grad_norm": 0.04915007948875427, |
|
"learning_rate": 3.6302564102564103e-06, |
|
"loss": 0.0007, |
|
"step": 12925 |
|
}, |
|
{ |
|
"epoch": 10.951374207188161, |
|
"grad_norm": 0.023294847458600998, |
|
"learning_rate": 3.6174358974358975e-06, |
|
"loss": 0.0014, |
|
"step": 12950 |
|
}, |
|
{ |
|
"epoch": 10.972515856236786, |
|
"grad_norm": 0.013834277167916298, |
|
"learning_rate": 3.6046153846153848e-06, |
|
"loss": 0.0005, |
|
"step": 12975 |
|
}, |
|
{ |
|
"epoch": 10.993657505285412, |
|
"grad_norm": 0.018607119098305702, |
|
"learning_rate": 3.591794871794872e-06, |
|
"loss": 0.0007, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 10.993657505285412, |
|
"eval_loss": 0.08658287674188614, |
|
"eval_runtime": 413.9659, |
|
"eval_samples_per_second": 8.795, |
|
"eval_steps_per_second": 0.551, |
|
"eval_wer": 0.060513248874657755, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 11.014799154334039, |
|
"grad_norm": 0.034428298473358154, |
|
"learning_rate": 3.5789743589743593e-06, |
|
"loss": 0.0002, |
|
"step": 13025 |
|
}, |
|
{ |
|
"epoch": 11.035940803382664, |
|
"grad_norm": 0.024136802181601524, |
|
"learning_rate": 3.5661538461538465e-06, |
|
"loss": 0.001, |
|
"step": 13050 |
|
}, |
|
{ |
|
"epoch": 11.05708245243129, |
|
"grad_norm": 0.014073341153562069, |
|
"learning_rate": 3.5533333333333338e-06, |
|
"loss": 0.0002, |
|
"step": 13075 |
|
}, |
|
{ |
|
"epoch": 11.078224101479915, |
|
"grad_norm": 0.015313107520341873, |
|
"learning_rate": 3.540512820512821e-06, |
|
"loss": 0.0002, |
|
"step": 13100 |
|
}, |
|
{ |
|
"epoch": 11.099365750528541, |
|
"grad_norm": 0.014939980581402779, |
|
"learning_rate": 3.5276923076923083e-06, |
|
"loss": 0.0006, |
|
"step": 13125 |
|
}, |
|
{ |
|
"epoch": 11.120507399577168, |
|
"grad_norm": 0.054304271936416626, |
|
"learning_rate": 3.5148717948717955e-06, |
|
"loss": 0.0006, |
|
"step": 13150 |
|
}, |
|
{ |
|
"epoch": 11.141649048625792, |
|
"grad_norm": 0.01899879425764084, |
|
"learning_rate": 3.5020512820512823e-06, |
|
"loss": 0.0005, |
|
"step": 13175 |
|
}, |
|
{ |
|
"epoch": 11.162790697674419, |
|
"grad_norm": 0.008164430968463421, |
|
"learning_rate": 3.4892307692307696e-06, |
|
"loss": 0.0002, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 11.183932346723044, |
|
"grad_norm": 0.12134576588869095, |
|
"learning_rate": 3.4764102564102564e-06, |
|
"loss": 0.0002, |
|
"step": 13225 |
|
}, |
|
{ |
|
"epoch": 11.20507399577167, |
|
"grad_norm": 0.019934862852096558, |
|
"learning_rate": 3.4635897435897436e-06, |
|
"loss": 0.0011, |
|
"step": 13250 |
|
}, |
|
{ |
|
"epoch": 11.226215644820297, |
|
"grad_norm": 0.01381596364080906, |
|
"learning_rate": 3.450769230769231e-06, |
|
"loss": 0.001, |
|
"step": 13275 |
|
}, |
|
{ |
|
"epoch": 11.247357293868921, |
|
"grad_norm": 0.1572095900774002, |
|
"learning_rate": 3.437948717948718e-06, |
|
"loss": 0.0003, |
|
"step": 13300 |
|
}, |
|
{ |
|
"epoch": 11.268498942917548, |
|
"grad_norm": 0.03256658464670181, |
|
"learning_rate": 3.4251282051282054e-06, |
|
"loss": 0.0002, |
|
"step": 13325 |
|
}, |
|
{ |
|
"epoch": 11.289640591966174, |
|
"grad_norm": 0.012259602546691895, |
|
"learning_rate": 3.4123076923076926e-06, |
|
"loss": 0.0005, |
|
"step": 13350 |
|
}, |
|
{ |
|
"epoch": 11.310782241014799, |
|
"grad_norm": 0.7077483534812927, |
|
"learning_rate": 3.39948717948718e-06, |
|
"loss": 0.002, |
|
"step": 13375 |
|
}, |
|
{ |
|
"epoch": 11.331923890063425, |
|
"grad_norm": 0.013183564879000187, |
|
"learning_rate": 3.386666666666667e-06, |
|
"loss": 0.0003, |
|
"step": 13400 |
|
}, |
|
{ |
|
"epoch": 11.35306553911205, |
|
"grad_norm": 0.015149732120335102, |
|
"learning_rate": 3.3738461538461544e-06, |
|
"loss": 0.0007, |
|
"step": 13425 |
|
}, |
|
{ |
|
"epoch": 11.374207188160677, |
|
"grad_norm": 1.2345651388168335, |
|
"learning_rate": 3.3610256410256416e-06, |
|
"loss": 0.0007, |
|
"step": 13450 |
|
}, |
|
{ |
|
"epoch": 11.395348837209303, |
|
"grad_norm": 0.015832841396331787, |
|
"learning_rate": 3.3482051282051285e-06, |
|
"loss": 0.0004, |
|
"step": 13475 |
|
}, |
|
{ |
|
"epoch": 11.416490486257928, |
|
"grad_norm": 0.02144164778292179, |
|
"learning_rate": 3.3353846153846157e-06, |
|
"loss": 0.0005, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 11.437632135306554, |
|
"grad_norm": 0.02886483073234558, |
|
"learning_rate": 3.322564102564103e-06, |
|
"loss": 0.0002, |
|
"step": 13525 |
|
}, |
|
{ |
|
"epoch": 11.458773784355179, |
|
"grad_norm": 0.01857246272265911, |
|
"learning_rate": 3.3097435897435898e-06, |
|
"loss": 0.0007, |
|
"step": 13550 |
|
}, |
|
{ |
|
"epoch": 11.479915433403805, |
|
"grad_norm": 0.06685657054185867, |
|
"learning_rate": 3.296923076923077e-06, |
|
"loss": 0.0016, |
|
"step": 13575 |
|
}, |
|
{ |
|
"epoch": 11.501057082452432, |
|
"grad_norm": 0.058960702270269394, |
|
"learning_rate": 3.2841025641025643e-06, |
|
"loss": 0.0006, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 11.522198731501057, |
|
"grad_norm": 0.016561295837163925, |
|
"learning_rate": 3.2712820512820515e-06, |
|
"loss": 0.0008, |
|
"step": 13625 |
|
}, |
|
{ |
|
"epoch": 11.543340380549683, |
|
"grad_norm": 0.019466349855065346, |
|
"learning_rate": 3.2584615384615388e-06, |
|
"loss": 0.0004, |
|
"step": 13650 |
|
}, |
|
{ |
|
"epoch": 11.56448202959831, |
|
"grad_norm": 0.014084039255976677, |
|
"learning_rate": 3.245641025641026e-06, |
|
"loss": 0.0002, |
|
"step": 13675 |
|
}, |
|
{ |
|
"epoch": 11.585623678646934, |
|
"grad_norm": 0.022753361612558365, |
|
"learning_rate": 3.2328205128205133e-06, |
|
"loss": 0.0002, |
|
"step": 13700 |
|
}, |
|
{ |
|
"epoch": 11.60676532769556, |
|
"grad_norm": 0.012626050040125847, |
|
"learning_rate": 3.2200000000000005e-06, |
|
"loss": 0.0007, |
|
"step": 13725 |
|
}, |
|
{ |
|
"epoch": 11.627906976744185, |
|
"grad_norm": 0.9902653694152832, |
|
"learning_rate": 3.2071794871794878e-06, |
|
"loss": 0.0009, |
|
"step": 13750 |
|
}, |
|
{ |
|
"epoch": 11.649048625792812, |
|
"grad_norm": 0.034385230392217636, |
|
"learning_rate": 3.194358974358975e-06, |
|
"loss": 0.0003, |
|
"step": 13775 |
|
}, |
|
{ |
|
"epoch": 11.670190274841438, |
|
"grad_norm": 0.013236723840236664, |
|
"learning_rate": 3.181538461538462e-06, |
|
"loss": 0.0004, |
|
"step": 13800 |
|
}, |
|
{ |
|
"epoch": 11.691331923890063, |
|
"grad_norm": 0.014498353935778141, |
|
"learning_rate": 3.168717948717949e-06, |
|
"loss": 0.0009, |
|
"step": 13825 |
|
}, |
|
{ |
|
"epoch": 11.71247357293869, |
|
"grad_norm": 0.012556152418255806, |
|
"learning_rate": 3.155897435897436e-06, |
|
"loss": 0.0007, |
|
"step": 13850 |
|
}, |
|
{ |
|
"epoch": 11.733615221987314, |
|
"grad_norm": 0.017172476276755333, |
|
"learning_rate": 3.143076923076923e-06, |
|
"loss": 0.0002, |
|
"step": 13875 |
|
}, |
|
{ |
|
"epoch": 11.75475687103594, |
|
"grad_norm": 0.04857848584651947, |
|
"learning_rate": 3.1302564102564104e-06, |
|
"loss": 0.0007, |
|
"step": 13900 |
|
}, |
|
{ |
|
"epoch": 11.775898520084567, |
|
"grad_norm": 0.2465539425611496, |
|
"learning_rate": 3.1174358974358976e-06, |
|
"loss": 0.001, |
|
"step": 13925 |
|
}, |
|
{ |
|
"epoch": 11.797040169133192, |
|
"grad_norm": 0.01584937795996666, |
|
"learning_rate": 3.104615384615385e-06, |
|
"loss": 0.0005, |
|
"step": 13950 |
|
}, |
|
{ |
|
"epoch": 11.818181818181818, |
|
"grad_norm": 0.022607628256082535, |
|
"learning_rate": 3.091794871794872e-06, |
|
"loss": 0.0003, |
|
"step": 13975 |
|
}, |
|
{ |
|
"epoch": 11.839323467230443, |
|
"grad_norm": 0.021190233528614044, |
|
"learning_rate": 3.0789743589743594e-06, |
|
"loss": 0.0005, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 11.839323467230443, |
|
"eval_loss": 0.08707120269536972, |
|
"eval_runtime": 422.3648, |
|
"eval_samples_per_second": 8.621, |
|
"eval_steps_per_second": 0.54, |
|
"eval_wer": 0.061394960322984825, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 11.86046511627907, |
|
"grad_norm": 0.02160489559173584, |
|
"learning_rate": 3.0661538461538466e-06, |
|
"loss": 0.0013, |
|
"step": 14025 |
|
}, |
|
{ |
|
"epoch": 11.881606765327696, |
|
"grad_norm": 0.017302557826042175, |
|
"learning_rate": 3.053333333333334e-06, |
|
"loss": 0.0006, |
|
"step": 14050 |
|
}, |
|
{ |
|
"epoch": 11.90274841437632, |
|
"grad_norm": 0.03932559862732887, |
|
"learning_rate": 3.040512820512821e-06, |
|
"loss": 0.0006, |
|
"step": 14075 |
|
}, |
|
{ |
|
"epoch": 11.923890063424947, |
|
"grad_norm": 0.16547563672065735, |
|
"learning_rate": 3.027692307692308e-06, |
|
"loss": 0.001, |
|
"step": 14100 |
|
}, |
|
{ |
|
"epoch": 11.945031712473574, |
|
"grad_norm": 0.05746368691325188, |
|
"learning_rate": 3.014871794871795e-06, |
|
"loss": 0.0005, |
|
"step": 14125 |
|
}, |
|
{ |
|
"epoch": 11.966173361522198, |
|
"grad_norm": 0.017229914665222168, |
|
"learning_rate": 3.002051282051282e-06, |
|
"loss": 0.0016, |
|
"step": 14150 |
|
}, |
|
{ |
|
"epoch": 11.987315010570825, |
|
"grad_norm": 0.017080632969737053, |
|
"learning_rate": 2.9892307692307693e-06, |
|
"loss": 0.0005, |
|
"step": 14175 |
|
}, |
|
{ |
|
"epoch": 12.00845665961945, |
|
"grad_norm": 0.14502297341823578, |
|
"learning_rate": 2.9764102564102565e-06, |
|
"loss": 0.0004, |
|
"step": 14200 |
|
}, |
|
{ |
|
"epoch": 12.029598308668076, |
|
"grad_norm": 0.013931095600128174, |
|
"learning_rate": 2.9635897435897438e-06, |
|
"loss": 0.0002, |
|
"step": 14225 |
|
}, |
|
{ |
|
"epoch": 12.050739957716702, |
|
"grad_norm": 0.04354655370116234, |
|
"learning_rate": 2.950769230769231e-06, |
|
"loss": 0.0004, |
|
"step": 14250 |
|
}, |
|
{ |
|
"epoch": 12.071881606765327, |
|
"grad_norm": 0.015747781842947006, |
|
"learning_rate": 2.9379487179487183e-06, |
|
"loss": 0.0006, |
|
"step": 14275 |
|
}, |
|
{ |
|
"epoch": 12.093023255813954, |
|
"grad_norm": 0.08688750118017197, |
|
"learning_rate": 2.9251282051282055e-06, |
|
"loss": 0.0009, |
|
"step": 14300 |
|
}, |
|
{ |
|
"epoch": 12.11416490486258, |
|
"grad_norm": 0.01061871089041233, |
|
"learning_rate": 2.9123076923076928e-06, |
|
"loss": 0.0013, |
|
"step": 14325 |
|
}, |
|
{ |
|
"epoch": 12.135306553911205, |
|
"grad_norm": 0.01094849780201912, |
|
"learning_rate": 2.89948717948718e-06, |
|
"loss": 0.0006, |
|
"step": 14350 |
|
}, |
|
{ |
|
"epoch": 12.156448202959831, |
|
"grad_norm": 0.012437337078154087, |
|
"learning_rate": 2.8866666666666673e-06, |
|
"loss": 0.0005, |
|
"step": 14375 |
|
}, |
|
{ |
|
"epoch": 12.177589852008456, |
|
"grad_norm": 0.014366303570568562, |
|
"learning_rate": 2.873846153846154e-06, |
|
"loss": 0.0005, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 12.198731501057082, |
|
"grad_norm": 0.017898643389344215, |
|
"learning_rate": 2.8610256410256413e-06, |
|
"loss": 0.0005, |
|
"step": 14425 |
|
}, |
|
{ |
|
"epoch": 12.219873150105709, |
|
"grad_norm": 0.016448453068733215, |
|
"learning_rate": 2.848205128205128e-06, |
|
"loss": 0.0002, |
|
"step": 14450 |
|
}, |
|
{ |
|
"epoch": 12.241014799154334, |
|
"grad_norm": 0.017254604026675224, |
|
"learning_rate": 2.8353846153846154e-06, |
|
"loss": 0.0003, |
|
"step": 14475 |
|
}, |
|
{ |
|
"epoch": 12.26215644820296, |
|
"grad_norm": 0.01735294982790947, |
|
"learning_rate": 2.8225641025641027e-06, |
|
"loss": 0.0005, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 12.283298097251585, |
|
"grad_norm": 0.017333608120679855, |
|
"learning_rate": 2.80974358974359e-06, |
|
"loss": 0.0005, |
|
"step": 14525 |
|
}, |
|
{ |
|
"epoch": 12.304439746300211, |
|
"grad_norm": 0.010371909476816654, |
|
"learning_rate": 2.796923076923077e-06, |
|
"loss": 0.0004, |
|
"step": 14550 |
|
}, |
|
{ |
|
"epoch": 12.325581395348838, |
|
"grad_norm": 0.051518309861421585, |
|
"learning_rate": 2.7841025641025644e-06, |
|
"loss": 0.0003, |
|
"step": 14575 |
|
}, |
|
{ |
|
"epoch": 12.346723044397462, |
|
"grad_norm": 0.015523129142820835, |
|
"learning_rate": 2.7712820512820516e-06, |
|
"loss": 0.0002, |
|
"step": 14600 |
|
}, |
|
{ |
|
"epoch": 12.367864693446089, |
|
"grad_norm": 0.013449127785861492, |
|
"learning_rate": 2.758461538461539e-06, |
|
"loss": 0.0003, |
|
"step": 14625 |
|
}, |
|
{ |
|
"epoch": 12.389006342494715, |
|
"grad_norm": 0.28945910930633545, |
|
"learning_rate": 2.745641025641026e-06, |
|
"loss": 0.0008, |
|
"step": 14650 |
|
}, |
|
{ |
|
"epoch": 12.41014799154334, |
|
"grad_norm": 0.01732316054403782, |
|
"learning_rate": 2.7328205128205134e-06, |
|
"loss": 0.0002, |
|
"step": 14675 |
|
}, |
|
{ |
|
"epoch": 12.431289640591967, |
|
"grad_norm": 0.0187962856143713, |
|
"learning_rate": 2.7200000000000002e-06, |
|
"loss": 0.0006, |
|
"step": 14700 |
|
}, |
|
{ |
|
"epoch": 12.452431289640591, |
|
"grad_norm": 0.010457909666001797, |
|
"learning_rate": 2.7071794871794875e-06, |
|
"loss": 0.0008, |
|
"step": 14725 |
|
}, |
|
{ |
|
"epoch": 12.473572938689218, |
|
"grad_norm": 0.017735542729496956, |
|
"learning_rate": 2.6943589743589743e-06, |
|
"loss": 0.0006, |
|
"step": 14750 |
|
}, |
|
{ |
|
"epoch": 12.494714587737844, |
|
"grad_norm": 0.01583629846572876, |
|
"learning_rate": 2.6815384615384615e-06, |
|
"loss": 0.0008, |
|
"step": 14775 |
|
}, |
|
{ |
|
"epoch": 12.515856236786469, |
|
"grad_norm": 0.024372195824980736, |
|
"learning_rate": 2.6687179487179488e-06, |
|
"loss": 0.0003, |
|
"step": 14800 |
|
}, |
|
{ |
|
"epoch": 12.536997885835095, |
|
"grad_norm": 0.03954934701323509, |
|
"learning_rate": 2.655897435897436e-06, |
|
"loss": 0.0002, |
|
"step": 14825 |
|
}, |
|
{ |
|
"epoch": 12.55813953488372, |
|
"grad_norm": 0.07841933518648148, |
|
"learning_rate": 2.6430769230769233e-06, |
|
"loss": 0.0003, |
|
"step": 14850 |
|
}, |
|
{ |
|
"epoch": 12.579281183932347, |
|
"grad_norm": 0.023459434509277344, |
|
"learning_rate": 2.6302564102564105e-06, |
|
"loss": 0.0008, |
|
"step": 14875 |
|
}, |
|
{ |
|
"epoch": 12.600422832980973, |
|
"grad_norm": 0.02575707994401455, |
|
"learning_rate": 2.6174358974358978e-06, |
|
"loss": 0.0002, |
|
"step": 14900 |
|
}, |
|
{ |
|
"epoch": 12.621564482029598, |
|
"grad_norm": 0.01122299861162901, |
|
"learning_rate": 2.604615384615385e-06, |
|
"loss": 0.0008, |
|
"step": 14925 |
|
}, |
|
{ |
|
"epoch": 12.642706131078224, |
|
"grad_norm": 0.013861022889614105, |
|
"learning_rate": 2.5917948717948723e-06, |
|
"loss": 0.0002, |
|
"step": 14950 |
|
}, |
|
{ |
|
"epoch": 12.66384778012685, |
|
"grad_norm": 0.015051736496388912, |
|
"learning_rate": 2.5789743589743595e-06, |
|
"loss": 0.0009, |
|
"step": 14975 |
|
}, |
|
{ |
|
"epoch": 12.684989429175475, |
|
"grad_norm": 0.053748294711112976, |
|
"learning_rate": 2.5661538461538463e-06, |
|
"loss": 0.0002, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 12.684989429175475, |
|
"eval_loss": 0.08498353511095047, |
|
"eval_runtime": 422.6177, |
|
"eval_samples_per_second": 8.615, |
|
"eval_steps_per_second": 0.539, |
|
"eval_wer": 0.05963153742633069, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 12.706131078224102, |
|
"grad_norm": 0.008815528824925423, |
|
"learning_rate": 2.5533333333333336e-06, |
|
"loss": 0.0002, |
|
"step": 15025 |
|
}, |
|
{ |
|
"epoch": 12.727272727272727, |
|
"grad_norm": 0.01506373006850481, |
|
"learning_rate": 2.5405128205128204e-06, |
|
"loss": 0.0007, |
|
"step": 15050 |
|
}, |
|
{ |
|
"epoch": 12.748414376321353, |
|
"grad_norm": 0.11881545186042786, |
|
"learning_rate": 2.5276923076923077e-06, |
|
"loss": 0.0002, |
|
"step": 15075 |
|
}, |
|
{ |
|
"epoch": 12.76955602536998, |
|
"grad_norm": 0.01836121454834938, |
|
"learning_rate": 2.514871794871795e-06, |
|
"loss": 0.0004, |
|
"step": 15100 |
|
}, |
|
{ |
|
"epoch": 12.790697674418604, |
|
"grad_norm": 0.019225163385272026, |
|
"learning_rate": 2.502051282051282e-06, |
|
"loss": 0.0006, |
|
"step": 15125 |
|
}, |
|
{ |
|
"epoch": 12.81183932346723, |
|
"grad_norm": 0.02618946135044098, |
|
"learning_rate": 2.4892307692307694e-06, |
|
"loss": 0.0002, |
|
"step": 15150 |
|
}, |
|
{ |
|
"epoch": 12.832980972515855, |
|
"grad_norm": 0.02191704511642456, |
|
"learning_rate": 2.4764102564102567e-06, |
|
"loss": 0.0002, |
|
"step": 15175 |
|
}, |
|
{ |
|
"epoch": 12.854122621564482, |
|
"grad_norm": 0.010356785729527473, |
|
"learning_rate": 2.463589743589744e-06, |
|
"loss": 0.0004, |
|
"step": 15200 |
|
}, |
|
{ |
|
"epoch": 12.875264270613108, |
|
"grad_norm": 0.011190637946128845, |
|
"learning_rate": 2.450769230769231e-06, |
|
"loss": 0.0002, |
|
"step": 15225 |
|
}, |
|
{ |
|
"epoch": 12.896405919661733, |
|
"grad_norm": 0.4306105375289917, |
|
"learning_rate": 2.4379487179487184e-06, |
|
"loss": 0.0003, |
|
"step": 15250 |
|
}, |
|
{ |
|
"epoch": 12.91754756871036, |
|
"grad_norm": 0.01129214372485876, |
|
"learning_rate": 2.4251282051282056e-06, |
|
"loss": 0.0004, |
|
"step": 15275 |
|
}, |
|
{ |
|
"epoch": 12.938689217758984, |
|
"grad_norm": 0.013043936342000961, |
|
"learning_rate": 2.4123076923076925e-06, |
|
"loss": 0.0005, |
|
"step": 15300 |
|
}, |
|
{ |
|
"epoch": 12.95983086680761, |
|
"grad_norm": 0.03151563182473183, |
|
"learning_rate": 2.3994871794871797e-06, |
|
"loss": 0.0002, |
|
"step": 15325 |
|
}, |
|
{ |
|
"epoch": 12.980972515856237, |
|
"grad_norm": 0.01353500597178936, |
|
"learning_rate": 2.386666666666667e-06, |
|
"loss": 0.0002, |
|
"step": 15350 |
|
}, |
|
{ |
|
"epoch": 13.002114164904862, |
|
"grad_norm": 0.010209338739514351, |
|
"learning_rate": 2.3738461538461542e-06, |
|
"loss": 0.0009, |
|
"step": 15375 |
|
}, |
|
{ |
|
"epoch": 13.023255813953488, |
|
"grad_norm": 0.01054307445883751, |
|
"learning_rate": 2.3610256410256415e-06, |
|
"loss": 0.0002, |
|
"step": 15400 |
|
}, |
|
{ |
|
"epoch": 13.044397463002115, |
|
"grad_norm": 0.0059889694675803185, |
|
"learning_rate": 2.3482051282051287e-06, |
|
"loss": 0.0001, |
|
"step": 15425 |
|
}, |
|
{ |
|
"epoch": 13.06553911205074, |
|
"grad_norm": 0.008033188059926033, |
|
"learning_rate": 2.3353846153846155e-06, |
|
"loss": 0.0001, |
|
"step": 15450 |
|
}, |
|
{ |
|
"epoch": 13.086680761099366, |
|
"grad_norm": 0.009764975868165493, |
|
"learning_rate": 2.3225641025641028e-06, |
|
"loss": 0.0001, |
|
"step": 15475 |
|
}, |
|
{ |
|
"epoch": 13.10782241014799, |
|
"grad_norm": 0.015029883943498135, |
|
"learning_rate": 2.30974358974359e-06, |
|
"loss": 0.0001, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 13.128964059196617, |
|
"grad_norm": 0.8327123522758484, |
|
"learning_rate": 2.2969230769230773e-06, |
|
"loss": 0.0003, |
|
"step": 15525 |
|
}, |
|
{ |
|
"epoch": 13.150105708245244, |
|
"grad_norm": 0.010631484910845757, |
|
"learning_rate": 2.2841025641025645e-06, |
|
"loss": 0.0005, |
|
"step": 15550 |
|
}, |
|
{ |
|
"epoch": 13.171247357293868, |
|
"grad_norm": 0.011425021104514599, |
|
"learning_rate": 2.2712820512820518e-06, |
|
"loss": 0.0001, |
|
"step": 15575 |
|
}, |
|
{ |
|
"epoch": 13.192389006342495, |
|
"grad_norm": 0.009755819104611874, |
|
"learning_rate": 2.2584615384615386e-06, |
|
"loss": 0.0002, |
|
"step": 15600 |
|
}, |
|
{ |
|
"epoch": 13.213530655391121, |
|
"grad_norm": 0.011913259513676167, |
|
"learning_rate": 2.245641025641026e-06, |
|
"loss": 0.0007, |
|
"step": 15625 |
|
}, |
|
{ |
|
"epoch": 13.234672304439746, |
|
"grad_norm": 0.0059737637639045715, |
|
"learning_rate": 2.232820512820513e-06, |
|
"loss": 0.0002, |
|
"step": 15650 |
|
}, |
|
{ |
|
"epoch": 13.255813953488373, |
|
"grad_norm": 0.015654481947422028, |
|
"learning_rate": 2.2200000000000003e-06, |
|
"loss": 0.0009, |
|
"step": 15675 |
|
}, |
|
{ |
|
"epoch": 13.276955602536997, |
|
"grad_norm": 0.010202393867075443, |
|
"learning_rate": 2.2071794871794876e-06, |
|
"loss": 0.0002, |
|
"step": 15700 |
|
}, |
|
{ |
|
"epoch": 13.298097251585624, |
|
"grad_norm": 0.008887005969882011, |
|
"learning_rate": 2.194358974358975e-06, |
|
"loss": 0.0002, |
|
"step": 15725 |
|
}, |
|
{ |
|
"epoch": 13.31923890063425, |
|
"grad_norm": 0.010564594529569149, |
|
"learning_rate": 2.1815384615384617e-06, |
|
"loss": 0.0001, |
|
"step": 15750 |
|
}, |
|
{ |
|
"epoch": 13.340380549682875, |
|
"grad_norm": 0.007872478105127811, |
|
"learning_rate": 2.168717948717949e-06, |
|
"loss": 0.0001, |
|
"step": 15775 |
|
}, |
|
{ |
|
"epoch": 13.361522198731501, |
|
"grad_norm": 0.014362242072820663, |
|
"learning_rate": 2.155897435897436e-06, |
|
"loss": 0.0001, |
|
"step": 15800 |
|
}, |
|
{ |
|
"epoch": 13.382663847780126, |
|
"grad_norm": 0.013115731067955494, |
|
"learning_rate": 2.1430769230769234e-06, |
|
"loss": 0.0001, |
|
"step": 15825 |
|
}, |
|
{ |
|
"epoch": 13.403805496828753, |
|
"grad_norm": 0.007929886691272259, |
|
"learning_rate": 2.1302564102564107e-06, |
|
"loss": 0.0004, |
|
"step": 15850 |
|
}, |
|
{ |
|
"epoch": 13.424947145877379, |
|
"grad_norm": 0.01074293628334999, |
|
"learning_rate": 2.117435897435898e-06, |
|
"loss": 0.0001, |
|
"step": 15875 |
|
}, |
|
{ |
|
"epoch": 13.446088794926004, |
|
"grad_norm": 0.007108274381607771, |
|
"learning_rate": 2.1046153846153847e-06, |
|
"loss": 0.0002, |
|
"step": 15900 |
|
}, |
|
{ |
|
"epoch": 13.46723044397463, |
|
"grad_norm": 0.006991777569055557, |
|
"learning_rate": 2.091794871794872e-06, |
|
"loss": 0.0003, |
|
"step": 15925 |
|
}, |
|
{ |
|
"epoch": 13.488372093023255, |
|
"grad_norm": 0.006929017137736082, |
|
"learning_rate": 2.0789743589743592e-06, |
|
"loss": 0.0001, |
|
"step": 15950 |
|
}, |
|
{ |
|
"epoch": 13.509513742071881, |
|
"grad_norm": 0.016624469310045242, |
|
"learning_rate": 2.0661538461538465e-06, |
|
"loss": 0.0002, |
|
"step": 15975 |
|
}, |
|
{ |
|
"epoch": 13.530655391120508, |
|
"grad_norm": 0.009122622199356556, |
|
"learning_rate": 2.0533333333333337e-06, |
|
"loss": 0.0004, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 13.530655391120508, |
|
"eval_loss": 0.08493294566869736, |
|
"eval_runtime": 416.438, |
|
"eval_samples_per_second": 8.743, |
|
"eval_steps_per_second": 0.548, |
|
"eval_wer": 0.06004919021764351, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 13.551797040169133, |
|
"grad_norm": 0.007773926947265863, |
|
"learning_rate": 2.040512820512821e-06, |
|
"loss": 0.0003, |
|
"step": 16025 |
|
}, |
|
{ |
|
"epoch": 13.572938689217759, |
|
"grad_norm": 0.023034438490867615, |
|
"learning_rate": 2.027692307692308e-06, |
|
"loss": 0.0004, |
|
"step": 16050 |
|
}, |
|
{ |
|
"epoch": 13.594080338266386, |
|
"grad_norm": 0.012188156135380268, |
|
"learning_rate": 2.014871794871795e-06, |
|
"loss": 0.0001, |
|
"step": 16075 |
|
}, |
|
{ |
|
"epoch": 13.61522198731501, |
|
"grad_norm": 0.009154338389635086, |
|
"learning_rate": 2.0020512820512823e-06, |
|
"loss": 0.0001, |
|
"step": 16100 |
|
}, |
|
{ |
|
"epoch": 13.636363636363637, |
|
"grad_norm": 0.007401302456855774, |
|
"learning_rate": 1.9892307692307695e-06, |
|
"loss": 0.0002, |
|
"step": 16125 |
|
}, |
|
{ |
|
"epoch": 13.657505285412261, |
|
"grad_norm": 0.011268429458141327, |
|
"learning_rate": 1.9764102564102568e-06, |
|
"loss": 0.0005, |
|
"step": 16150 |
|
}, |
|
{ |
|
"epoch": 13.678646934460888, |
|
"grad_norm": 0.010454394854605198, |
|
"learning_rate": 1.963589743589744e-06, |
|
"loss": 0.0001, |
|
"step": 16175 |
|
}, |
|
{ |
|
"epoch": 13.699788583509514, |
|
"grad_norm": 0.011891184374690056, |
|
"learning_rate": 1.950769230769231e-06, |
|
"loss": 0.0003, |
|
"step": 16200 |
|
}, |
|
{ |
|
"epoch": 13.720930232558139, |
|
"grad_norm": 0.016350483521819115, |
|
"learning_rate": 1.937948717948718e-06, |
|
"loss": 0.0001, |
|
"step": 16225 |
|
}, |
|
{ |
|
"epoch": 13.742071881606766, |
|
"grad_norm": 0.13966374099254608, |
|
"learning_rate": 1.9251282051282054e-06, |
|
"loss": 0.0004, |
|
"step": 16250 |
|
}, |
|
{ |
|
"epoch": 13.763213530655392, |
|
"grad_norm": 0.008222724311053753, |
|
"learning_rate": 1.9123076923076926e-06, |
|
"loss": 0.0001, |
|
"step": 16275 |
|
}, |
|
{ |
|
"epoch": 13.784355179704017, |
|
"grad_norm": 0.011430777609348297, |
|
"learning_rate": 1.8994871794871796e-06, |
|
"loss": 0.0002, |
|
"step": 16300 |
|
}, |
|
{ |
|
"epoch": 13.805496828752643, |
|
"grad_norm": 0.009091757237911224, |
|
"learning_rate": 1.8866666666666669e-06, |
|
"loss": 0.0006, |
|
"step": 16325 |
|
}, |
|
{ |
|
"epoch": 13.826638477801268, |
|
"grad_norm": 0.009097087197005749, |
|
"learning_rate": 1.873846153846154e-06, |
|
"loss": 0.0001, |
|
"step": 16350 |
|
}, |
|
{ |
|
"epoch": 13.847780126849894, |
|
"grad_norm": 0.0094605116173625, |
|
"learning_rate": 1.8610256410256412e-06, |
|
"loss": 0.0001, |
|
"step": 16375 |
|
}, |
|
{ |
|
"epoch": 13.86892177589852, |
|
"grad_norm": 0.006917539052665234, |
|
"learning_rate": 1.8482051282051284e-06, |
|
"loss": 0.0001, |
|
"step": 16400 |
|
}, |
|
{ |
|
"epoch": 13.890063424947146, |
|
"grad_norm": 0.006485483143478632, |
|
"learning_rate": 1.8353846153846155e-06, |
|
"loss": 0.0004, |
|
"step": 16425 |
|
}, |
|
{ |
|
"epoch": 13.911205073995772, |
|
"grad_norm": 0.010646677576005459, |
|
"learning_rate": 1.8225641025641027e-06, |
|
"loss": 0.0003, |
|
"step": 16450 |
|
}, |
|
{ |
|
"epoch": 13.932346723044397, |
|
"grad_norm": 0.009001165628433228, |
|
"learning_rate": 1.80974358974359e-06, |
|
"loss": 0.0002, |
|
"step": 16475 |
|
}, |
|
{ |
|
"epoch": 13.953488372093023, |
|
"grad_norm": 0.007851574569940567, |
|
"learning_rate": 1.796923076923077e-06, |
|
"loss": 0.0003, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 13.97463002114165, |
|
"grad_norm": 0.005184480920433998, |
|
"learning_rate": 1.7841025641025642e-06, |
|
"loss": 0.0001, |
|
"step": 16525 |
|
}, |
|
{ |
|
"epoch": 13.995771670190274, |
|
"grad_norm": 0.009073914960026741, |
|
"learning_rate": 1.7712820512820515e-06, |
|
"loss": 0.0005, |
|
"step": 16550 |
|
}, |
|
{ |
|
"epoch": 14.0169133192389, |
|
"grad_norm": 0.006329812109470367, |
|
"learning_rate": 1.7584615384615385e-06, |
|
"loss": 0.0002, |
|
"step": 16575 |
|
}, |
|
{ |
|
"epoch": 14.038054968287527, |
|
"grad_norm": 0.00448222178965807, |
|
"learning_rate": 1.7456410256410258e-06, |
|
"loss": 0.0001, |
|
"step": 16600 |
|
}, |
|
{ |
|
"epoch": 14.059196617336152, |
|
"grad_norm": 0.006917804013937712, |
|
"learning_rate": 1.732820512820513e-06, |
|
"loss": 0.0001, |
|
"step": 16625 |
|
}, |
|
{ |
|
"epoch": 14.080338266384778, |
|
"grad_norm": 0.011933974921703339, |
|
"learning_rate": 1.72e-06, |
|
"loss": 0.0002, |
|
"step": 16650 |
|
}, |
|
{ |
|
"epoch": 14.101479915433403, |
|
"grad_norm": 0.006539530120790005, |
|
"learning_rate": 1.7071794871794873e-06, |
|
"loss": 0.0001, |
|
"step": 16675 |
|
}, |
|
{ |
|
"epoch": 14.12262156448203, |
|
"grad_norm": 0.007711876183748245, |
|
"learning_rate": 1.6943589743589745e-06, |
|
"loss": 0.0001, |
|
"step": 16700 |
|
}, |
|
{ |
|
"epoch": 14.143763213530656, |
|
"grad_norm": 0.00857022125273943, |
|
"learning_rate": 1.6820512820512822e-06, |
|
"loss": 0.0007, |
|
"step": 16725 |
|
}, |
|
{ |
|
"epoch": 14.16490486257928, |
|
"grad_norm": 0.00736591499298811, |
|
"learning_rate": 1.6692307692307694e-06, |
|
"loss": 0.0001, |
|
"step": 16750 |
|
}, |
|
{ |
|
"epoch": 14.186046511627907, |
|
"grad_norm": 0.008430298417806625, |
|
"learning_rate": 1.6564102564102567e-06, |
|
"loss": 0.0001, |
|
"step": 16775 |
|
}, |
|
{ |
|
"epoch": 14.207188160676532, |
|
"grad_norm": 0.006714751478284597, |
|
"learning_rate": 1.643589743589744e-06, |
|
"loss": 0.0002, |
|
"step": 16800 |
|
}, |
|
{ |
|
"epoch": 14.228329809725158, |
|
"grad_norm": 0.007295829243957996, |
|
"learning_rate": 1.6307692307692307e-06, |
|
"loss": 0.0001, |
|
"step": 16825 |
|
}, |
|
{ |
|
"epoch": 14.249471458773785, |
|
"grad_norm": 0.005545494146645069, |
|
"learning_rate": 1.617948717948718e-06, |
|
"loss": 0.0001, |
|
"step": 16850 |
|
}, |
|
{ |
|
"epoch": 14.27061310782241, |
|
"grad_norm": 0.006525352597236633, |
|
"learning_rate": 1.6051282051282052e-06, |
|
"loss": 0.0003, |
|
"step": 16875 |
|
}, |
|
{ |
|
"epoch": 14.291754756871036, |
|
"grad_norm": 0.006453375332057476, |
|
"learning_rate": 1.5923076923076925e-06, |
|
"loss": 0.0001, |
|
"step": 16900 |
|
}, |
|
{ |
|
"epoch": 14.312896405919663, |
|
"grad_norm": 0.005989603232592344, |
|
"learning_rate": 1.5794871794871797e-06, |
|
"loss": 0.0001, |
|
"step": 16925 |
|
}, |
|
{ |
|
"epoch": 14.334038054968287, |
|
"grad_norm": 0.006138788536190987, |
|
"learning_rate": 1.566666666666667e-06, |
|
"loss": 0.0001, |
|
"step": 16950 |
|
}, |
|
{ |
|
"epoch": 14.355179704016914, |
|
"grad_norm": 0.006993785500526428, |
|
"learning_rate": 1.5538461538461538e-06, |
|
"loss": 0.0001, |
|
"step": 16975 |
|
}, |
|
{ |
|
"epoch": 14.376321353065538, |
|
"grad_norm": 0.007072498090565205, |
|
"learning_rate": 1.541025641025641e-06, |
|
"loss": 0.0001, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 14.376321353065538, |
|
"eval_loss": 0.0867857038974762, |
|
"eval_runtime": 413.9388, |
|
"eval_samples_per_second": 8.796, |
|
"eval_steps_per_second": 0.551, |
|
"eval_wer": 0.05916747876931644, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 14.397463002114165, |
|
"grad_norm": 0.008112003095448017, |
|
"learning_rate": 1.5282051282051283e-06, |
|
"loss": 0.0001, |
|
"step": 17025 |
|
}, |
|
{ |
|
"epoch": 14.418604651162791, |
|
"grad_norm": 0.007216181140393019, |
|
"learning_rate": 1.5153846153846156e-06, |
|
"loss": 0.0002, |
|
"step": 17050 |
|
}, |
|
{ |
|
"epoch": 14.439746300211416, |
|
"grad_norm": 0.003465973073616624, |
|
"learning_rate": 1.5025641025641028e-06, |
|
"loss": 0.0001, |
|
"step": 17075 |
|
}, |
|
{ |
|
"epoch": 14.460887949260043, |
|
"grad_norm": 0.005432849284261465, |
|
"learning_rate": 1.48974358974359e-06, |
|
"loss": 0.0001, |
|
"step": 17100 |
|
}, |
|
{ |
|
"epoch": 14.482029598308667, |
|
"grad_norm": 0.00709709245711565, |
|
"learning_rate": 1.476923076923077e-06, |
|
"loss": 0.0001, |
|
"step": 17125 |
|
}, |
|
{ |
|
"epoch": 14.503171247357294, |
|
"grad_norm": 0.006040381733328104, |
|
"learning_rate": 1.4641025641025641e-06, |
|
"loss": 0.0001, |
|
"step": 17150 |
|
}, |
|
{ |
|
"epoch": 14.52431289640592, |
|
"grad_norm": 0.00656255753710866, |
|
"learning_rate": 1.4512820512820514e-06, |
|
"loss": 0.0001, |
|
"step": 17175 |
|
}, |
|
{ |
|
"epoch": 14.545454545454545, |
|
"grad_norm": 0.006466065067797899, |
|
"learning_rate": 1.4384615384615386e-06, |
|
"loss": 0.0001, |
|
"step": 17200 |
|
}, |
|
{ |
|
"epoch": 14.566596194503171, |
|
"grad_norm": 0.008718462660908699, |
|
"learning_rate": 1.4261538461538462e-06, |
|
"loss": 0.0003, |
|
"step": 17225 |
|
}, |
|
{ |
|
"epoch": 14.587737843551796, |
|
"grad_norm": 0.0089957807213068, |
|
"learning_rate": 1.4133333333333335e-06, |
|
"loss": 0.0001, |
|
"step": 17250 |
|
}, |
|
{ |
|
"epoch": 14.608879492600423, |
|
"grad_norm": 0.004027772229164839, |
|
"learning_rate": 1.4005128205128207e-06, |
|
"loss": 0.0003, |
|
"step": 17275 |
|
}, |
|
{ |
|
"epoch": 14.630021141649049, |
|
"grad_norm": 0.006016540341079235, |
|
"learning_rate": 1.3876923076923076e-06, |
|
"loss": 0.0001, |
|
"step": 17300 |
|
}, |
|
{ |
|
"epoch": 14.651162790697674, |
|
"grad_norm": 0.006955642718821764, |
|
"learning_rate": 1.3748717948717948e-06, |
|
"loss": 0.0001, |
|
"step": 17325 |
|
}, |
|
{ |
|
"epoch": 14.6723044397463, |
|
"grad_norm": 0.006230717524886131, |
|
"learning_rate": 1.362051282051282e-06, |
|
"loss": 0.0001, |
|
"step": 17350 |
|
}, |
|
{ |
|
"epoch": 14.693446088794927, |
|
"grad_norm": 0.007248835172504187, |
|
"learning_rate": 1.3492307692307693e-06, |
|
"loss": 0.0001, |
|
"step": 17375 |
|
}, |
|
{ |
|
"epoch": 14.714587737843551, |
|
"grad_norm": 0.009279552847146988, |
|
"learning_rate": 1.3364102564102566e-06, |
|
"loss": 0.0001, |
|
"step": 17400 |
|
}, |
|
{ |
|
"epoch": 14.735729386892178, |
|
"grad_norm": 0.004904840141534805, |
|
"learning_rate": 1.3235897435897438e-06, |
|
"loss": 0.0001, |
|
"step": 17425 |
|
}, |
|
{ |
|
"epoch": 14.756871035940803, |
|
"grad_norm": 0.008709315210580826, |
|
"learning_rate": 1.3107692307692308e-06, |
|
"loss": 0.0002, |
|
"step": 17450 |
|
}, |
|
{ |
|
"epoch": 14.778012684989429, |
|
"grad_norm": 0.005629656370729208, |
|
"learning_rate": 1.2979487179487179e-06, |
|
"loss": 0.0001, |
|
"step": 17475 |
|
}, |
|
{ |
|
"epoch": 14.799154334038056, |
|
"grad_norm": 0.007197321858257055, |
|
"learning_rate": 1.2851282051282051e-06, |
|
"loss": 0.0001, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 14.82029598308668, |
|
"grad_norm": 0.013610289432108402, |
|
"learning_rate": 1.2723076923076924e-06, |
|
"loss": 0.0001, |
|
"step": 17525 |
|
}, |
|
{ |
|
"epoch": 14.841437632135307, |
|
"grad_norm": 0.006126283202320337, |
|
"learning_rate": 1.2594871794871796e-06, |
|
"loss": 0.0004, |
|
"step": 17550 |
|
}, |
|
{ |
|
"epoch": 14.862579281183933, |
|
"grad_norm": 0.006340153515338898, |
|
"learning_rate": 1.2466666666666667e-06, |
|
"loss": 0.0004, |
|
"step": 17575 |
|
}, |
|
{ |
|
"epoch": 14.883720930232558, |
|
"grad_norm": 0.007039876654744148, |
|
"learning_rate": 1.233846153846154e-06, |
|
"loss": 0.0001, |
|
"step": 17600 |
|
}, |
|
{ |
|
"epoch": 14.904862579281184, |
|
"grad_norm": 0.006753341294825077, |
|
"learning_rate": 1.2210256410256412e-06, |
|
"loss": 0.0001, |
|
"step": 17625 |
|
}, |
|
{ |
|
"epoch": 14.926004228329809, |
|
"grad_norm": 0.007074939087033272, |
|
"learning_rate": 1.2082051282051282e-06, |
|
"loss": 0.0001, |
|
"step": 17650 |
|
}, |
|
{ |
|
"epoch": 14.947145877378436, |
|
"grad_norm": 0.006931220646947622, |
|
"learning_rate": 1.1953846153846154e-06, |
|
"loss": 0.0002, |
|
"step": 17675 |
|
}, |
|
{ |
|
"epoch": 14.968287526427062, |
|
"grad_norm": 0.006897748447954655, |
|
"learning_rate": 1.1825641025641027e-06, |
|
"loss": 0.0001, |
|
"step": 17700 |
|
}, |
|
{ |
|
"epoch": 14.989429175475687, |
|
"grad_norm": 0.0052500138990581036, |
|
"learning_rate": 1.1697435897435897e-06, |
|
"loss": 0.0003, |
|
"step": 17725 |
|
}, |
|
{ |
|
"epoch": 15.010570824524313, |
|
"grad_norm": 0.003996185027062893, |
|
"learning_rate": 1.156923076923077e-06, |
|
"loss": 0.0002, |
|
"step": 17750 |
|
}, |
|
{ |
|
"epoch": 15.031712473572938, |
|
"grad_norm": 0.006482282187789679, |
|
"learning_rate": 1.1441025641025642e-06, |
|
"loss": 0.0001, |
|
"step": 17775 |
|
}, |
|
{ |
|
"epoch": 15.052854122621564, |
|
"grad_norm": 0.004399996716529131, |
|
"learning_rate": 1.1312820512820513e-06, |
|
"loss": 0.0001, |
|
"step": 17800 |
|
}, |
|
{ |
|
"epoch": 15.073995771670191, |
|
"grad_norm": 0.004176503047347069, |
|
"learning_rate": 1.1184615384615385e-06, |
|
"loss": 0.0001, |
|
"step": 17825 |
|
}, |
|
{ |
|
"epoch": 15.095137420718816, |
|
"grad_norm": 0.005335643887519836, |
|
"learning_rate": 1.1056410256410258e-06, |
|
"loss": 0.0001, |
|
"step": 17850 |
|
}, |
|
{ |
|
"epoch": 15.116279069767442, |
|
"grad_norm": 0.006689351052045822, |
|
"learning_rate": 1.0928205128205128e-06, |
|
"loss": 0.0001, |
|
"step": 17875 |
|
}, |
|
{ |
|
"epoch": 15.137420718816067, |
|
"grad_norm": 0.0035949235316365957, |
|
"learning_rate": 1.08e-06, |
|
"loss": 0.0001, |
|
"step": 17900 |
|
}, |
|
{ |
|
"epoch": 15.158562367864693, |
|
"grad_norm": 0.004826567135751247, |
|
"learning_rate": 1.0671794871794873e-06, |
|
"loss": 0.0001, |
|
"step": 17925 |
|
}, |
|
{ |
|
"epoch": 15.17970401691332, |
|
"grad_norm": 0.006563639268279076, |
|
"learning_rate": 1.0543589743589743e-06, |
|
"loss": 0.0001, |
|
"step": 17950 |
|
}, |
|
{ |
|
"epoch": 15.200845665961944, |
|
"grad_norm": 0.005385232623666525, |
|
"learning_rate": 1.0415384615384616e-06, |
|
"loss": 0.0001, |
|
"step": 17975 |
|
}, |
|
{ |
|
"epoch": 15.221987315010571, |
|
"grad_norm": 0.004666761960834265, |
|
"learning_rate": 1.0287179487179488e-06, |
|
"loss": 0.0002, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 15.221987315010571, |
|
"eval_loss": 0.08731851726770401, |
|
"eval_runtime": 418.9511, |
|
"eval_samples_per_second": 8.691, |
|
"eval_steps_per_second": 0.544, |
|
"eval_wer": 0.05930669636642071, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 15.243128964059197, |
|
"grad_norm": 0.004958902020007372, |
|
"learning_rate": 1.0158974358974359e-06, |
|
"loss": 0.0001, |
|
"step": 18025 |
|
}, |
|
{ |
|
"epoch": 15.264270613107822, |
|
"grad_norm": 0.005769228097051382, |
|
"learning_rate": 1.003076923076923e-06, |
|
"loss": 0.0001, |
|
"step": 18050 |
|
}, |
|
{ |
|
"epoch": 15.285412262156449, |
|
"grad_norm": 0.004522492177784443, |
|
"learning_rate": 9.902564102564103e-07, |
|
"loss": 0.0001, |
|
"step": 18075 |
|
}, |
|
{ |
|
"epoch": 15.306553911205073, |
|
"grad_norm": 0.00491857435554266, |
|
"learning_rate": 9.774358974358974e-07, |
|
"loss": 0.0001, |
|
"step": 18100 |
|
}, |
|
{ |
|
"epoch": 15.3276955602537, |
|
"grad_norm": 0.007983505725860596, |
|
"learning_rate": 9.646153846153846e-07, |
|
"loss": 0.0001, |
|
"step": 18125 |
|
}, |
|
{ |
|
"epoch": 15.348837209302326, |
|
"grad_norm": 0.004594389349222183, |
|
"learning_rate": 9.517948717948719e-07, |
|
"loss": 0.0001, |
|
"step": 18150 |
|
}, |
|
{ |
|
"epoch": 15.369978858350951, |
|
"grad_norm": 0.005598857067525387, |
|
"learning_rate": 9.38974358974359e-07, |
|
"loss": 0.0001, |
|
"step": 18175 |
|
}, |
|
{ |
|
"epoch": 15.391120507399577, |
|
"grad_norm": 0.004582141991704702, |
|
"learning_rate": 9.261538461538462e-07, |
|
"loss": 0.0001, |
|
"step": 18200 |
|
}, |
|
{ |
|
"epoch": 15.412262156448204, |
|
"grad_norm": 0.008931068703532219, |
|
"learning_rate": 9.133333333333334e-07, |
|
"loss": 0.0001, |
|
"step": 18225 |
|
}, |
|
{ |
|
"epoch": 15.433403805496829, |
|
"grad_norm": 0.0040603941306471825, |
|
"learning_rate": 9.005128205128206e-07, |
|
"loss": 0.0001, |
|
"step": 18250 |
|
}, |
|
{ |
|
"epoch": 15.454545454545455, |
|
"grad_norm": 0.005295175593346357, |
|
"learning_rate": 8.876923076923077e-07, |
|
"loss": 0.0001, |
|
"step": 18275 |
|
}, |
|
{ |
|
"epoch": 15.47568710359408, |
|
"grad_norm": 0.005524117033928633, |
|
"learning_rate": 8.748717948717949e-07, |
|
"loss": 0.0001, |
|
"step": 18300 |
|
}, |
|
{ |
|
"epoch": 15.496828752642706, |
|
"grad_norm": 0.005777684506028891, |
|
"learning_rate": 8.620512820512822e-07, |
|
"loss": 0.0001, |
|
"step": 18325 |
|
}, |
|
{ |
|
"epoch": 15.517970401691333, |
|
"grad_norm": 0.004244440235197544, |
|
"learning_rate": 8.492307692307692e-07, |
|
"loss": 0.0001, |
|
"step": 18350 |
|
}, |
|
{ |
|
"epoch": 15.539112050739957, |
|
"grad_norm": 0.008006569929420948, |
|
"learning_rate": 8.364102564102565e-07, |
|
"loss": 0.0001, |
|
"step": 18375 |
|
}, |
|
{ |
|
"epoch": 15.560253699788584, |
|
"grad_norm": 0.00508382823318243, |
|
"learning_rate": 8.235897435897437e-07, |
|
"loss": 0.0001, |
|
"step": 18400 |
|
}, |
|
{ |
|
"epoch": 15.581395348837209, |
|
"grad_norm": 0.005526789929717779, |
|
"learning_rate": 8.107692307692308e-07, |
|
"loss": 0.0001, |
|
"step": 18425 |
|
}, |
|
{ |
|
"epoch": 15.602536997885835, |
|
"grad_norm": 0.0036265768576413393, |
|
"learning_rate": 7.97948717948718e-07, |
|
"loss": 0.0001, |
|
"step": 18450 |
|
}, |
|
{ |
|
"epoch": 15.623678646934462, |
|
"grad_norm": 0.004917936399579048, |
|
"learning_rate": 7.851282051282053e-07, |
|
"loss": 0.0002, |
|
"step": 18475 |
|
}, |
|
{ |
|
"epoch": 15.644820295983086, |
|
"grad_norm": 0.005852636881172657, |
|
"learning_rate": 7.723076923076923e-07, |
|
"loss": 0.0001, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 15.665961945031713, |
|
"grad_norm": 0.0046966904774308205, |
|
"learning_rate": 7.594871794871795e-07, |
|
"loss": 0.0001, |
|
"step": 18525 |
|
}, |
|
{ |
|
"epoch": 15.687103594080337, |
|
"grad_norm": 0.004561580251902342, |
|
"learning_rate": 7.466666666666668e-07, |
|
"loss": 0.0001, |
|
"step": 18550 |
|
}, |
|
{ |
|
"epoch": 15.708245243128964, |
|
"grad_norm": 0.006141614634543657, |
|
"learning_rate": 7.338461538461538e-07, |
|
"loss": 0.0001, |
|
"step": 18575 |
|
}, |
|
{ |
|
"epoch": 15.72938689217759, |
|
"grad_norm": 0.005293000023812056, |
|
"learning_rate": 7.210256410256411e-07, |
|
"loss": 0.0001, |
|
"step": 18600 |
|
}, |
|
{ |
|
"epoch": 15.750528541226215, |
|
"grad_norm": 0.024633372202515602, |
|
"learning_rate": 7.082051282051283e-07, |
|
"loss": 0.0001, |
|
"step": 18625 |
|
}, |
|
{ |
|
"epoch": 15.771670190274842, |
|
"grad_norm": 0.007387631572782993, |
|
"learning_rate": 6.953846153846154e-07, |
|
"loss": 0.0001, |
|
"step": 18650 |
|
}, |
|
{ |
|
"epoch": 15.792811839323468, |
|
"grad_norm": 0.005313596688210964, |
|
"learning_rate": 6.825641025641026e-07, |
|
"loss": 0.0002, |
|
"step": 18675 |
|
}, |
|
{ |
|
"epoch": 15.813953488372093, |
|
"grad_norm": 0.004781834315508604, |
|
"learning_rate": 6.697435897435899e-07, |
|
"loss": 0.0001, |
|
"step": 18700 |
|
}, |
|
{ |
|
"epoch": 15.83509513742072, |
|
"grad_norm": 0.11602696031332016, |
|
"learning_rate": 6.569230769230769e-07, |
|
"loss": 0.0001, |
|
"step": 18725 |
|
}, |
|
{ |
|
"epoch": 15.856236786469344, |
|
"grad_norm": 0.005565613973885775, |
|
"learning_rate": 6.441025641025641e-07, |
|
"loss": 0.0001, |
|
"step": 18750 |
|
}, |
|
{ |
|
"epoch": 15.87737843551797, |
|
"grad_norm": 0.005617073271423578, |
|
"learning_rate": 6.312820512820514e-07, |
|
"loss": 0.0001, |
|
"step": 18775 |
|
}, |
|
{ |
|
"epoch": 15.898520084566597, |
|
"grad_norm": 0.005743805319070816, |
|
"learning_rate": 6.184615384615385e-07, |
|
"loss": 0.0001, |
|
"step": 18800 |
|
}, |
|
{ |
|
"epoch": 15.919661733615222, |
|
"grad_norm": 0.005840159021317959, |
|
"learning_rate": 6.056410256410257e-07, |
|
"loss": 0.0001, |
|
"step": 18825 |
|
}, |
|
{ |
|
"epoch": 15.940803382663848, |
|
"grad_norm": 0.0066063860431313515, |
|
"learning_rate": 5.928205128205128e-07, |
|
"loss": 0.0001, |
|
"step": 18850 |
|
}, |
|
{ |
|
"epoch": 15.961945031712474, |
|
"grad_norm": 0.0054490105248987675, |
|
"learning_rate": 5.800000000000001e-07, |
|
"loss": 0.0001, |
|
"step": 18875 |
|
}, |
|
{ |
|
"epoch": 15.9830866807611, |
|
"grad_norm": 0.006493702996522188, |
|
"learning_rate": 5.671794871794872e-07, |
|
"loss": 0.0007, |
|
"step": 18900 |
|
}, |
|
{ |
|
"epoch": 16.004228329809724, |
|
"grad_norm": 0.004811755381524563, |
|
"learning_rate": 5.543589743589743e-07, |
|
"loss": 0.0001, |
|
"step": 18925 |
|
}, |
|
{ |
|
"epoch": 16.02536997885835, |
|
"grad_norm": 0.005250418093055487, |
|
"learning_rate": 5.415384615384616e-07, |
|
"loss": 0.0001, |
|
"step": 18950 |
|
}, |
|
{ |
|
"epoch": 16.046511627906977, |
|
"grad_norm": 0.004121173173189163, |
|
"learning_rate": 5.287179487179487e-07, |
|
"loss": 0.0001, |
|
"step": 18975 |
|
}, |
|
{ |
|
"epoch": 16.067653276955603, |
|
"grad_norm": 0.005048220045864582, |
|
"learning_rate": 5.158974358974359e-07, |
|
"loss": 0.0001, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 16.067653276955603, |
|
"eval_loss": 0.08753260225057602, |
|
"eval_runtime": 419.2956, |
|
"eval_samples_per_second": 8.684, |
|
"eval_steps_per_second": 0.544, |
|
"eval_wer": 0.05847139078379507, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 16.08879492600423, |
|
"grad_norm": 0.005383364390581846, |
|
"learning_rate": 5.030769230769231e-07, |
|
"loss": 0.0001, |
|
"step": 19025 |
|
}, |
|
{ |
|
"epoch": 16.109936575052853, |
|
"grad_norm": 0.003624641802161932, |
|
"learning_rate": 4.902564102564103e-07, |
|
"loss": 0.0001, |
|
"step": 19050 |
|
}, |
|
{ |
|
"epoch": 16.13107822410148, |
|
"grad_norm": 0.003508235327899456, |
|
"learning_rate": 4.774358974358974e-07, |
|
"loss": 0.0001, |
|
"step": 19075 |
|
}, |
|
{ |
|
"epoch": 16.152219873150106, |
|
"grad_norm": 0.004268943332135677, |
|
"learning_rate": 4.6461538461538465e-07, |
|
"loss": 0.0001, |
|
"step": 19100 |
|
}, |
|
{ |
|
"epoch": 16.173361522198732, |
|
"grad_norm": 0.003968693781644106, |
|
"learning_rate": 4.5179487179487185e-07, |
|
"loss": 0.0001, |
|
"step": 19125 |
|
}, |
|
{ |
|
"epoch": 16.19450317124736, |
|
"grad_norm": 0.004143453668802977, |
|
"learning_rate": 4.38974358974359e-07, |
|
"loss": 0.0001, |
|
"step": 19150 |
|
}, |
|
{ |
|
"epoch": 16.21564482029598, |
|
"grad_norm": 0.004426514729857445, |
|
"learning_rate": 4.261538461538462e-07, |
|
"loss": 0.0001, |
|
"step": 19175 |
|
}, |
|
{ |
|
"epoch": 16.236786469344608, |
|
"grad_norm": 0.005651060491800308, |
|
"learning_rate": 4.133333333333334e-07, |
|
"loss": 0.0001, |
|
"step": 19200 |
|
}, |
|
{ |
|
"epoch": 16.257928118393234, |
|
"grad_norm": 0.003962742630392313, |
|
"learning_rate": 4.005128205128205e-07, |
|
"loss": 0.0001, |
|
"step": 19225 |
|
}, |
|
{ |
|
"epoch": 16.27906976744186, |
|
"grad_norm": 0.004281258676201105, |
|
"learning_rate": 3.876923076923077e-07, |
|
"loss": 0.0001, |
|
"step": 19250 |
|
}, |
|
{ |
|
"epoch": 16.300211416490487, |
|
"grad_norm": 0.003360031172633171, |
|
"learning_rate": 3.748717948717949e-07, |
|
"loss": 0.0001, |
|
"step": 19275 |
|
}, |
|
{ |
|
"epoch": 16.32135306553911, |
|
"grad_norm": 0.005053969100117683, |
|
"learning_rate": 3.6205128205128206e-07, |
|
"loss": 0.0001, |
|
"step": 19300 |
|
}, |
|
{ |
|
"epoch": 16.342494714587737, |
|
"grad_norm": 0.005359134636819363, |
|
"learning_rate": 3.4923076923076925e-07, |
|
"loss": 0.0001, |
|
"step": 19325 |
|
}, |
|
{ |
|
"epoch": 16.363636363636363, |
|
"grad_norm": 0.004870879463851452, |
|
"learning_rate": 3.3641025641025645e-07, |
|
"loss": 0.0001, |
|
"step": 19350 |
|
}, |
|
{ |
|
"epoch": 16.38477801268499, |
|
"grad_norm": 0.005147048272192478, |
|
"learning_rate": 3.235897435897436e-07, |
|
"loss": 0.0001, |
|
"step": 19375 |
|
}, |
|
{ |
|
"epoch": 16.405919661733616, |
|
"grad_norm": 0.005027804523706436, |
|
"learning_rate": 3.107692307692308e-07, |
|
"loss": 0.0001, |
|
"step": 19400 |
|
}, |
|
{ |
|
"epoch": 16.427061310782243, |
|
"grad_norm": 0.012405909597873688, |
|
"learning_rate": 2.97948717948718e-07, |
|
"loss": 0.0001, |
|
"step": 19425 |
|
}, |
|
{ |
|
"epoch": 16.448202959830866, |
|
"grad_norm": 0.004649542272090912, |
|
"learning_rate": 2.8512820512820517e-07, |
|
"loss": 0.0001, |
|
"step": 19450 |
|
}, |
|
{ |
|
"epoch": 16.469344608879492, |
|
"grad_norm": 0.004960506223142147, |
|
"learning_rate": 2.7230769230769237e-07, |
|
"loss": 0.0001, |
|
"step": 19475 |
|
}, |
|
{ |
|
"epoch": 16.49048625792812, |
|
"grad_norm": 0.00558600015938282, |
|
"learning_rate": 2.594871794871795e-07, |
|
"loss": 0.0001, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 16.511627906976745, |
|
"grad_norm": 0.00477565685287118, |
|
"learning_rate": 2.466666666666667e-07, |
|
"loss": 0.0001, |
|
"step": 19525 |
|
}, |
|
{ |
|
"epoch": 16.53276955602537, |
|
"grad_norm": 0.007015578914433718, |
|
"learning_rate": 2.3384615384615387e-07, |
|
"loss": 0.0001, |
|
"step": 19550 |
|
}, |
|
{ |
|
"epoch": 16.553911205073994, |
|
"grad_norm": 0.0036124507896602154, |
|
"learning_rate": 2.2102564102564104e-07, |
|
"loss": 0.0001, |
|
"step": 19575 |
|
}, |
|
{ |
|
"epoch": 16.57505285412262, |
|
"grad_norm": 0.005142053589224815, |
|
"learning_rate": 2.082051282051282e-07, |
|
"loss": 0.0001, |
|
"step": 19600 |
|
}, |
|
{ |
|
"epoch": 16.596194503171247, |
|
"grad_norm": 0.005827662535011768, |
|
"learning_rate": 1.953846153846154e-07, |
|
"loss": 0.0001, |
|
"step": 19625 |
|
}, |
|
{ |
|
"epoch": 16.617336152219874, |
|
"grad_norm": 0.006833038758486509, |
|
"learning_rate": 1.8256410256410257e-07, |
|
"loss": 0.0001, |
|
"step": 19650 |
|
}, |
|
{ |
|
"epoch": 16.6384778012685, |
|
"grad_norm": 0.0065496135503053665, |
|
"learning_rate": 1.6974358974358974e-07, |
|
"loss": 0.0001, |
|
"step": 19675 |
|
}, |
|
{ |
|
"epoch": 16.659619450317123, |
|
"grad_norm": 0.0090131014585495, |
|
"learning_rate": 1.5692307692307694e-07, |
|
"loss": 0.0001, |
|
"step": 19700 |
|
}, |
|
{ |
|
"epoch": 16.68076109936575, |
|
"grad_norm": 0.0042783962562680244, |
|
"learning_rate": 1.441025641025641e-07, |
|
"loss": 0.0001, |
|
"step": 19725 |
|
}, |
|
{ |
|
"epoch": 16.701902748414376, |
|
"grad_norm": 0.005526359658688307, |
|
"learning_rate": 1.3128205128205127e-07, |
|
"loss": 0.0001, |
|
"step": 19750 |
|
}, |
|
{ |
|
"epoch": 16.723044397463003, |
|
"grad_norm": 0.004802173003554344, |
|
"learning_rate": 1.1846153846153847e-07, |
|
"loss": 0.0001, |
|
"step": 19775 |
|
}, |
|
{ |
|
"epoch": 16.74418604651163, |
|
"grad_norm": 0.003740393090993166, |
|
"learning_rate": 1.0564102564102565e-07, |
|
"loss": 0.0001, |
|
"step": 19800 |
|
}, |
|
{ |
|
"epoch": 16.765327695560252, |
|
"grad_norm": 0.004182030912488699, |
|
"learning_rate": 9.282051282051283e-08, |
|
"loss": 0.0001, |
|
"step": 19825 |
|
}, |
|
{ |
|
"epoch": 16.78646934460888, |
|
"grad_norm": 0.004386850632727146, |
|
"learning_rate": 8e-08, |
|
"loss": 0.0001, |
|
"step": 19850 |
|
}, |
|
{ |
|
"epoch": 16.807610993657505, |
|
"grad_norm": 0.005020872224122286, |
|
"learning_rate": 6.717948717948718e-08, |
|
"loss": 0.0001, |
|
"step": 19875 |
|
}, |
|
{ |
|
"epoch": 16.82875264270613, |
|
"grad_norm": 0.0037505626678466797, |
|
"learning_rate": 5.435897435897436e-08, |
|
"loss": 0.0001, |
|
"step": 19900 |
|
}, |
|
{ |
|
"epoch": 16.849894291754758, |
|
"grad_norm": 0.004394158720970154, |
|
"learning_rate": 4.153846153846154e-08, |
|
"loss": 0.0001, |
|
"step": 19925 |
|
}, |
|
{ |
|
"epoch": 16.87103594080338, |
|
"grad_norm": 0.004075503908097744, |
|
"learning_rate": 2.8717948717948722e-08, |
|
"loss": 0.0001, |
|
"step": 19950 |
|
}, |
|
{ |
|
"epoch": 16.892177589852007, |
|
"grad_norm": 0.0045666322112083435, |
|
"learning_rate": 1.5897435897435897e-08, |
|
"loss": 0.0001, |
|
"step": 19975 |
|
}, |
|
{ |
|
"epoch": 16.913319238900634, |
|
"grad_norm": 0.006132797803729773, |
|
"learning_rate": 3.0769230769230774e-09, |
|
"loss": 0.0001, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 16.913319238900634, |
|
"eval_loss": 0.08780684322118759, |
|
"eval_runtime": 420.9914, |
|
"eval_samples_per_second": 8.649, |
|
"eval_steps_per_second": 0.542, |
|
"eval_wer": 0.05902826117221217, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 16.913319238900634, |
|
"step": 20000, |
|
"total_flos": 1.8463463044153344e+20, |
|
"train_loss": 0.02644415222366224, |
|
"train_runtime": 72777.8888, |
|
"train_samples_per_second": 8.794, |
|
"train_steps_per_second": 0.275 |
|
} |
|
], |
|
"logging_steps": 25, |
|
"max_steps": 20000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 17, |
|
"save_steps": 1000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.8463463044153344e+20, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|