{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9992841803865425, |
|
"eval_steps": 88, |
|
"global_step": 349, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.002863278453829635, |
|
"grad_norm": 1.7433022469392645, |
|
"learning_rate": 2.9411764705882356e-07, |
|
"loss": 2.5227, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.002863278453829635, |
|
"eval_loss": 2.9798059463500977, |
|
"eval_runtime": 15.4107, |
|
"eval_samples_per_second": 14.146, |
|
"eval_steps_per_second": 2.401, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00572655690765927, |
|
"grad_norm": 1.7260085141016746, |
|
"learning_rate": 5.882352941176471e-07, |
|
"loss": 2.5557, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.008589835361488905, |
|
"grad_norm": 1.6875415011788866, |
|
"learning_rate": 8.823529411764707e-07, |
|
"loss": 2.5153, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01145311381531854, |
|
"grad_norm": 1.501231559534811, |
|
"learning_rate": 1.1764705882352942e-06, |
|
"loss": 2.5383, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.014316392269148175, |
|
"grad_norm": 1.3386535922727172, |
|
"learning_rate": 1.4705882352941177e-06, |
|
"loss": 2.5313, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01717967072297781, |
|
"grad_norm": 1.4453252560511072, |
|
"learning_rate": 1.7647058823529414e-06, |
|
"loss": 2.5023, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.020042949176807445, |
|
"grad_norm": 1.2649460157616863, |
|
"learning_rate": 2.058823529411765e-06, |
|
"loss": 2.516, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.02290622763063708, |
|
"grad_norm": 1.2272485421450576, |
|
"learning_rate": 2.3529411764705885e-06, |
|
"loss": 2.4859, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.025769506084466716, |
|
"grad_norm": 0.9566083602405432, |
|
"learning_rate": 2.647058823529412e-06, |
|
"loss": 2.505, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.02863278453829635, |
|
"grad_norm": 0.9324778074733336, |
|
"learning_rate": 2.9411764705882355e-06, |
|
"loss": 2.5398, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.031496062992125984, |
|
"grad_norm": 0.9121134598727566, |
|
"learning_rate": 3.2352941176470594e-06, |
|
"loss": 2.4925, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.03435934144595562, |
|
"grad_norm": 0.7810248065316661, |
|
"learning_rate": 3.529411764705883e-06, |
|
"loss": 2.5252, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.03722261989978525, |
|
"grad_norm": 0.7605027751280174, |
|
"learning_rate": 3.8235294117647055e-06, |
|
"loss": 2.4853, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.04008589835361489, |
|
"grad_norm": 0.7103184324186846, |
|
"learning_rate": 4.11764705882353e-06, |
|
"loss": 2.5164, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.04294917680744453, |
|
"grad_norm": 0.7150261519787532, |
|
"learning_rate": 4.411764705882353e-06, |
|
"loss": 2.5179, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.04581245526127416, |
|
"grad_norm": 0.6401893492590393, |
|
"learning_rate": 4.705882352941177e-06, |
|
"loss": 2.5337, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.048675733715103794, |
|
"grad_norm": 0.6382120155027352, |
|
"learning_rate": 5e-06, |
|
"loss": 2.4971, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.05153901216893343, |
|
"grad_norm": 0.638974871748697, |
|
"learning_rate": 5.294117647058824e-06, |
|
"loss": 2.5137, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.05440229062276306, |
|
"grad_norm": 0.6276417784569189, |
|
"learning_rate": 5.588235294117647e-06, |
|
"loss": 2.5221, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.0572655690765927, |
|
"grad_norm": 0.6267712807753728, |
|
"learning_rate": 5.882352941176471e-06, |
|
"loss": 2.4958, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.06012884753042233, |
|
"grad_norm": 0.6277644572158624, |
|
"learning_rate": 6.176470588235295e-06, |
|
"loss": 2.4878, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.06299212598425197, |
|
"grad_norm": 0.6127762919814617, |
|
"learning_rate": 6.470588235294119e-06, |
|
"loss": 2.4914, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.0658554044380816, |
|
"grad_norm": 0.6038751254503039, |
|
"learning_rate": 6.764705882352942e-06, |
|
"loss": 2.51, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.06871868289191124, |
|
"grad_norm": 0.6003698592486557, |
|
"learning_rate": 7.058823529411766e-06, |
|
"loss": 2.4906, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.07158196134574088, |
|
"grad_norm": 0.6282240768376209, |
|
"learning_rate": 7.352941176470589e-06, |
|
"loss": 2.5082, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.0744452397995705, |
|
"grad_norm": 0.6087841519965377, |
|
"learning_rate": 7.647058823529411e-06, |
|
"loss": 2.4878, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.07730851825340014, |
|
"grad_norm": 0.5860330254627644, |
|
"learning_rate": 7.941176470588236e-06, |
|
"loss": 2.5182, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.08017179670722978, |
|
"grad_norm": 0.5939132759806514, |
|
"learning_rate": 8.23529411764706e-06, |
|
"loss": 2.501, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.08303507516105942, |
|
"grad_norm": 0.6359589764914113, |
|
"learning_rate": 8.529411764705883e-06, |
|
"loss": 2.523, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.08589835361488905, |
|
"grad_norm": 0.6255572369664097, |
|
"learning_rate": 8.823529411764707e-06, |
|
"loss": 2.4931, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.08876163206871868, |
|
"grad_norm": 0.5910507560604619, |
|
"learning_rate": 9.11764705882353e-06, |
|
"loss": 2.4838, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.09162491052254831, |
|
"grad_norm": 0.581588749176898, |
|
"learning_rate": 9.411764705882354e-06, |
|
"loss": 2.4853, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.09448818897637795, |
|
"grad_norm": 0.6483321707461923, |
|
"learning_rate": 9.705882352941177e-06, |
|
"loss": 2.4944, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.09735146743020759, |
|
"grad_norm": 0.5987568368261761, |
|
"learning_rate": 1e-05, |
|
"loss": 2.4699, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.10021474588403723, |
|
"grad_norm": 0.6206579432531898, |
|
"learning_rate": 9.999751334779716e-06, |
|
"loss": 2.523, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.10307802433786686, |
|
"grad_norm": 0.6064407921800384, |
|
"learning_rate": 9.999005363852619e-06, |
|
"loss": 2.5203, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.10594130279169649, |
|
"grad_norm": 0.5721412252832796, |
|
"learning_rate": 9.997762161417517e-06, |
|
"loss": 2.5012, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.10880458124552612, |
|
"grad_norm": 0.5497451217796617, |
|
"learning_rate": 9.996021851130897e-06, |
|
"loss": 2.4914, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.11166785969935576, |
|
"grad_norm": 0.5470018162226529, |
|
"learning_rate": 9.993784606094612e-06, |
|
"loss": 2.501, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.1145311381531854, |
|
"grad_norm": 0.5866491336876943, |
|
"learning_rate": 9.991050648838676e-06, |
|
"loss": 2.48, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.11739441660701504, |
|
"grad_norm": 0.5438529330919207, |
|
"learning_rate": 9.987820251299121e-06, |
|
"loss": 2.4983, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.12025769506084466, |
|
"grad_norm": 0.5740915531654763, |
|
"learning_rate": 9.984093734790955e-06, |
|
"loss": 2.5182, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.1231209735146743, |
|
"grad_norm": 0.5664275625259377, |
|
"learning_rate": 9.979871469976197e-06, |
|
"loss": 2.4862, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.12598425196850394, |
|
"grad_norm": 0.5926385697048842, |
|
"learning_rate": 9.975153876827008e-06, |
|
"loss": 2.4896, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.12884753042233357, |
|
"grad_norm": 0.574391809445724, |
|
"learning_rate": 9.969941424583926e-06, |
|
"loss": 2.5367, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.1317108088761632, |
|
"grad_norm": 0.5397781778798922, |
|
"learning_rate": 9.964234631709188e-06, |
|
"loss": 2.5025, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.13457408732999285, |
|
"grad_norm": 0.5539122724303173, |
|
"learning_rate": 9.958034065835151e-06, |
|
"loss": 2.5278, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.13743736578382248, |
|
"grad_norm": 0.5818328936194783, |
|
"learning_rate": 9.951340343707852e-06, |
|
"loss": 2.5188, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.14030064423765212, |
|
"grad_norm": 0.7458319373172223, |
|
"learning_rate": 9.944154131125643e-06, |
|
"loss": 2.4617, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.14316392269148176, |
|
"grad_norm": 0.5661010380152568, |
|
"learning_rate": 9.936476142872979e-06, |
|
"loss": 2.4926, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.14602720114531137, |
|
"grad_norm": 0.5650834703916657, |
|
"learning_rate": 9.928307142649315e-06, |
|
"loss": 2.4848, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.148890479599141, |
|
"grad_norm": 0.548453507209605, |
|
"learning_rate": 9.91964794299315e-06, |
|
"loss": 2.4969, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.15175375805297064, |
|
"grad_norm": 0.5758188221734539, |
|
"learning_rate": 9.910499405201195e-06, |
|
"loss": 2.484, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.15461703650680028, |
|
"grad_norm": 0.5753520047837293, |
|
"learning_rate": 9.900862439242719e-06, |
|
"loss": 2.4941, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.15748031496062992, |
|
"grad_norm": 0.5769855830607382, |
|
"learning_rate": 9.890738003669029e-06, |
|
"loss": 2.4856, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.16034359341445956, |
|
"grad_norm": 0.5490460548187522, |
|
"learning_rate": 9.880127105518122e-06, |
|
"loss": 2.5226, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.1632068718682892, |
|
"grad_norm": 0.601542983516753, |
|
"learning_rate": 9.869030800214531e-06, |
|
"loss": 2.5096, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.16607015032211883, |
|
"grad_norm": 0.5613978478420705, |
|
"learning_rate": 9.857450191464337e-06, |
|
"loss": 2.5145, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.16893342877594847, |
|
"grad_norm": 0.5600847385523823, |
|
"learning_rate": 9.84538643114539e-06, |
|
"loss": 2.5111, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.1717967072297781, |
|
"grad_norm": 0.5401691538446458, |
|
"learning_rate": 9.832840719192737e-06, |
|
"loss": 2.498, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.17465998568360774, |
|
"grad_norm": 0.5520094893813158, |
|
"learning_rate": 9.819814303479268e-06, |
|
"loss": 2.494, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.17752326413743735, |
|
"grad_norm": 0.5497902576608895, |
|
"learning_rate": 9.806308479691595e-06, |
|
"loss": 2.5017, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.180386542591267, |
|
"grad_norm": 0.525740103564976, |
|
"learning_rate": 9.792324591201179e-06, |
|
"loss": 2.5068, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.18324982104509663, |
|
"grad_norm": 0.5544436996206911, |
|
"learning_rate": 9.777864028930705e-06, |
|
"loss": 2.4976, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.18611309949892627, |
|
"grad_norm": 0.555608149815371, |
|
"learning_rate": 9.762928231215731e-06, |
|
"loss": 2.5112, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.1889763779527559, |
|
"grad_norm": 0.5356931315907684, |
|
"learning_rate": 9.747518683661632e-06, |
|
"loss": 2.4967, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.19183965640658554, |
|
"grad_norm": 0.5603711133278504, |
|
"learning_rate": 9.731636918995821e-06, |
|
"loss": 2.5134, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.19470293486041518, |
|
"grad_norm": 0.5414742895107459, |
|
"learning_rate": 9.715284516915303e-06, |
|
"loss": 2.525, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.19756621331424482, |
|
"grad_norm": 0.5348649401490828, |
|
"learning_rate": 9.698463103929542e-06, |
|
"loss": 2.4903, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.20042949176807445, |
|
"grad_norm": 0.5179063198083461, |
|
"learning_rate": 9.681174353198687e-06, |
|
"loss": 2.5107, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.2032927702219041, |
|
"grad_norm": 0.5246232399143359, |
|
"learning_rate": 9.663419984367139e-06, |
|
"loss": 2.5203, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.20615604867573373, |
|
"grad_norm": 0.5289273463036179, |
|
"learning_rate": 9.645201763392513e-06, |
|
"loss": 2.5053, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.20901932712956334, |
|
"grad_norm": 0.5433820476794674, |
|
"learning_rate": 9.626521502369984e-06, |
|
"loss": 2.4764, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.21188260558339297, |
|
"grad_norm": 0.5505407639513397, |
|
"learning_rate": 9.60738105935204e-06, |
|
"loss": 2.4838, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.2147458840372226, |
|
"grad_norm": 0.5690742761627252, |
|
"learning_rate": 9.58778233816367e-06, |
|
"loss": 2.4774, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.21760916249105225, |
|
"grad_norm": 0.514161333996788, |
|
"learning_rate": 9.567727288213005e-06, |
|
"loss": 2.4778, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.2204724409448819, |
|
"grad_norm": 0.5512425303188625, |
|
"learning_rate": 9.547217904297411e-06, |
|
"loss": 2.488, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.22333571939871152, |
|
"grad_norm": 0.547345521950976, |
|
"learning_rate": 9.526256226405075e-06, |
|
"loss": 2.5231, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.22619899785254116, |
|
"grad_norm": 0.572668835030032, |
|
"learning_rate": 9.504844339512096e-06, |
|
"loss": 2.4904, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.2290622763063708, |
|
"grad_norm": 0.5586691177416543, |
|
"learning_rate": 9.482984373375105e-06, |
|
"loss": 2.5099, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.23192555476020044, |
|
"grad_norm": 0.5580296085776706, |
|
"learning_rate": 9.460678502319419e-06, |
|
"loss": 2.4894, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.23478883321403007, |
|
"grad_norm": 0.5485012119106963, |
|
"learning_rate": 9.437928945022772e-06, |
|
"loss": 2.4949, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.2376521116678597, |
|
"grad_norm": 0.543333232228658, |
|
"learning_rate": 9.414737964294636e-06, |
|
"loss": 2.4868, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.24051539012168932, |
|
"grad_norm": 0.5890968894329803, |
|
"learning_rate": 9.391107866851143e-06, |
|
"loss": 2.5089, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.24337866857551896, |
|
"grad_norm": 0.5506851813496826, |
|
"learning_rate": 9.36704100308565e-06, |
|
"loss": 2.4458, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.2462419470293486, |
|
"grad_norm": 0.5504307062507774, |
|
"learning_rate": 9.342539766834945e-06, |
|
"loss": 2.4797, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.24910522548317823, |
|
"grad_norm": 0.548732984699912, |
|
"learning_rate": 9.317606595141156e-06, |
|
"loss": 2.5029, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.25196850393700787, |
|
"grad_norm": 0.5505255392480715, |
|
"learning_rate": 9.292243968009332e-06, |
|
"loss": 2.5027, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.25196850393700787, |
|
"eval_loss": 2.9500625133514404, |
|
"eval_runtime": 15.442, |
|
"eval_samples_per_second": 14.117, |
|
"eval_steps_per_second": 2.396, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.25483178239083754, |
|
"grad_norm": 0.5482944395523252, |
|
"learning_rate": 9.266454408160779e-06, |
|
"loss": 2.5322, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.25769506084466715, |
|
"grad_norm": 0.541771069630789, |
|
"learning_rate": 9.24024048078213e-06, |
|
"loss": 2.5155, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.26055833929849676, |
|
"grad_norm": 0.5325088342727101, |
|
"learning_rate": 9.213604793270196e-06, |
|
"loss": 2.4651, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.2634216177523264, |
|
"grad_norm": 0.5265061377076917, |
|
"learning_rate": 9.186549994972618e-06, |
|
"loss": 2.4755, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.26628489620615603, |
|
"grad_norm": 0.5466047048898959, |
|
"learning_rate": 9.159078776924347e-06, |
|
"loss": 2.4785, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.2691481746599857, |
|
"grad_norm": 0.5303191520111897, |
|
"learning_rate": 9.131193871579975e-06, |
|
"loss": 2.5137, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.2720114531138153, |
|
"grad_norm": 0.5517463944794626, |
|
"learning_rate": 9.102898052541959e-06, |
|
"loss": 2.4892, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.27487473156764497, |
|
"grad_norm": 0.5100270059915952, |
|
"learning_rate": 9.074194134284726e-06, |
|
"loss": 2.5048, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.2777380100214746, |
|
"grad_norm": 0.5278965930755488, |
|
"learning_rate": 9.045084971874738e-06, |
|
"loss": 2.4806, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.28060128847530424, |
|
"grad_norm": 0.5386440502957012, |
|
"learning_rate": 9.01557346068651e-06, |
|
"loss": 2.4637, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.28346456692913385, |
|
"grad_norm": 0.5424442293047355, |
|
"learning_rate": 8.985662536114614e-06, |
|
"loss": 2.4731, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.2863278453829635, |
|
"grad_norm": 0.5447548421095255, |
|
"learning_rate": 8.955355173281709e-06, |
|
"loss": 2.456, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.28919112383679313, |
|
"grad_norm": 0.5056892441431114, |
|
"learning_rate": 8.924654386742613e-06, |
|
"loss": 2.5027, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.29205440229062274, |
|
"grad_norm": 0.5577307275143056, |
|
"learning_rate": 8.89356323018447e-06, |
|
"loss": 2.5396, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.2949176807444524, |
|
"grad_norm": 0.5269321742421326, |
|
"learning_rate": 8.862084796122998e-06, |
|
"loss": 2.5348, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.297780959198282, |
|
"grad_norm": 0.573483487118278, |
|
"learning_rate": 8.83022221559489e-06, |
|
"loss": 2.5129, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.3006442376521117, |
|
"grad_norm": 0.5053757854271658, |
|
"learning_rate": 8.797978657846391e-06, |
|
"loss": 2.4992, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.3035075161059413, |
|
"grad_norm": 0.5463649574920005, |
|
"learning_rate": 8.765357330018056e-06, |
|
"loss": 2.4745, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.30637079455977095, |
|
"grad_norm": 0.5380399569585859, |
|
"learning_rate": 8.732361476825752e-06, |
|
"loss": 2.5305, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.30923407301360056, |
|
"grad_norm": 0.501404327425355, |
|
"learning_rate": 8.698994380237921e-06, |
|
"loss": 2.493, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.31209735146743023, |
|
"grad_norm": 0.5204332392598845, |
|
"learning_rate": 8.665259359149132e-06, |
|
"loss": 2.4834, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.31496062992125984, |
|
"grad_norm": 0.5172941461879834, |
|
"learning_rate": 8.631159769049965e-06, |
|
"loss": 2.4663, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.3178239083750895, |
|
"grad_norm": 0.5633507992944607, |
|
"learning_rate": 8.596699001693257e-06, |
|
"loss": 2.5106, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.3206871868289191, |
|
"grad_norm": 0.5111870840713941, |
|
"learning_rate": 8.561880484756726e-06, |
|
"loss": 2.4961, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.3235504652827487, |
|
"grad_norm": 0.5204943642446078, |
|
"learning_rate": 8.526707681502045e-06, |
|
"loss": 2.459, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.3264137437365784, |
|
"grad_norm": 0.5243848667219405, |
|
"learning_rate": 8.491184090430365e-06, |
|
"loss": 2.4847, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.329277022190408, |
|
"grad_norm": 0.5095475752397022, |
|
"learning_rate": 8.455313244934324e-06, |
|
"loss": 2.5041, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.33214030064423766, |
|
"grad_norm": 0.5324839977569346, |
|
"learning_rate": 8.4190987129466e-06, |
|
"loss": 2.4581, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.3350035790980673, |
|
"grad_norm": 0.537216858096812, |
|
"learning_rate": 8.382544096585028e-06, |
|
"loss": 2.4956, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.33786685755189694, |
|
"grad_norm": 0.5026214156562598, |
|
"learning_rate": 8.345653031794292e-06, |
|
"loss": 2.5109, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.34073013600572655, |
|
"grad_norm": 0.5589087292743029, |
|
"learning_rate": 8.308429187984298e-06, |
|
"loss": 2.4508, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.3435934144595562, |
|
"grad_norm": 0.5175402219605563, |
|
"learning_rate": 8.270876267665173e-06, |
|
"loss": 2.4915, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.3464566929133858, |
|
"grad_norm": 0.5228528180395297, |
|
"learning_rate": 8.232998006078998e-06, |
|
"loss": 2.4766, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.3493199713672155, |
|
"grad_norm": 0.5654685794544859, |
|
"learning_rate": 8.19479817082828e-06, |
|
"loss": 2.4669, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.3521832498210451, |
|
"grad_norm": 0.517826924265438, |
|
"learning_rate": 8.156280561501196e-06, |
|
"loss": 2.4913, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.3550465282748747, |
|
"grad_norm": 0.5546777654441133, |
|
"learning_rate": 8.117449009293668e-06, |
|
"loss": 2.4551, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.35790980672870437, |
|
"grad_norm": 0.5128294773578065, |
|
"learning_rate": 8.078307376628292e-06, |
|
"loss": 2.5143, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.360773085182534, |
|
"grad_norm": 0.5181572014660524, |
|
"learning_rate": 8.038859556770152e-06, |
|
"loss": 2.4918, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 0.5580601606690623, |
|
"learning_rate": 7.99910947343957e-06, |
|
"loss": 2.4703, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.36649964209019326, |
|
"grad_norm": 0.5310828252758204, |
|
"learning_rate": 7.95906108042184e-06, |
|
"loss": 2.4942, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.3693629205440229, |
|
"grad_norm": 0.5572432262386191, |
|
"learning_rate": 7.918718361173951e-06, |
|
"loss": 2.5351, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.37222619899785253, |
|
"grad_norm": 0.5627081677105857, |
|
"learning_rate": 7.87808532842837e-06, |
|
"loss": 2.4636, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.3750894774516822, |
|
"grad_norm": 0.5613271800153685, |
|
"learning_rate": 7.83716602379391e-06, |
|
"loss": 2.4953, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.3779527559055118, |
|
"grad_norm": 0.5394988971278871, |
|
"learning_rate": 7.795964517353734e-06, |
|
"loss": 2.5189, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.38081603435934147, |
|
"grad_norm": 0.5313679498243665, |
|
"learning_rate": 7.754484907260513e-06, |
|
"loss": 2.4768, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.3836793128131711, |
|
"grad_norm": 0.5703379561975805, |
|
"learning_rate": 7.712731319328798e-06, |
|
"loss": 2.4659, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.3865425912670007, |
|
"grad_norm": 0.49255468232562744, |
|
"learning_rate": 7.670707906624644e-06, |
|
"loss": 2.4989, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.38940586972083036, |
|
"grad_norm": 0.5185698028962875, |
|
"learning_rate": 7.628418849052523e-06, |
|
"loss": 2.4941, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.39226914817465997, |
|
"grad_norm": 0.5378386670092136, |
|
"learning_rate": 7.585868352939564e-06, |
|
"loss": 2.4772, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.39513242662848963, |
|
"grad_norm": 0.5379669385240866, |
|
"learning_rate": 7.543060650617159e-06, |
|
"loss": 2.4902, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.39799570508231924, |
|
"grad_norm": 0.527227129367134, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 2.4898, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.4008589835361489, |
|
"grad_norm": 0.5185278503926084, |
|
"learning_rate": 7.456690684162557e-06, |
|
"loss": 2.479, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.4037222619899785, |
|
"grad_norm": 0.5028759823161459, |
|
"learning_rate": 7.413137010913055e-06, |
|
"loss": 2.5108, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.4065855404438082, |
|
"grad_norm": 0.5323427436694725, |
|
"learning_rate": 7.369343312364994e-06, |
|
"loss": 2.4895, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.4094488188976378, |
|
"grad_norm": 0.5303373348854972, |
|
"learning_rate": 7.3253139445062535e-06, |
|
"loss": 2.4831, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.41231209735146745, |
|
"grad_norm": 0.5171505606227076, |
|
"learning_rate": 7.281053286765816e-06, |
|
"loss": 2.4691, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.41517537580529706, |
|
"grad_norm": 0.5007625110632795, |
|
"learning_rate": 7.236565741578163e-06, |
|
"loss": 2.4864, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.4180386542591267, |
|
"grad_norm": 0.5192090084896421, |
|
"learning_rate": 7.191855733945388e-06, |
|
"loss": 2.5329, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.42090193271295634, |
|
"grad_norm": 0.5408008884476913, |
|
"learning_rate": 7.146927710997047e-06, |
|
"loss": 2.4846, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.42376521116678595, |
|
"grad_norm": 0.4927740392753858, |
|
"learning_rate": 7.101786141547829e-06, |
|
"loss": 2.458, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.4266284896206156, |
|
"grad_norm": 0.5063973450614866, |
|
"learning_rate": 7.056435515653059e-06, |
|
"loss": 2.4772, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.4294917680744452, |
|
"grad_norm": 0.5403619047312843, |
|
"learning_rate": 7.010880344162087e-06, |
|
"loss": 2.4837, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.4323550465282749, |
|
"grad_norm": 0.4922676337577914, |
|
"learning_rate": 6.965125158269619e-06, |
|
"loss": 2.4819, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.4352183249821045, |
|
"grad_norm": 0.521792840976189, |
|
"learning_rate": 6.919174509065003e-06, |
|
"loss": 2.5105, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.43808160343593416, |
|
"grad_norm": 0.5219763904404502, |
|
"learning_rate": 6.873032967079562e-06, |
|
"loss": 2.4779, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.4409448818897638, |
|
"grad_norm": 0.5154672827971456, |
|
"learning_rate": 6.8267051218319766e-06, |
|
"loss": 2.52, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.44380816034359344, |
|
"grad_norm": 0.5301945278339433, |
|
"learning_rate": 6.780195581371785e-06, |
|
"loss": 2.4721, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.44667143879742305, |
|
"grad_norm": 0.5014158054403869, |
|
"learning_rate": 6.733508971821037e-06, |
|
"loss": 2.4652, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.44953471725125266, |
|
"grad_norm": 0.5221719192401763, |
|
"learning_rate": 6.686649936914151e-06, |
|
"loss": 2.5095, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.4523979957050823, |
|
"grad_norm": 0.5633793319541605, |
|
"learning_rate": 6.639623137536023e-06, |
|
"loss": 2.5088, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.45526127415891193, |
|
"grad_norm": 0.503797400013898, |
|
"learning_rate": 6.592433251258423e-06, |
|
"loss": 2.4392, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.4581245526127416, |
|
"grad_norm": 0.5353571393146068, |
|
"learning_rate": 6.545084971874738e-06, |
|
"loss": 2.4806, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.4609878310665712, |
|
"grad_norm": 0.5394132569835262, |
|
"learning_rate": 6.497583008933097e-06, |
|
"loss": 2.4437, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.4638511095204009, |
|
"grad_norm": 0.49177737499930074, |
|
"learning_rate": 6.449932087267932e-06, |
|
"loss": 2.4602, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.4667143879742305, |
|
"grad_norm": 0.5299162942575597, |
|
"learning_rate": 6.402136946530014e-06, |
|
"loss": 2.458, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.46957766642806015, |
|
"grad_norm": 0.5268297730689246, |
|
"learning_rate": 6.354202340715027e-06, |
|
"loss": 2.4701, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.47244094488188976, |
|
"grad_norm": 0.5120064053501564, |
|
"learning_rate": 6.306133037690693e-06, |
|
"loss": 2.485, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.4753042233357194, |
|
"grad_norm": 0.5110676269579153, |
|
"learning_rate": 6.257933818722544e-06, |
|
"loss": 2.4721, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.47816750178954903, |
|
"grad_norm": 0.5476368667166479, |
|
"learning_rate": 6.209609477998339e-06, |
|
"loss": 2.4884, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.48103078024337864, |
|
"grad_norm": 0.4764530610045073, |
|
"learning_rate": 6.161164822151213e-06, |
|
"loss": 2.4384, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.4838940586972083, |
|
"grad_norm": 0.5083738885386776, |
|
"learning_rate": 6.112604669781572e-06, |
|
"loss": 2.5163, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.4867573371510379, |
|
"grad_norm": 0.5721234428494102, |
|
"learning_rate": 6.063933850977811e-06, |
|
"loss": 2.4187, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.4896206156048676, |
|
"grad_norm": 0.48356591072844135, |
|
"learning_rate": 6.015157206835881e-06, |
|
"loss": 2.4661, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.4924838940586972, |
|
"grad_norm": 0.5318155836725099, |
|
"learning_rate": 5.9662795889777666e-06, |
|
"loss": 2.4874, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.49534717251252686, |
|
"grad_norm": 0.532932583531948, |
|
"learning_rate": 5.917305859068912e-06, |
|
"loss": 2.4611, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.49821045096635647, |
|
"grad_norm": 0.4992558226313865, |
|
"learning_rate": 5.8682408883346535e-06, |
|
"loss": 2.4847, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.5010737294201861, |
|
"grad_norm": 0.49892566334047106, |
|
"learning_rate": 5.819089557075689e-06, |
|
"loss": 2.4672, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.5039370078740157, |
|
"grad_norm": 0.5162760616236272, |
|
"learning_rate": 5.769856754182668e-06, |
|
"loss": 2.481, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.5039370078740157, |
|
"eval_loss": 2.939789295196533, |
|
"eval_runtime": 15.4591, |
|
"eval_samples_per_second": 14.102, |
|
"eval_steps_per_second": 2.393, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.5068002863278454, |
|
"grad_norm": 0.5155049998342655, |
|
"learning_rate": 5.720547376649901e-06, |
|
"loss": 2.4383, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.5096635647816751, |
|
"grad_norm": 0.49383297653530694, |
|
"learning_rate": 5.671166329088278e-06, |
|
"loss": 2.4963, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.5125268432355047, |
|
"grad_norm": 0.5451897427189254, |
|
"learning_rate": 5.621718523237427e-06, |
|
"loss": 2.458, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.5153901216893343, |
|
"grad_norm": 0.5067050740905722, |
|
"learning_rate": 5.57220887747716e-06, |
|
"loss": 2.4615, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.5182534001431639, |
|
"grad_norm": 0.5118844197490995, |
|
"learning_rate": 5.522642316338268e-06, |
|
"loss": 2.4782, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.5211166785969935, |
|
"grad_norm": 0.49843713068761214, |
|
"learning_rate": 5.473023770012686e-06, |
|
"loss": 2.4639, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.5239799570508232, |
|
"grad_norm": 0.49039624756309863, |
|
"learning_rate": 5.423358173863117e-06, |
|
"loss": 2.5034, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.5268432355046528, |
|
"grad_norm": 0.5397522237987987, |
|
"learning_rate": 5.373650467932122e-06, |
|
"loss": 2.4828, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.5297065139584825, |
|
"grad_norm": 0.46938500048241816, |
|
"learning_rate": 5.323905596450759e-06, |
|
"loss": 2.4896, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.5325697924123121, |
|
"grad_norm": 0.46678391598361535, |
|
"learning_rate": 5.274128507346801e-06, |
|
"loss": 2.513, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.5354330708661418, |
|
"grad_norm": 0.497328791756282, |
|
"learning_rate": 5.224324151752575e-06, |
|
"loss": 2.4681, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.5382963493199714, |
|
"grad_norm": 0.49519258041200537, |
|
"learning_rate": 5.174497483512506e-06, |
|
"loss": 2.5165, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.541159627773801, |
|
"grad_norm": 0.51742332377377, |
|
"learning_rate": 5.1246534586903655e-06, |
|
"loss": 2.5054, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.5440229062276306, |
|
"grad_norm": 0.47861405099169874, |
|
"learning_rate": 5.074797035076319e-06, |
|
"loss": 2.4731, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.5468861846814602, |
|
"grad_norm": 0.49802176173801116, |
|
"learning_rate": 5.024933171693791e-06, |
|
"loss": 2.4637, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.5497494631352899, |
|
"grad_norm": 0.4840967047698607, |
|
"learning_rate": 4.9750668283062104e-06, |
|
"loss": 2.4968, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.5526127415891195, |
|
"grad_norm": 0.47135451227221825, |
|
"learning_rate": 4.9252029649236835e-06, |
|
"loss": 2.4511, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.5554760200429492, |
|
"grad_norm": 0.48895069084444304, |
|
"learning_rate": 4.875346541309637e-06, |
|
"loss": 2.4481, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.5583392984967788, |
|
"grad_norm": 0.4965170322744245, |
|
"learning_rate": 4.825502516487497e-06, |
|
"loss": 2.4937, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.5612025769506085, |
|
"grad_norm": 0.4978261261637217, |
|
"learning_rate": 4.775675848247427e-06, |
|
"loss": 2.479, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.5640658554044381, |
|
"grad_norm": 0.4773588207221208, |
|
"learning_rate": 4.7258714926532e-06, |
|
"loss": 2.492, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.5669291338582677, |
|
"grad_norm": 0.4759022046084612, |
|
"learning_rate": 4.676094403549241e-06, |
|
"loss": 2.4585, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.5697924123120973, |
|
"grad_norm": 0.48965996741234036, |
|
"learning_rate": 4.626349532067879e-06, |
|
"loss": 2.4761, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.572655690765927, |
|
"grad_norm": 0.46747661270431873, |
|
"learning_rate": 4.576641826136884e-06, |
|
"loss": 2.4748, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.5755189692197566, |
|
"grad_norm": 0.47493437805144983, |
|
"learning_rate": 4.526976229987315e-06, |
|
"loss": 2.4631, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.5783822476735863, |
|
"grad_norm": 0.4915217558336455, |
|
"learning_rate": 4.477357683661734e-06, |
|
"loss": 2.4768, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.5812455261274159, |
|
"grad_norm": 0.4793297296845006, |
|
"learning_rate": 4.427791122522841e-06, |
|
"loss": 2.4861, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.5841088045812455, |
|
"grad_norm": 0.4847288969292467, |
|
"learning_rate": 4.3782814767625755e-06, |
|
"loss": 2.4528, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.5869720830350752, |
|
"grad_norm": 0.46840483063360394, |
|
"learning_rate": 4.3288336709117246e-06, |
|
"loss": 2.4844, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.5898353614889048, |
|
"grad_norm": 0.49201004049650826, |
|
"learning_rate": 4.279452623350101e-06, |
|
"loss": 2.4908, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.5926986399427344, |
|
"grad_norm": 0.49621336600388116, |
|
"learning_rate": 4.230143245817332e-06, |
|
"loss": 2.4997, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.595561918396564, |
|
"grad_norm": 0.4922422256843438, |
|
"learning_rate": 4.180910442924312e-06, |
|
"loss": 2.4725, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.5984251968503937, |
|
"grad_norm": 0.49243609704503866, |
|
"learning_rate": 4.131759111665349e-06, |
|
"loss": 2.4525, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.6012884753042234, |
|
"grad_norm": 0.5310002493376136, |
|
"learning_rate": 4.0826941409310885e-06, |
|
"loss": 2.4959, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.604151753758053, |
|
"grad_norm": 0.485979098182275, |
|
"learning_rate": 4.033720411022235e-06, |
|
"loss": 2.4817, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.6070150322118826, |
|
"grad_norm": 0.4893545713811437, |
|
"learning_rate": 3.98484279316412e-06, |
|
"loss": 2.4926, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.6098783106657122, |
|
"grad_norm": 0.5177988359519327, |
|
"learning_rate": 3.936066149022191e-06, |
|
"loss": 2.5085, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.6127415891195419, |
|
"grad_norm": 0.49306653272715745, |
|
"learning_rate": 3.887395330218429e-06, |
|
"loss": 2.473, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.6156048675733715, |
|
"grad_norm": 0.47728344715096344, |
|
"learning_rate": 3.8388351778487884e-06, |
|
"loss": 2.4729, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.6184681460272011, |
|
"grad_norm": 0.47456363408410085, |
|
"learning_rate": 3.790390522001662e-06, |
|
"loss": 2.4628, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.6213314244810307, |
|
"grad_norm": 0.4914122331915044, |
|
"learning_rate": 3.7420661812774577e-06, |
|
"loss": 2.479, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.6241947029348605, |
|
"grad_norm": 0.4835020063832526, |
|
"learning_rate": 3.6938669623093086e-06, |
|
"loss": 2.4775, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.6270579813886901, |
|
"grad_norm": 0.4819963834404957, |
|
"learning_rate": 3.6457976592849753e-06, |
|
"loss": 2.4627, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.6299212598425197, |
|
"grad_norm": 0.48578165660736744, |
|
"learning_rate": 3.5978630534699873e-06, |
|
"loss": 2.4921, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.6327845382963493, |
|
"grad_norm": 0.4946223923765296, |
|
"learning_rate": 3.550067912732069e-06, |
|
"loss": 2.4802, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.635647816750179, |
|
"grad_norm": 0.47731930944616413, |
|
"learning_rate": 3.502416991066904e-06, |
|
"loss": 2.4862, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.6385110952040086, |
|
"grad_norm": 0.46672416010636814, |
|
"learning_rate": 3.4549150281252635e-06, |
|
"loss": 2.4746, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.6413743736578382, |
|
"grad_norm": 0.47432031556982895, |
|
"learning_rate": 3.4075667487415785e-06, |
|
"loss": 2.4461, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.6442376521116678, |
|
"grad_norm": 0.4613340476214188, |
|
"learning_rate": 3.3603768624639786e-06, |
|
"loss": 2.4734, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.6471009305654974, |
|
"grad_norm": 0.4879808421776038, |
|
"learning_rate": 3.3133500630858507e-06, |
|
"loss": 2.4458, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.6499642090193272, |
|
"grad_norm": 0.49504385746210705, |
|
"learning_rate": 3.266491028178964e-06, |
|
"loss": 2.4931, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.6528274874731568, |
|
"grad_norm": 0.46696567892600954, |
|
"learning_rate": 3.219804418628216e-06, |
|
"loss": 2.4582, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.6556907659269864, |
|
"grad_norm": 0.46371322697466877, |
|
"learning_rate": 3.173294878168025e-06, |
|
"loss": 2.4688, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.658554044380816, |
|
"grad_norm": 0.45817247273224015, |
|
"learning_rate": 3.12696703292044e-06, |
|
"loss": 2.4942, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.6614173228346457, |
|
"grad_norm": 0.47446326987653453, |
|
"learning_rate": 3.0808254909349987e-06, |
|
"loss": 2.4717, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.6642806012884753, |
|
"grad_norm": 0.46895233817717097, |
|
"learning_rate": 3.0348748417303826e-06, |
|
"loss": 2.4737, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.6671438797423049, |
|
"grad_norm": 0.47218853082979256, |
|
"learning_rate": 2.989119655837913e-06, |
|
"loss": 2.4635, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.6700071581961345, |
|
"grad_norm": 0.4704459933176104, |
|
"learning_rate": 2.9435644843469434e-06, |
|
"loss": 2.4932, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.6728704366499642, |
|
"grad_norm": 0.4649089431955579, |
|
"learning_rate": 2.8982138584521734e-06, |
|
"loss": 2.4827, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.6757337151037939, |
|
"grad_norm": 0.47709482926494806, |
|
"learning_rate": 2.853072289002954e-06, |
|
"loss": 2.465, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.6785969935576235, |
|
"grad_norm": 0.4637487649689432, |
|
"learning_rate": 2.8081442660546126e-06, |
|
"loss": 2.4768, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.6814602720114531, |
|
"grad_norm": 0.46866785131238103, |
|
"learning_rate": 2.7634342584218364e-06, |
|
"loss": 2.4784, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.6843235504652827, |
|
"grad_norm": 0.4894597393103176, |
|
"learning_rate": 2.718946713234185e-06, |
|
"loss": 2.5016, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.6871868289191124, |
|
"grad_norm": 0.4667475078124399, |
|
"learning_rate": 2.674686055493748e-06, |
|
"loss": 2.5086, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.690050107372942, |
|
"grad_norm": 0.4696416005150825, |
|
"learning_rate": 2.6306566876350072e-06, |
|
"loss": 2.4469, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.6929133858267716, |
|
"grad_norm": 0.46824017819944164, |
|
"learning_rate": 2.5868629890869467e-06, |
|
"loss": 2.4876, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.6957766642806013, |
|
"grad_norm": 0.46557559754046707, |
|
"learning_rate": 2.543309315837444e-06, |
|
"loss": 2.4287, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.698639942734431, |
|
"grad_norm": 0.4762974026954119, |
|
"learning_rate": 2.5000000000000015e-06, |
|
"loss": 2.5047, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.7015032211882606, |
|
"grad_norm": 0.4644049626901058, |
|
"learning_rate": 2.4569393493828433e-06, |
|
"loss": 2.457, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.7043664996420902, |
|
"grad_norm": 0.4731570583945912, |
|
"learning_rate": 2.4141316470604362e-06, |
|
"loss": 2.4875, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.7072297780959198, |
|
"grad_norm": 0.4652373549525223, |
|
"learning_rate": 2.371581150947476e-06, |
|
"loss": 2.501, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.7100930565497494, |
|
"grad_norm": 0.46149990291747417, |
|
"learning_rate": 2.3292920933753566e-06, |
|
"loss": 2.4767, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.7129563350035791, |
|
"grad_norm": 0.45894170076752305, |
|
"learning_rate": 2.2872686806712037e-06, |
|
"loss": 2.466, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.7158196134574087, |
|
"grad_norm": 0.4587340338845331, |
|
"learning_rate": 2.245515092739488e-06, |
|
"loss": 2.4843, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.7186828919112384, |
|
"grad_norm": 0.4568306102089786, |
|
"learning_rate": 2.204035482646267e-06, |
|
"loss": 2.4452, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.721546170365068, |
|
"grad_norm": 0.45503692900873527, |
|
"learning_rate": 2.162833976206092e-06, |
|
"loss": 2.4854, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.7244094488188977, |
|
"grad_norm": 0.45864934324739526, |
|
"learning_rate": 2.1219146715716332e-06, |
|
"loss": 2.5048, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 0.4616476773972489, |
|
"learning_rate": 2.081281638826052e-06, |
|
"loss": 2.4602, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.7301360057265569, |
|
"grad_norm": 0.4691408422165569, |
|
"learning_rate": 2.0409389195781627e-06, |
|
"loss": 2.4943, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.7329992841803865, |
|
"grad_norm": 0.46727999266440634, |
|
"learning_rate": 2.0008905265604316e-06, |
|
"loss": 2.4899, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.7358625626342162, |
|
"grad_norm": 0.4751400894854323, |
|
"learning_rate": 1.9611404432298505e-06, |
|
"loss": 2.4554, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.7387258410880458, |
|
"grad_norm": 0.47283617833966596, |
|
"learning_rate": 1.9216926233717087e-06, |
|
"loss": 2.4631, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.7415891195418755, |
|
"grad_norm": 0.4579533439995314, |
|
"learning_rate": 1.8825509907063328e-06, |
|
"loss": 2.4794, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.7444523979957051, |
|
"grad_norm": 0.46285462884634804, |
|
"learning_rate": 1.843719438498806e-06, |
|
"loss": 2.4447, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.7473156764495347, |
|
"grad_norm": 0.45609200392812344, |
|
"learning_rate": 1.8052018291717216e-06, |
|
"loss": 2.5097, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.7501789549033644, |
|
"grad_norm": 0.4736752845679603, |
|
"learning_rate": 1.7670019939210025e-06, |
|
"loss": 2.4632, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.753042233357194, |
|
"grad_norm": 0.47156563387390643, |
|
"learning_rate": 1.7291237323348287e-06, |
|
"loss": 2.4749, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.7559055118110236, |
|
"grad_norm": 0.46047471823042235, |
|
"learning_rate": 1.6915708120157042e-06, |
|
"loss": 2.4313, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.7559055118110236, |
|
"eval_loss": 2.936035633087158, |
|
"eval_runtime": 15.4546, |
|
"eval_samples_per_second": 14.106, |
|
"eval_steps_per_second": 2.394, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.7587687902648532, |
|
"grad_norm": 0.46741816845331546, |
|
"learning_rate": 1.6543469682057105e-06, |
|
"loss": 2.4685, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.7616320687186829, |
|
"grad_norm": 0.44870053167803253, |
|
"learning_rate": 1.617455903414974e-06, |
|
"loss": 2.4712, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.7644953471725126, |
|
"grad_norm": 0.4561531748512701, |
|
"learning_rate": 1.5809012870533996e-06, |
|
"loss": 2.4874, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.7673586256263422, |
|
"grad_norm": 0.45480554069330664, |
|
"learning_rate": 1.544686755065677e-06, |
|
"loss": 2.443, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.7702219040801718, |
|
"grad_norm": 0.46600032777395256, |
|
"learning_rate": 1.5088159095696365e-06, |
|
"loss": 2.5145, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.7730851825340014, |
|
"grad_norm": 0.48866030251102854, |
|
"learning_rate": 1.4732923184979563e-06, |
|
"loss": 2.4231, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.7759484609878311, |
|
"grad_norm": 0.48186026098585555, |
|
"learning_rate": 1.438119515243277e-06, |
|
"loss": 2.4738, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.7788117394416607, |
|
"grad_norm": 0.4589872555488135, |
|
"learning_rate": 1.4033009983067454e-06, |
|
"loss": 2.4859, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.7816750178954903, |
|
"grad_norm": 0.4459291232495709, |
|
"learning_rate": 1.3688402309500353e-06, |
|
"loss": 2.5005, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.7845382963493199, |
|
"grad_norm": 0.4513699250772011, |
|
"learning_rate": 1.3347406408508695e-06, |
|
"loss": 2.487, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.7874015748031497, |
|
"grad_norm": 0.4567378969589174, |
|
"learning_rate": 1.3010056197620813e-06, |
|
"loss": 2.4573, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.7902648532569793, |
|
"grad_norm": 0.4692124594286597, |
|
"learning_rate": 1.2676385231742493e-06, |
|
"loss": 2.4628, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.7931281317108089, |
|
"grad_norm": 0.457853009198463, |
|
"learning_rate": 1.234642669981946e-06, |
|
"loss": 2.4425, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.7959914101646385, |
|
"grad_norm": 0.46355613438320215, |
|
"learning_rate": 1.2020213421536103e-06, |
|
"loss": 2.5078, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.7988546886184682, |
|
"grad_norm": 0.4560209731740047, |
|
"learning_rate": 1.1697777844051105e-06, |
|
"loss": 2.4658, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.8017179670722978, |
|
"grad_norm": 0.45676904584836525, |
|
"learning_rate": 1.137915203877003e-06, |
|
"loss": 2.456, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.8045812455261274, |
|
"grad_norm": 0.454564657506919, |
|
"learning_rate": 1.1064367698155303e-06, |
|
"loss": 2.4476, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.807444523979957, |
|
"grad_norm": 0.46526091929632524, |
|
"learning_rate": 1.0753456132573886e-06, |
|
"loss": 2.478, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.8103078024337866, |
|
"grad_norm": 0.4552190309373214, |
|
"learning_rate": 1.044644826718295e-06, |
|
"loss": 2.4653, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.8131710808876164, |
|
"grad_norm": 0.46555915439648266, |
|
"learning_rate": 1.0143374638853892e-06, |
|
"loss": 2.4766, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.816034359341446, |
|
"grad_norm": 0.4572802006505305, |
|
"learning_rate": 9.844265393134927e-07, |
|
"loss": 2.4569, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.8188976377952756, |
|
"grad_norm": 0.4458605895386478, |
|
"learning_rate": 9.549150281252633e-07, |
|
"loss": 2.4695, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.8217609162491052, |
|
"grad_norm": 0.4591574303861129, |
|
"learning_rate": 9.258058657152763e-07, |
|
"loss": 2.4416, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.8246241947029349, |
|
"grad_norm": 0.45928170428426124, |
|
"learning_rate": 8.971019474580428e-07, |
|
"loss": 2.4722, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.8274874731567645, |
|
"grad_norm": 0.4546308467235846, |
|
"learning_rate": 8.688061284200266e-07, |
|
"loss": 2.4638, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.8303507516105941, |
|
"grad_norm": 0.46135706461175, |
|
"learning_rate": 8.409212230756564e-07, |
|
"loss": 2.4896, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.8332140300644237, |
|
"grad_norm": 0.4357442522497734, |
|
"learning_rate": 8.134500050273841e-07, |
|
"loss": 2.4886, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.8360773085182533, |
|
"grad_norm": 0.4563872305836051, |
|
"learning_rate": 7.863952067298042e-07, |
|
"loss": 2.4891, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.8389405869720831, |
|
"grad_norm": 0.4598435875124808, |
|
"learning_rate": 7.597595192178702e-07, |
|
"loss": 2.4513, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.8418038654259127, |
|
"grad_norm": 0.44658046634222204, |
|
"learning_rate": 7.33545591839222e-07, |
|
"loss": 2.4831, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.8446671438797423, |
|
"grad_norm": 0.44490014330370886, |
|
"learning_rate": 7.077560319906696e-07, |
|
"loss": 2.4633, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.8475304223335719, |
|
"grad_norm": 0.45416636123029175, |
|
"learning_rate": 6.82393404858846e-07, |
|
"loss": 2.4548, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.8503937007874016, |
|
"grad_norm": 0.49083956893955283, |
|
"learning_rate": 6.574602331650559e-07, |
|
"loss": 2.4829, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.8532569792412312, |
|
"grad_norm": 0.45761237430081714, |
|
"learning_rate": 6.329589969143518e-07, |
|
"loss": 2.4761, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.8561202576950608, |
|
"grad_norm": 0.46149826051853654, |
|
"learning_rate": 6.088921331488568e-07, |
|
"loss": 2.4669, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.8589835361488904, |
|
"grad_norm": 0.44852500170386056, |
|
"learning_rate": 5.852620357053651e-07, |
|
"loss": 2.4889, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.8618468146027202, |
|
"grad_norm": 0.44535055340755936, |
|
"learning_rate": 5.620710549772295e-07, |
|
"loss": 2.454, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.8647100930565498, |
|
"grad_norm": 0.4523724462946578, |
|
"learning_rate": 5.393214976805833e-07, |
|
"loss": 2.4829, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.8675733715103794, |
|
"grad_norm": 0.4512432935032381, |
|
"learning_rate": 5.17015626624896e-07, |
|
"loss": 2.4964, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.870436649964209, |
|
"grad_norm": 0.4502752764610012, |
|
"learning_rate": 4.951556604879049e-07, |
|
"loss": 2.4652, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.8732999284180386, |
|
"grad_norm": 0.4418666890355134, |
|
"learning_rate": 4.737437735949263e-07, |
|
"loss": 2.4771, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.8761632068718683, |
|
"grad_norm": 0.4661160898935762, |
|
"learning_rate": 4.5278209570258914e-07, |
|
"loss": 2.4867, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.8790264853256979, |
|
"grad_norm": 0.4611073292882843, |
|
"learning_rate": 4.322727117869951e-07, |
|
"loss": 2.4746, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.8818897637795275, |
|
"grad_norm": 0.4446102922832806, |
|
"learning_rate": 4.122176618363305e-07, |
|
"loss": 2.4695, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.8847530422333572, |
|
"grad_norm": 0.44729778668314735, |
|
"learning_rate": 3.9261894064796136e-07, |
|
"loss": 2.4678, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.8876163206871869, |
|
"grad_norm": 0.45295141361260305, |
|
"learning_rate": 3.734784976300165e-07, |
|
"loss": 2.4926, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.8904795991410165, |
|
"grad_norm": 0.46930631413078255, |
|
"learning_rate": 3.5479823660748703e-07, |
|
"loss": 2.4514, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.8933428775948461, |
|
"grad_norm": 0.4417853093677557, |
|
"learning_rate": 3.365800156328619e-07, |
|
"loss": 2.4905, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.8962061560486757, |
|
"grad_norm": 0.4440433821954867, |
|
"learning_rate": 3.18825646801314e-07, |
|
"loss": 2.4864, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.8990694345025053, |
|
"grad_norm": 0.45330282639589614, |
|
"learning_rate": 3.015368960704584e-07, |
|
"loss": 2.4328, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.901932712956335, |
|
"grad_norm": 0.44590581999226264, |
|
"learning_rate": 2.847154830846971e-07, |
|
"loss": 2.501, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.9047959914101646, |
|
"grad_norm": 0.4534475837301284, |
|
"learning_rate": 2.6836308100417874e-07, |
|
"loss": 2.4455, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.9076592698639943, |
|
"grad_norm": 0.4448096353367094, |
|
"learning_rate": 2.524813163383683e-07, |
|
"loss": 2.4311, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.9105225483178239, |
|
"grad_norm": 0.44389847976222563, |
|
"learning_rate": 2.3707176878426886e-07, |
|
"loss": 2.4872, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.9133858267716536, |
|
"grad_norm": 0.44380665859676643, |
|
"learning_rate": 2.2213597106929608e-07, |
|
"loss": 2.4491, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.9162491052254832, |
|
"grad_norm": 0.44824967084915274, |
|
"learning_rate": 2.0767540879882143e-07, |
|
"loss": 2.4336, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.9191123836793128, |
|
"grad_norm": 0.4615916047912525, |
|
"learning_rate": 1.9369152030840553e-07, |
|
"loss": 2.4893, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.9219756621331424, |
|
"grad_norm": 0.44637662830765845, |
|
"learning_rate": 1.801856965207338e-07, |
|
"loss": 2.4488, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.9248389405869721, |
|
"grad_norm": 0.446240721408501, |
|
"learning_rate": 1.6715928080726417e-07, |
|
"loss": 2.4527, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.9277022190408017, |
|
"grad_norm": 0.4560572613231896, |
|
"learning_rate": 1.5461356885461077e-07, |
|
"loss": 2.4604, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.9305654974946314, |
|
"grad_norm": 0.4314293357419993, |
|
"learning_rate": 1.4254980853566248e-07, |
|
"loss": 2.479, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.933428775948461, |
|
"grad_norm": 0.44433384067807596, |
|
"learning_rate": 1.3096919978546842e-07, |
|
"loss": 2.4698, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.9362920544022906, |
|
"grad_norm": 0.43730852720830754, |
|
"learning_rate": 1.1987289448187777e-07, |
|
"loss": 2.4865, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.9391553328561203, |
|
"grad_norm": 0.4284159960564044, |
|
"learning_rate": 1.0926199633097156e-07, |
|
"loss": 2.4316, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.9420186113099499, |
|
"grad_norm": 0.44571916504011744, |
|
"learning_rate": 9.913756075728088e-08, |
|
"loss": 2.5041, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.9448818897637795, |
|
"grad_norm": 0.45304237672597053, |
|
"learning_rate": 8.950059479880591e-08, |
|
"loss": 2.5114, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.9477451682176091, |
|
"grad_norm": 0.43598348530628495, |
|
"learning_rate": 8.035205700685167e-08, |
|
"loss": 2.5127, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.9506084466714388, |
|
"grad_norm": 0.4356879074001767, |
|
"learning_rate": 7.169285735068531e-08, |
|
"loss": 2.4692, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.9534717251252685, |
|
"grad_norm": 0.44682377084462166, |
|
"learning_rate": 6.352385712702191e-08, |
|
"loss": 2.4778, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.9563350035790981, |
|
"grad_norm": 0.4474749989010966, |
|
"learning_rate": 5.584586887435739e-08, |
|
"loss": 2.4738, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.9591982820329277, |
|
"grad_norm": 0.44254145584508675, |
|
"learning_rate": 4.865965629214819e-08, |
|
"loss": 2.4445, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.9620615604867573, |
|
"grad_norm": 0.4814270534171316, |
|
"learning_rate": 4.196593416484873e-08, |
|
"loss": 2.5038, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.964924838940587, |
|
"grad_norm": 0.4449207768038768, |
|
"learning_rate": 3.576536829081323e-08, |
|
"loss": 2.4704, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.9677881173944166, |
|
"grad_norm": 0.44401542703062347, |
|
"learning_rate": 3.005857541607371e-08, |
|
"loss": 2.5191, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.9706513958482462, |
|
"grad_norm": 0.43424375987912595, |
|
"learning_rate": 2.4846123172992953e-08, |
|
"loss": 2.4885, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.9735146743020758, |
|
"grad_norm": 0.4473592208860157, |
|
"learning_rate": 2.012853002380466e-08, |
|
"loss": 2.4681, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.9763779527559056, |
|
"grad_norm": 0.45638058238294077, |
|
"learning_rate": 1.590626520904526e-08, |
|
"loss": 2.4722, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.9792412312097352, |
|
"grad_norm": 0.4442471751823356, |
|
"learning_rate": 1.2179748700879013e-08, |
|
"loss": 2.4685, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.9821045096635648, |
|
"grad_norm": 0.44699422552952645, |
|
"learning_rate": 8.949351161324227e-09, |
|
"loss": 2.4736, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.9849677881173944, |
|
"grad_norm": 0.44843427084177795, |
|
"learning_rate": 6.215393905388278e-09, |
|
"loss": 2.4758, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.9878310665712241, |
|
"grad_norm": 0.4430207903952749, |
|
"learning_rate": 3.978148869103748e-09, |
|
"loss": 2.4823, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.9906943450250537, |
|
"grad_norm": 0.44535966484744366, |
|
"learning_rate": 2.237838582483387e-09, |
|
"loss": 2.4529, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.9935576234788833, |
|
"grad_norm": 0.43129364144349974, |
|
"learning_rate": 9.946361473822664e-10, |
|
"loss": 2.4717, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.9964209019327129, |
|
"grad_norm": 0.44688887849751735, |
|
"learning_rate": 2.486652202848827e-10, |
|
"loss": 2.4743, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.9992841803865425, |
|
"grad_norm": 0.44578455043995546, |
|
"learning_rate": 0.0, |
|
"loss": 2.5218, |
|
"step": 349 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 349, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 4.4632318176290406e+17, |
|
"train_batch_size": 3, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |