{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1624,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0006157635467980296, "grad_norm": 7.310926153142698, "learning_rate": 1.226993865030675e-07, "loss": 1.5491, "step": 1 },
    { "epoch": 0.003078817733990148, "grad_norm": 7.35403837138417, "learning_rate": 6.134969325153375e-07, "loss": 1.5843, "step": 5 },
    { "epoch": 0.006157635467980296, "grad_norm": 6.803167318550182, "learning_rate": 1.226993865030675e-06, "loss": 1.5309, "step": 10 },
    { "epoch": 0.009236453201970444, "grad_norm": 5.835727118488878, "learning_rate": 1.8404907975460124e-06, "loss": 1.3594, "step": 15 },
    { "epoch": 0.012315270935960592, "grad_norm": 3.698538754344085, "learning_rate": 2.45398773006135e-06, "loss": 1.2405, "step": 20 },
    { "epoch": 0.01539408866995074, "grad_norm": 2.041108035069763, "learning_rate": 3.0674846625766875e-06, "loss": 1.1387, "step": 25 },
    { "epoch": 0.01847290640394089, "grad_norm": 1.0814828345339402, "learning_rate": 3.680981595092025e-06, "loss": 1.0108, "step": 30 },
    { "epoch": 0.021551724137931036, "grad_norm": 0.8687795931835679, "learning_rate": 4.294478527607362e-06, "loss": 0.9621, "step": 35 },
    { "epoch": 0.024630541871921183, "grad_norm": 0.8664847350475609, "learning_rate": 4.9079754601227e-06, "loss": 1.0134, "step": 40 },
    { "epoch": 0.02770935960591133, "grad_norm": 0.8801651222007403, "learning_rate": 5.521472392638038e-06, "loss": 0.9283, "step": 45 },
    { "epoch": 0.03078817733990148, "grad_norm": 0.8636788681790382, "learning_rate": 6.134969325153375e-06, "loss": 0.9328, "step": 50 },
    { "epoch": 0.033866995073891626, "grad_norm": 0.8253490914438462, "learning_rate": 6.748466257668712e-06, "loss": 0.9051, "step": 55 },
    { "epoch": 0.03694581280788178, "grad_norm": 0.8184367283533114, "learning_rate": 7.36196319018405e-06, "loss": 1.0101, "step": 60 },
    { "epoch": 0.04002463054187192, "grad_norm": 0.8465714119368565, "learning_rate": 7.975460122699386e-06, "loss": 0.9229, "step": 65 },
    { "epoch": 0.04310344827586207, "grad_norm": 0.8285519257452998, "learning_rate": 8.588957055214725e-06, "loss": 0.9491, "step": 70 },
    { "epoch": 0.046182266009852216, "grad_norm": 0.769823593105267, "learning_rate": 9.202453987730062e-06, "loss": 0.9726, "step": 75 },
    { "epoch": 0.04926108374384237, "grad_norm": 0.8064693479221889, "learning_rate": 9.8159509202454e-06, "loss": 0.9667, "step": 80 },
    { "epoch": 0.05233990147783251, "grad_norm": 1.0363791467221333, "learning_rate": 1.0429447852760737e-05, "loss": 0.9209, "step": 85 },
    { "epoch": 0.05541871921182266, "grad_norm": 0.7842863481909221, "learning_rate": 1.1042944785276076e-05, "loss": 0.9398, "step": 90 },
    { "epoch": 0.058497536945812806, "grad_norm": 0.7367025450271165, "learning_rate": 1.1656441717791411e-05, "loss": 0.9461, "step": 95 },
    { "epoch": 0.06157635467980296, "grad_norm": 0.7689269543001077, "learning_rate": 1.226993865030675e-05, "loss": 0.9177, "step": 100 },
    { "epoch": 0.06465517241379311, "grad_norm": 0.8274857270444617, "learning_rate": 1.2883435582822085e-05, "loss": 0.9525, "step": 105 },
    { "epoch": 0.06773399014778325, "grad_norm": 0.7618255279710626, "learning_rate": 1.3496932515337424e-05, "loss": 0.9264, "step": 110 },
    { "epoch": 0.0708128078817734, "grad_norm": 0.7980402273299848, "learning_rate": 1.4110429447852763e-05, "loss": 0.9339, "step": 115 },
    { "epoch": 0.07389162561576355, "grad_norm": 0.8376644789397331, "learning_rate": 1.47239263803681e-05, "loss": 0.8859, "step": 120 },
    { "epoch": 0.0769704433497537, "grad_norm": 0.8130407079299852, "learning_rate": 1.5337423312883436e-05, "loss": 0.9146, "step": 125 },
    { "epoch": 0.08004926108374384, "grad_norm": 0.7956012283141837, "learning_rate": 1.5950920245398772e-05, "loss": 0.9154, "step": 130 },
    { "epoch": 0.08312807881773399, "grad_norm": 0.7948481430201038, "learning_rate": 1.656441717791411e-05, "loss": 0.9576, "step": 135 },
    { "epoch": 0.08620689655172414, "grad_norm": 0.8257741416380704, "learning_rate": 1.717791411042945e-05, "loss": 0.9515, "step": 140 },
    { "epoch": 0.08928571428571429, "grad_norm": 0.8290236573203579, "learning_rate": 1.7791411042944788e-05, "loss": 0.9678, "step": 145 },
    { "epoch": 0.09236453201970443, "grad_norm": 0.9224233612252317, "learning_rate": 1.8404907975460123e-05, "loss": 0.9644, "step": 150 },
    { "epoch": 0.09544334975369458, "grad_norm": 0.8138389103383028, "learning_rate": 1.9018404907975462e-05, "loss": 0.9028, "step": 155 },
    { "epoch": 0.09852216748768473, "grad_norm": 0.8149251514397249, "learning_rate": 1.96319018404908e-05, "loss": 0.9381, "step": 160 },
    { "epoch": 0.10160098522167488, "grad_norm": 0.857572912046182, "learning_rate": 1.999990752408443e-05, "loss": 0.9526, "step": 165 },
    { "epoch": 0.10467980295566502, "grad_norm": 0.8424271480084681, "learning_rate": 1.9998867189676517e-05, "loss": 0.9598, "step": 170 },
    { "epoch": 0.10775862068965517, "grad_norm": 0.7367129010600535, "learning_rate": 1.999667104662347e-05, "loss": 0.9543, "step": 175 },
    { "epoch": 0.11083743842364532, "grad_norm": 0.7601180219708648, "learning_rate": 1.9993319348786157e-05, "loss": 0.9672, "step": 180 },
    { "epoch": 0.11391625615763547, "grad_norm": 0.796186238392561, "learning_rate": 1.9988812483600597e-05, "loss": 0.8957, "step": 185 },
    { "epoch": 0.11699507389162561, "grad_norm": 0.7747918983305362, "learning_rate": 1.9983150972033186e-05, "loss": 0.963, "step": 190 },
    { "epoch": 0.12007389162561577, "grad_norm": 0.8300480182123051, "learning_rate": 1.9976335468520452e-05, "loss": 0.9824, "step": 195 },
    { "epoch": 0.12315270935960591, "grad_norm": 0.8310354348747966, "learning_rate": 1.9968366760893437e-05, "loss": 0.9692, "step": 200 },
    { "epoch": 0.12623152709359606, "grad_norm": 0.8445303140068814, "learning_rate": 1.9959245770286602e-05, "loss": 0.9546, "step": 205 },
    { "epoch": 0.12931034482758622, "grad_norm": 0.8979903129953134, "learning_rate": 1.994897355103136e-05, "loss": 0.9375, "step": 210 },
    { "epoch": 0.13238916256157635, "grad_norm": 0.7465524065375957, "learning_rate": 1.9937551290534208e-05, "loss": 0.9057, "step": 215 },
    { "epoch": 0.1354679802955665, "grad_norm": 0.7497704866674313, "learning_rate": 1.9924980309139455e-05, "loss": 0.9434, "step": 220 },
    { "epoch": 0.13854679802955666, "grad_norm": 0.7076359322846222, "learning_rate": 1.9911262059976614e-05, "loss": 0.9462, "step": 225 },
    { "epoch": 0.1416256157635468, "grad_norm": 0.7848088777070403, "learning_rate": 1.9896398128792413e-05, "loss": 0.9476, "step": 230 },
    { "epoch": 0.14470443349753695, "grad_norm": 0.8165026350741112, "learning_rate": 1.988039023376751e-05, "loss": 0.9527, "step": 235 },
    { "epoch": 0.1477832512315271, "grad_norm": 0.8498025037832465, "learning_rate": 1.9863240225317868e-05, "loss": 1.0053, "step": 240 },
    { "epoch": 0.15086206896551724, "grad_norm": 0.7353554935083384, "learning_rate": 1.984495008588086e-05, "loss": 0.9222, "step": 245 },
    { "epoch": 0.1539408866995074, "grad_norm": 0.7553279060128398, "learning_rate": 1.982552192968612e-05, "loss": 0.9732, "step": 250 },
    { "epoch": 0.15701970443349753, "grad_norm": 0.7548683527271752, "learning_rate": 1.9804958002511137e-05, "loss": 0.9459, "step": 255 },
    { "epoch": 0.16009852216748768, "grad_norm": 0.7794156820504012, "learning_rate": 1.9783260681421667e-05, "loss": 0.9573, "step": 260 },
    { "epoch": 0.16317733990147784, "grad_norm": 0.7422263502441188, "learning_rate": 1.9760432474496963e-05, "loss": 0.9375, "step": 265 },
    { "epoch": 0.16625615763546797, "grad_norm": 0.7259884159865896, "learning_rate": 1.973647602053984e-05, "loss": 0.9483, "step": 270 },
    { "epoch": 0.16933497536945813, "grad_norm": 0.7801247475320573, "learning_rate": 1.9711394088771658e-05, "loss": 0.9663, "step": 275 },
    { "epoch": 0.1724137931034483, "grad_norm": 0.8468086332749512, "learning_rate": 1.9685189578512206e-05, "loss": 0.9581, "step": 280 },
    { "epoch": 0.17549261083743842, "grad_norm": 0.760222004142156, "learning_rate": 1.9657865518844578e-05, "loss": 0.9405, "step": 285 },
    { "epoch": 0.17857142857142858, "grad_norm": 0.818552353689237, "learning_rate": 1.962942506826501e-05, "loss": 0.9436, "step": 290 },
    { "epoch": 0.1816502463054187, "grad_norm": 0.7270444014459365, "learning_rate": 1.9599871514317785e-05, "loss": 0.9587, "step": 295 },
    { "epoch": 0.18472906403940886, "grad_norm": 0.7569082732767817, "learning_rate": 1.9569208273215204e-05, "loss": 0.9092, "step": 300 },
    { "epoch": 0.18780788177339902, "grad_norm": 0.8035781870618336, "learning_rate": 1.953743888944271e-05, "loss": 0.9207, "step": 305 },
    { "epoch": 0.19088669950738915, "grad_norm": 0.798197452115362, "learning_rate": 1.950456703534915e-05, "loss": 0.9722, "step": 310 },
    { "epoch": 0.1939655172413793, "grad_norm": 0.7911708031305178, "learning_rate": 1.9470596510722285e-05, "loss": 0.9088, "step": 315 },
    { "epoch": 0.19704433497536947, "grad_norm": 0.7671171753975288, "learning_rate": 1.9435531242349545e-05, "loss": 1.0116, "step": 320 },
    { "epoch": 0.2001231527093596, "grad_norm": 0.7471976542017915, "learning_rate": 1.9399375283564134e-05, "loss": 0.9443, "step": 325 },
    { "epoch": 0.20320197044334976, "grad_norm": 0.7932187536453967, "learning_rate": 1.9362132813776472e-05, "loss": 0.937, "step": 330 },
    { "epoch": 0.2062807881773399, "grad_norm": 0.7683161798892411, "learning_rate": 1.9323808137991084e-05, "loss": 0.9485, "step": 335 },
    { "epoch": 0.20935960591133004, "grad_norm": 0.7661725970320424, "learning_rate": 1.9284405686308982e-05, "loss": 0.9133, "step": 340 },
    { "epoch": 0.2124384236453202, "grad_norm": 0.7873862980673396, "learning_rate": 1.924393001341555e-05, "loss": 0.9207, "step": 345 },
    { "epoch": 0.21551724137931033, "grad_norm": 0.7452478426256695, "learning_rate": 1.9202385798054073e-05, "loss": 0.9087, "step": 350 },
    { "epoch": 0.2185960591133005, "grad_norm": 0.7793073146063183, "learning_rate": 1.9159777842484878e-05, "loss": 0.9345, "step": 355 },
    { "epoch": 0.22167487684729065, "grad_norm": 0.8169813616813546, "learning_rate": 1.911611107193024e-05, "loss": 0.9382, "step": 360 },
    { "epoch": 0.22475369458128078, "grad_norm": 0.7266251337678646, "learning_rate": 1.9071390534005045e-05, "loss": 0.9732, "step": 365 },
    { "epoch": 0.22783251231527094, "grad_norm": 0.7477667607003808, "learning_rate": 1.9025621398133333e-05, "loss": 0.9518, "step": 370 },
    { "epoch": 0.2309113300492611, "grad_norm": 0.9165568422039077, "learning_rate": 1.8978808954950722e-05, "loss": 0.9503, "step": 375 },
    { "epoch": 0.23399014778325122, "grad_norm": 0.7529486101747109, "learning_rate": 1.8930958615692854e-05, "loss": 0.9591, "step": 380 },
    { "epoch": 0.23706896551724138, "grad_norm": 0.6888920479042022, "learning_rate": 1.8882075911569887e-05, "loss": 0.88, "step": 385 },
    { "epoch": 0.24014778325123154, "grad_norm": 0.7255523898786278, "learning_rate": 1.8832166493127128e-05, "loss": 0.933, "step": 390 },
    { "epoch": 0.24322660098522167, "grad_norm": 0.7128259336980561, "learning_rate": 1.8781236129591847e-05, "loss": 0.9078, "step": 395 },
    { "epoch": 0.24630541871921183, "grad_norm": 0.7070843217854903, "learning_rate": 1.8729290708206412e-05, "loss": 0.9636, "step": 400 },
    { "epoch": 0.24938423645320196, "grad_norm": 0.718867215532029, "learning_rate": 1.867633623354773e-05, "loss": 0.9556, "step": 405 },
    { "epoch": 0.2524630541871921, "grad_norm": 0.7805465337550137, "learning_rate": 1.8622378826833186e-05, "loss": 0.9685, "step": 410 },
    { "epoch": 0.2555418719211823, "grad_norm": 0.7719203711056516, "learning_rate": 1.856742472521304e-05, "loss": 0.9524, "step": 415 },
    { "epoch": 0.25862068965517243, "grad_norm": 0.7192834205111203, "learning_rate": 1.8511480281049475e-05, "loss": 0.9246, "step": 420 },
    { "epoch": 0.2616995073891626, "grad_norm": 0.6723358874994304, "learning_rate": 1.8454551961182276e-05, "loss": 0.9173, "step": 425 },
    { "epoch": 0.2647783251231527, "grad_norm": 0.6817001490385899, "learning_rate": 1.8396646346181327e-05, "loss": 0.9389, "step": 430 },
    { "epoch": 0.26785714285714285, "grad_norm": 0.7207425588978127, "learning_rate": 1.8337770129585918e-05, "loss": 0.9731, "step": 435 },
    { "epoch": 0.270935960591133, "grad_norm": 0.6937128223592749, "learning_rate": 1.8277930117131025e-05, "loss": 0.9666, "step": 440 },
    { "epoch": 0.27401477832512317, "grad_norm": 0.7206019687649675, "learning_rate": 1.8217133225960597e-05, "loss": 0.9047, "step": 445 },
    { "epoch": 0.2770935960591133, "grad_norm": 0.6589164154450292, "learning_rate": 1.8155386483827995e-05, "loss": 0.9323, "step": 450 },
    { "epoch": 0.2801724137931034, "grad_norm": 0.7477503173033859, "learning_rate": 1.8092697028283598e-05, "loss": 0.9434, "step": 455 },
    { "epoch": 0.2832512315270936, "grad_norm": 0.6946681549404662, "learning_rate": 1.8029072105849767e-05, "loss": 0.9097, "step": 460 },
    { "epoch": 0.28633004926108374, "grad_norm": 0.7993317655881385, "learning_rate": 1.7964519071183188e-05, "loss": 0.9484, "step": 465 },
    { "epoch": 0.2894088669950739, "grad_norm": 0.7061937279236051, "learning_rate": 1.789904538622471e-05, "loss": 0.9456, "step": 470 },
    { "epoch": 0.29248768472906406, "grad_norm": 0.7072130152863852, "learning_rate": 1.7832658619336794e-05, "loss": 0.9524, "step": 475 },
    { "epoch": 0.2955665024630542, "grad_norm": 0.714294689243862, "learning_rate": 1.7765366444428655e-05, "loss": 0.9547, "step": 480 },
    { "epoch": 0.2986453201970443, "grad_norm": 0.7609301009818032, "learning_rate": 1.7697176640069217e-05, "loss": 0.9146, "step": 485 },
    { "epoch": 0.3017241379310345, "grad_norm": 0.7537368897585695, "learning_rate": 1.762809708858793e-05, "loss": 0.9451, "step": 490 },
    { "epoch": 0.30480295566502463, "grad_norm": 0.7477622773826048, "learning_rate": 1.7558135775163645e-05, "loss": 0.9489, "step": 495 },
    { "epoch": 0.3078817733990148, "grad_norm": 0.8035379494578148, "learning_rate": 1.7487300786901568e-05, "loss": 0.887, "step": 500 },
    { "epoch": 0.31096059113300495, "grad_norm": 0.8085018346753247, "learning_rate": 1.7415600311898436e-05, "loss": 0.9788, "step": 505 },
    { "epoch": 0.31403940886699505, "grad_norm": 0.7199766001172444, "learning_rate": 1.734304263829602e-05, "loss": 0.9482, "step": 510 },
    { "epoch": 0.3171182266009852, "grad_norm": 0.7155949547205896, "learning_rate": 1.726963615332308e-05, "loss": 0.9638, "step": 515 },
    { "epoch": 0.32019704433497537, "grad_norm": 0.8023351717912234, "learning_rate": 1.7195389342325843e-05, "loss": 0.9418, "step": 520 },
    { "epoch": 0.3232758620689655, "grad_norm": 0.6707946282641698, "learning_rate": 1.7120310787787136e-05, "loss": 0.9051, "step": 525 },
    { "epoch": 0.3263546798029557, "grad_norm": 0.7628307879605529, "learning_rate": 1.7044409168334327e-05, "loss": 0.9301, "step": 530 },
    { "epoch": 0.3294334975369458, "grad_norm": 0.7018750333060788, "learning_rate": 1.696769325773611e-05, "loss": 0.9203, "step": 535 },
    { "epoch": 0.33251231527093594, "grad_norm": 0.8985483854038159, "learning_rate": 1.6890171923888323e-05, "loss": 0.942, "step": 540 },
    { "epoch": 0.3355911330049261, "grad_norm": 0.7095099515570173, "learning_rate": 1.6811854127788857e-05, "loss": 0.9883, "step": 545 },
    { "epoch": 0.33866995073891626, "grad_norm": 0.663301100449732, "learning_rate": 1.6732748922501832e-05, "loss": 0.9958, "step": 550 },
    { "epoch": 0.3417487684729064, "grad_norm": 0.7007829684906022, "learning_rate": 1.6652865452111115e-05, "loss": 0.8977, "step": 555 },
    { "epoch": 0.3448275862068966, "grad_norm": 0.7794993793101852, "learning_rate": 1.657221295066332e-05, "loss": 0.963, "step": 560 },
    { "epoch": 0.3479064039408867, "grad_norm": 0.6817973499177562, "learning_rate": 1.6490800741100396e-05, "loss": 0.9448, "step": 565 },
    { "epoch": 0.35098522167487683, "grad_norm": 0.7464691369559269, "learning_rate": 1.6408638234181975e-05, "loss": 0.9295, "step": 570 },
    { "epoch": 0.354064039408867, "grad_norm": 0.6661854545324757, "learning_rate": 1.6325734927397514e-05, "loss": 0.943, "step": 575 },
    { "epoch": 0.35714285714285715, "grad_norm": 0.6704430428924031, "learning_rate": 1.6242100403868472e-05, "loss": 0.9223, "step": 580 },
    { "epoch": 0.3602216748768473, "grad_norm": 0.7450322676169298, "learning_rate": 1.615774433124054e-05, "loss": 0.9296, "step": 585 },
    { "epoch": 0.3633004926108374, "grad_norm": 0.785984380526607, "learning_rate": 1.6072676460566136e-05, "loss": 0.952, "step": 590 },
    { "epoch": 0.36637931034482757, "grad_norm": 0.7156872420189895, "learning_rate": 1.5986906625177215e-05, "loss": 0.9382, "step": 595 },
    { "epoch": 0.3694581280788177, "grad_norm": 0.7208351590256996, "learning_rate": 1.590044473954863e-05, "loss": 0.9951, "step": 600 },
    { "epoch": 0.3725369458128079, "grad_norm": 0.6980642220237895, "learning_rate": 1.5813300798152048e-05, "loss": 0.9297, "step": 605 },
    { "epoch": 0.37561576354679804, "grad_norm": 0.6658042870926646, "learning_rate": 1.5725484874300673e-05, "loss": 0.9697, "step": 610 },
    { "epoch": 0.3786945812807882, "grad_norm": 0.6989438524636783, "learning_rate": 1.5637007118984814e-05, "loss": 0.9356, "step": 615 },
    { "epoch": 0.3817733990147783, "grad_norm": 0.6554764404296011, "learning_rate": 1.5547877759698498e-05, "loss": 0.9367, "step": 620 },
    { "epoch": 0.38485221674876846, "grad_norm": 0.7212087304435114, "learning_rate": 1.5458107099257245e-05, "loss": 0.9277, "step": 625 },
    { "epoch": 0.3879310344827586, "grad_norm": 0.7411621278427354, "learning_rate": 1.5367705514607107e-05, "loss": 0.9152, "step": 630 },
    { "epoch": 0.3910098522167488, "grad_norm": 0.7201507693917704, "learning_rate": 1.527668345562516e-05, "loss": 0.9169, "step": 635 },
    { "epoch": 0.39408866995073893, "grad_norm": 0.7894484158745393, "learning_rate": 1.518505144391157e-05, "loss": 0.946, "step": 640 },
    { "epoch": 0.39716748768472904, "grad_norm": 0.7132866192747803, "learning_rate": 1.5092820071573358e-05, "loss": 0.9612, "step": 645 },
    { "epoch": 0.4002463054187192, "grad_norm": 0.8036672482141785, "learning_rate": 1.5000000000000002e-05, "loss": 0.936, "step": 650 },
    { "epoch": 0.40332512315270935, "grad_norm": 0.730609032181912, "learning_rate": 1.490660195863106e-05, "loss": 0.9155, "step": 655 },
    { "epoch": 0.4064039408866995, "grad_norm": 0.7875912602755151, "learning_rate": 1.4812636743715912e-05, "loss": 0.9289, "step": 660 },
    { "epoch": 0.40948275862068967, "grad_norm": 0.7504791630648989, "learning_rate": 1.4718115217065766e-05, "loss": 0.9457, "step": 665 },
    { "epoch": 0.4125615763546798, "grad_norm": 0.7348305998616946, "learning_rate": 1.462304830479811e-05, "loss": 0.9072, "step": 670 },
    { "epoch": 0.41564039408866993, "grad_norm": 0.758969021564288, "learning_rate": 1.4527446996073714e-05, "loss": 0.9177, "step": 675 },
    { "epoch": 0.4187192118226601, "grad_norm": 0.7012954160981419, "learning_rate": 1.4431322341826348e-05, "loss": 0.9025, "step": 680 },
    { "epoch": 0.42179802955665024, "grad_norm": 0.8675884737141334, "learning_rate": 1.433468545348537e-05, "loss": 0.9515, "step": 685 },
    { "epoch": 0.4248768472906404, "grad_norm": 0.6907521835103683, "learning_rate": 1.4237547501691298e-05, "loss": 0.9369, "step": 690 },
    { "epoch": 0.42795566502463056, "grad_norm": 0.8760691875396653, "learning_rate": 1.4139919715004558e-05, "loss": 0.9336, "step": 695 },
    { "epoch": 0.43103448275862066, "grad_norm": 0.7605520563419292, "learning_rate": 1.4041813378607534e-05, "loss": 0.9459, "step": 700 },
    { "epoch": 0.4341133004926108, "grad_norm": 0.7482466819256248, "learning_rate": 1.3943239833000068e-05, "loss": 0.9281, "step": 705 },
    { "epoch": 0.437192118226601, "grad_norm": 0.6853008905243693, "learning_rate": 1.3844210472688557e-05, "loss": 0.9115, "step": 710 },
    { "epoch": 0.44027093596059114, "grad_norm": 0.6785408874646308, "learning_rate": 1.3744736744868832e-05, "loss": 0.9147, "step": 715 },
    { "epoch": 0.4433497536945813, "grad_norm": 0.7667132134232257, "learning_rate": 1.3644830148102915e-05, "loss": 0.9454, "step": 720 },
    { "epoch": 0.44642857142857145, "grad_norm": 0.7343585908583702, "learning_rate": 1.3544502230989868e-05, "loss": 0.927, "step": 725 },
    { "epoch": 0.44950738916256155, "grad_norm": 0.7135467850082023, "learning_rate": 1.344376459083085e-05, "loss": 0.9248, "step": 730 },
    { "epoch": 0.4525862068965517, "grad_norm": 0.7291722090004472, "learning_rate": 1.3342628872288505e-05, "loss": 0.9401, "step": 735 },
    { "epoch": 0.45566502463054187, "grad_norm": 0.7200530793723301, "learning_rate": 1.3241106766040957e-05, "loss": 0.9097, "step": 740 },
    { "epoch": 0.45874384236453203, "grad_norm": 0.9856265254605108, "learning_rate": 1.3139210007430404e-05, "loss": 0.9283, "step": 745 },
    { "epoch": 0.4618226600985222, "grad_norm": 0.7341322171237868, "learning_rate": 1.3036950375106588e-05, "loss": 0.9333, "step": 750 },
    { "epoch": 0.4649014778325123, "grad_norm": 0.7546656555280624, "learning_rate": 1.2934339689665274e-05, "loss": 0.9042, "step": 755 },
    { "epoch": 0.46798029556650245, "grad_norm": 0.7226811365920998, "learning_rate": 1.283138981228184e-05, "loss": 0.9139, "step": 760 },
    { "epoch": 0.4710591133004926, "grad_norm": 0.7181811040603533, "learning_rate": 1.2728112643340201e-05, "loss": 0.9264, "step": 765 },
    { "epoch": 0.47413793103448276, "grad_norm": 0.8849529024867829, "learning_rate": 1.26245201210572e-05, "loss": 0.9177, "step": 770 },
    { "epoch": 0.4772167487684729, "grad_norm": 0.7087551590617035, "learning_rate": 1.2520624220102623e-05, "loss": 0.921, "step": 775 },
    { "epoch": 0.4802955665024631, "grad_norm": 0.6935685016158297, "learning_rate": 1.2416436950215001e-05, "loss": 0.9406, "step": 780 },
    { "epoch": 0.4833743842364532, "grad_norm": 0.7473725653174663, "learning_rate": 1.2311970354813345e-05, "loss": 0.9873, "step": 785 },
    { "epoch": 0.48645320197044334, "grad_norm": 0.657674984254088, "learning_rate": 1.220723650960502e-05, "loss": 0.927, "step": 790 },
    { "epoch": 0.4895320197044335, "grad_norm": 0.7186327207648633, "learning_rate": 1.2102247521189838e-05, "loss": 0.9004, "step": 795 },
    { "epoch": 0.49261083743842365, "grad_norm": 0.6823249047599709, "learning_rate": 1.199701552566064e-05, "loss": 0.8879, "step": 800 },
    { "epoch": 0.4956896551724138, "grad_norm": 0.705573871553639, "learning_rate": 1.1891552687200414e-05, "loss": 0.9649, "step": 805 },
    { "epoch": 0.4987684729064039, "grad_norm": 0.7071856320020758, "learning_rate": 1.1785871196676196e-05, "loss": 0.9731, "step": 810 },
    { "epoch": 0.5018472906403941, "grad_norm": 0.6605795316984785, "learning_rate": 1.167998327022988e-05, "loss": 0.9767, "step": 815 },
    { "epoch": 0.5049261083743842, "grad_norm": 0.689384383606719, "learning_rate": 1.1573901147866108e-05, "loss": 0.929, "step": 820 },
    { "epoch": 0.5080049261083743, "grad_norm": 0.7016418038725151, "learning_rate": 1.1467637092037399e-05, "loss": 0.9208, "step": 825 },
    { "epoch": 0.5110837438423645, "grad_norm": 0.6435950510096468, "learning_rate": 1.1361203386226672e-05, "loss": 0.933, "step": 830 },
    { "epoch": 0.5141625615763546, "grad_norm": 0.6915860359426802, "learning_rate": 1.1254612333527368e-05, "loss": 0.904, "step": 835 },
    { "epoch": 0.5172413793103449, "grad_norm": 0.7278487014487579, "learning_rate": 1.1147876255221274e-05, "loss": 0.926, "step": 840 },
    { "epoch": 0.520320197044335, "grad_norm": 0.8402556179449276, "learning_rate": 1.1041007489354263e-05, "loss": 0.91, "step": 845 },
    { "epoch": 0.5233990147783252, "grad_norm": 0.6913958208278679, "learning_rate": 1.093401838931009e-05, "loss": 0.9298, "step": 850 },
    { "epoch": 0.5264778325123153, "grad_norm": 0.6942000092673813, "learning_rate": 1.0826921322382407e-05, "loss": 0.9254, "step": 855 },
    { "epoch": 0.5295566502463054, "grad_norm": 0.6863019990229392, "learning_rate": 1.071972866834519e-05, "loss": 0.8875, "step": 860 },
    { "epoch": 0.5326354679802956, "grad_norm": 0.7444864272324827, "learning_rate": 1.061245281802171e-05, "loss": 0.9348, "step": 865 },
    { "epoch": 0.5357142857142857, "grad_norm": 0.7472300249765285, "learning_rate": 1.0505106171852226e-05, "loss": 0.8728, "step": 870 },
    { "epoch": 0.5387931034482759, "grad_norm": 0.7421508787127711, "learning_rate": 1.039770113846056e-05, "loss": 0.897, "step": 875 },
    { "epoch": 0.541871921182266, "grad_norm": 0.6931950229252386, "learning_rate": 1.0290250133219755e-05, "loss": 0.9215, "step": 880 },
    { "epoch": 0.5449507389162561, "grad_norm": 0.7648333450579011, "learning_rate": 1.0182765576816916e-05, "loss": 0.9577, "step": 885 },
    { "epoch": 0.5480295566502463, "grad_norm": 0.7006731215648692, "learning_rate": 1.0075259893817465e-05, "loss": 0.9245, "step": 890 },
    { "epoch": 0.5511083743842364, "grad_norm": 0.6687572466831397, "learning_rate": 9.967745511228922e-06, "loss": 0.8813, "step": 895 },
    { "epoch": 0.5541871921182266, "grad_norm": 0.7175380682948211, "learning_rate": 9.860234857064439e-06, "loss": 0.9319, "step": 900 },
    { "epoch": 0.5572660098522167, "grad_norm": 0.6948450711755244, "learning_rate": 9.752740358906168e-06, "loss": 0.9341, "step": 905 },
    { "epoch": 0.5603448275862069, "grad_norm": 0.6892353302488635, "learning_rate": 9.645274442468746e-06, "loss": 0.9269, "step": 910 },
    { "epoch": 0.5634236453201971, "grad_norm": 0.7325769139031748, "learning_rate": 9.537849530162915e-06, "loss": 0.9228, "step": 915 },
    { "epoch": 0.5665024630541872, "grad_norm": 0.7282730155939394, "learning_rate": 9.430478039659604e-06, "loss": 0.9138, "step": 920 },
    { "epoch": 0.5695812807881774, "grad_norm": 0.6428816898450572, "learning_rate": 9.323172382454494e-06, "loss": 0.9274, "step": 925 },
    { "epoch": 0.5726600985221675, "grad_norm": 0.6703704767112079, "learning_rate": 9.215944962433329e-06, "loss": 0.855, "step": 930 },
    { "epoch": 0.5757389162561576, "grad_norm": 0.7944858406137029, "learning_rate": 9.108808174438128e-06, "loss": 0.9416, "step": 935 },
    { "epoch": 0.5788177339901478, "grad_norm": 0.721760753318748, "learning_rate": 9.001774402834375e-06, "loss": 0.8895, "step": 940 },
    { "epoch": 0.5818965517241379, "grad_norm": 0.7210922094960251, "learning_rate": 8.89485602007949e-06, "loss": 0.952, "step": 945 },
    { "epoch": 0.5849753694581281, "grad_norm": 0.7216379603390005, "learning_rate": 8.788065385292637e-06, "loss": 0.9067, "step": 950 },
    { "epoch": 0.5880541871921182, "grad_norm": 0.6880918231702351, "learning_rate": 8.68141484282609e-06, "loss": 0.9327, "step": 955 },
    { "epoch": 0.5911330049261084, "grad_norm": 0.7777971176157483, "learning_rate": 8.574916720838293e-06, "loss": 0.9514, "step": 960 },
    { "epoch": 0.5942118226600985, "grad_norm": 0.7227495926203028, "learning_rate": 8.468583329868805e-06, "loss": 0.9463, "step": 965 },
    { "epoch": 0.5972906403940886, "grad_norm": 0.7219855521048865, "learning_rate": 8.362426961415274e-06, "loss": 0.944, "step": 970 },
    { "epoch": 0.6003694581280788, "grad_norm": 0.7155939312082987, "learning_rate": 8.256459886512618e-06, "loss": 0.9108, "step": 975 },
    { "epoch": 0.603448275862069, "grad_norm": 0.6993953380271827, "learning_rate": 8.150694354314556e-06, "loss": 0.9557, "step": 980 },
    { "epoch": 0.6065270935960592, "grad_norm": 0.7638296699531455, "learning_rate": 8.0451425906777e-06, "loss": 0.9312, "step": 985 },
    { "epoch": 0.6096059113300493, "grad_norm": 0.6831250724265701, "learning_rate": 7.939816796748296e-06, "loss": 0.8998, "step": 990 },
    { "epoch": 0.6126847290640394, "grad_norm": 0.7313287861208433, "learning_rate": 7.834729147551858e-06, "loss": 0.9255, "step": 995 },
    { "epoch": 0.6157635467980296, "grad_norm": 0.6590166847034968, "learning_rate": 7.729891790585817e-06, "loss": 0.8842, "step": 1000 },
    { "epoch": 0.6188423645320197, "grad_norm": 0.7323284885675908, "learning_rate": 7.625316844415327e-06, "loss": 0.8812, "step": 1005 },
    { "epoch": 0.6219211822660099, "grad_norm": 0.7215619643224764, "learning_rate": 7.521016397272436e-06, "loss": 0.9061, "step": 1010 },
    { "epoch": 0.625, "grad_norm": 0.7213661635662646, "learning_rate": 7.417002505658773e-06, "loss": 0.9604, "step": 1015 },
    { "epoch": 0.6280788177339901, "grad_norm": 0.6664085186795183, "learning_rate": 7.313287192951866e-06, "loss": 0.9152, "step": 1020 },
    { "epoch": 0.6311576354679803, "grad_norm": 0.8899370388302164, "learning_rate": 7.209882448015338e-06, "loss": 0.8997, "step": 1025 },
    { "epoch": 0.6342364532019704, "grad_norm": 0.6925853341918634, "learning_rate": 7.1068002238130465e-06, "loss": 0.9186, "step": 1030 },
    { "epoch": 0.6373152709359606, "grad_norm": 0.7526266211171548, "learning_rate": 7.004052436027397e-06, "loss": 0.907, "step": 1035 },
    { "epoch": 0.6403940886699507, "grad_norm": 0.7116871357451918, "learning_rate": 6.901650961681976e-06, "loss": 0.9136, "step": 1040 },
    { "epoch": 0.6434729064039408, "grad_norm": 0.7181997436838037, "learning_rate": 6.799607637768621e-06, "loss": 0.9165, "step": 1045 },
    { "epoch": 0.646551724137931, "grad_norm": 0.7424070514212068, "learning_rate": 6.6979342598791395e-06, "loss": 0.9401, "step": 1050 },
    { "epoch": 0.6496305418719212, "grad_norm": 0.7056624976371111, "learning_rate": 6.596642580841827e-06, "loss": 0.9151, "step": 1055 },
    { "epoch": 0.6527093596059114, "grad_norm": 0.7679067760249104, "learning_rate": 6.495744309362879e-06, "loss": 0.8805, "step": 1060 },
    { "epoch": 0.6557881773399015, "grad_norm": 0.7261332639619558, "learning_rate": 6.395251108672975e-06, "loss": 0.8987, "step": 1065 },
    { "epoch": 0.6588669950738916, "grad_norm": 0.6531278111009767, "learning_rate": 6.295174595179041e-06, "loss": 0.9147, "step": 1070 },
    { "epoch": 0.6619458128078818, "grad_norm": 0.74223655982831, "learning_rate": 6.195526337121483e-06, "loss": 0.9432, "step": 1075 },
    { "epoch": 0.6650246305418719, "grad_norm": 0.6775090824932495, "learning_rate": 6.096317853236975e-06, "loss": 0.8558, "step": 1080 },
    { "epoch": 0.6681034482758621, "grad_norm": 0.7451875244452356, "learning_rate": 5.997560611426947e-06, "loss": 0.9129, "step": 1085 },
    { "epoch": 0.6711822660098522, "grad_norm": 0.775303530121011, "learning_rate": 5.899266027431965e-06, "loss": 0.9302, "step": 1090 },
    { "epoch": 0.6742610837438424, "grad_norm": 0.6681173244510462, "learning_rate": 5.80144546351216e-06, "loss": 0.8755, "step": 1095 },
    { "epoch": 0.6773399014778325, "grad_norm": 0.6460985572337176, "learning_rate": 5.704110227133792e-06, "loss": 0.916, "step": 1100 },
    { "epoch": 0.6804187192118226, "grad_norm": 0.6903928180028339, "learning_rate": 5.607271569662203e-06, "loss": 0.8914, "step": 1105 },
    { "epoch": 0.6834975369458128, "grad_norm": 0.7795810817387983, "learning_rate": 5.510940685061202e-06, "loss": 0.8998, "step": 1110 },
    { "epoch": 0.6865763546798029, "grad_norm": 0.7672315116978631, "learning_rate": 5.41512870859912e-06, "loss": 0.9303, "step": 1115 },
    { "epoch": 0.6896551724137931, "grad_norm": 0.7806157930588039, "learning_rate": 5.319846715561656e-06, "loss": 0.9332, "step": 1120 },
    { "epoch": 0.6927339901477833, "grad_norm": 0.7003891392810607, "learning_rate": 5.225105719971615e-06, "loss": 0.8993, "step": 1125 },
    { "epoch": 0.6958128078817734, "grad_norm": 0.6721487834927411, "learning_rate": 5.130916673315762e-06, "loss": 0.941, "step": 1130 },
    { "epoch": 0.6988916256157636, "grad_norm": 0.6757485350567788, "learning_rate": 5.037290463278914e-06, "loss": 0.9011, "step": 1135 },
    { "epoch": 0.7019704433497537, "grad_norm": 0.7048595319775736, "learning_rate": 4.94423791248536e-06, "loss": 0.8987, "step": 1140 },
    { "epoch": 0.7050492610837439, "grad_norm": 0.6775506981745996, "learning_rate": 4.851769777247857e-06, "loss": 0.9616, "step": 1145 },
    { "epoch": 0.708128078817734, "grad_norm": 0.7082392404353789, "learning_rate": 4.759896746324247e-06, "loss": 0.9016, "step": 1150 },
    { "epoch": 0.7112068965517241, "grad_norm": 0.6819223359256046, "learning_rate": 4.668629439681907e-06, "loss": 0.9272, "step": 1155 },
    { "epoch": 0.7142857142857143, "grad_norm": 0.6845551815152396, "learning_rate": 4.577978407270156e-06, "loss": 0.933, "step": 1160 },
    { "epoch": 0.7173645320197044, "grad_norm": 0.6950058521766465, "learning_rate": 4.487954127800726e-06, "loss": 0.9041, "step": 1165 },
    { "epoch": 0.7204433497536946, "grad_norm": 0.6867684845183903, "learning_rate": 4.398567007536493e-06, "loss": 0.9612, "step": 1170 },
    { "epoch": 0.7235221674876847, "grad_norm": 0.6586394312886726, "learning_rate": 4.309827379088589e-06, "loss": 0.9089, "step": 1175 },
    { "epoch": 0.7266009852216748, "grad_norm": 0.7365265974356552, "learning_rate": 4.221745500221992e-06, "loss": 0.8724, "step": 1180 },
    { "epoch": 0.729679802955665, "grad_norm": 0.7378780810314269, "learning_rate": 4.134331552669812e-06, "loss": 0.9275, "step": 1185 },
    { "epoch": 0.7327586206896551, "grad_norm": 0.6956027586420739, "learning_rate": 4.047595640956326e-06, "loss": 0.9094, "step": 1190 },
    { "epoch": 0.7358374384236454, "grad_norm": 0.6644083331902493, "learning_rate": 3.961547791228963e-06, "loss": 0.8809, "step": 1195 },
    { "epoch": 0.7389162561576355, "grad_norm": 0.6780105644457819, "learning_rate": 3.876197950099351e-06, "loss": 0.8894, "step": 1200 },
    { "epoch": 0.7419950738916257, "grad_norm": 0.7238626484216509, "learning_rate": 3.7915559834935355e-06, "loss": 0.9182, "step": 1205 },
    { "epoch": 0.7450738916256158, "grad_norm": 0.677354013936962, "learning_rate": 3.7076316755115407e-06, "loss": 0.9056, "step": 1210 },
    { "epoch": 0.7481527093596059, "grad_norm": 0.6887868120830557, "learning_rate": 3.6244347272963974e-06, "loss": 0.9033, "step": 1215 },
    { "epoch": 0.7512315270935961, "grad_norm": 0.7935141959330477, "learning_rate": 3.5419747559127294e-06, "loss": 0.9077, "step": 1220 },
    { "epoch": 0.7543103448275862, "grad_norm": 0.717043682095087, "learning_rate": 3.4602612932351065e-06, "loss": 0.9383, "step": 1225 },
    { "epoch": 0.7573891625615764, "grad_norm": 0.7125434144608334, "learning_rate": 3.3793037848461873e-06, "loss": 0.9025, "step": 1230 },
    { "epoch": 0.7604679802955665, "grad_norm": 0.6915559899322756, "learning_rate": 3.2991115889448877e-06, "loss": 0.9113, "step": 1235 },
    { "epoch": 0.7635467980295566, "grad_norm": 0.6806388512684862, "learning_rate": 3.2196939752646183e-06, "loss": 0.9104, "step": 1240 },
    { "epoch": 0.7666256157635468, "grad_norm": 0.7595959827137008, "learning_rate": 3.141060124001776e-06, "loss": 0.9138, "step": 1245 },
    { "epoch": 0.7697044334975369, "grad_norm": 0.7234170553960368, "learning_rate": 3.063219124754543e-06, "loss": 0.9132, "step": 1250 },
    { "epoch": 0.7727832512315271, "grad_norm": 0.7023061276615833, "learning_rate": 2.9861799754722033e-06, "loss": 0.892, "step": 1255 },
    { "epoch": 0.7758620689655172, "grad_norm": 0.6711943725081508, "learning_rate": 2.9099515814150335e-06, "loss": 0.887, "step": 1260 },
    { "epoch": 0.7789408866995073, "grad_norm": 0.725196707404757, "learning_rate": 2.8345427541248993e-06, "loss": 0.8776, "step": 1265 },
    { "epoch": 0.7820197044334976, "grad_norm": 0.6608109215361615, "learning_rate": 2.7599622104066937e-06, "loss": 0.9427, "step": 1270 },
    { "epoch": 0.7850985221674877, "grad_norm": 0.6996055449820648, "learning_rate": 2.6862185713207467e-06, "loss": 0.9142, "step": 1275 },
    { "epoch": 0.7881773399014779, "grad_norm": 0.7432961342047957, "learning_rate": 2.6133203611862554e-06, "loss": 0.9064, "step": 1280 },
    { "epoch": 0.791256157635468, "grad_norm": 0.6236987444510391, "learning_rate": 2.5412760065959386e-06, "loss": 0.9048, "step": 1285 },
    { "epoch": 0.7943349753694581, "grad_norm": 0.6220890988258023, "learning_rate": 2.4700938354419823e-06, "loss": 0.8903, "step": 1290 },
    { "epoch": 0.7974137931034483, "grad_norm": 0.6892863270775752, "learning_rate": 2.3997820759533654e-06, "loss": 0.913, "step": 1295 },
    { "epoch": 0.8004926108374384, "grad_norm": 1.6773601433209218, "learning_rate": 2.3303488557447374e-06, "loss": 0.8753, "step": 1300 },
    { "epoch": 0.8035714285714286, "grad_norm": 0.722872865650384, "learning_rate": 2.26180220087692e-06, "loss": 0.8957, "step": 1305 },
    { "epoch": 0.8066502463054187, "grad_norm": 0.6613242490598888, "learning_rate": 2.194150034929133e-06, "loss": 0.9054, "step": 1310 },
    { "epoch": 0.8097290640394089, "grad_norm": 0.7330833022368549, "learning_rate": 2.1274001780830776e-06, "loss": 0.9283, "step": 1315 },
    { "epoch": 0.812807881773399, "grad_norm": 0.6778392727939891, "learning_rate": 2.0615603462189824e-06, "loss": 0.9288, "step": 1320 },
    { "epoch": 0.8158866995073891, "grad_norm": 0.7707106273789677, "learning_rate": 1.9966381500236786e-06, "loss": 0.875, "step": 1325 },
    { "epoch": 0.8189655172413793, "grad_norm": 0.6432858185048476, "learning_rate": 1.932641094110855e-06, "loss": 0.8971, "step": 1330 },
    { "epoch": 0.8220443349753694, "grad_norm": 0.6914786964712628, "learning_rate": 1.869576576153581e-06, "loss": 0.8654, "step": 1335 },
    { "epoch": 0.8251231527093597, "grad_norm": 0.7215233738169529, "learning_rate": 1.8074518860291646e-06, "loss": 0.909, "step": 1340 },
    { "epoch": 0.8282019704433498, "grad_norm": 0.8465873318075495, "learning_rate": 1.746274204976498e-06, "loss": 0.9038, "step": 1345 },
    { "epoch": 0.8312807881773399, "grad_norm": 0.7216800410068818, "learning_rate": 1.6860506047659442e-06, "loss": 0.9222, "step": 1350 },
    { "epoch": 0.8343596059113301, "grad_norm": 0.7685628442751503, "learning_rate": 1.6267880468818787e-06, "loss": 0.8895, "step": 1355 },
    { "epoch": 0.8374384236453202, "grad_norm": 0.7314941509944337, "learning_rate": 1.5684933817180014e-06, "loss": 0.9106, "step": 1360 },
    { "epoch": 0.8405172413793104, "grad_norm": 0.7642178298055531, "learning_rate": 1.5111733477854507e-06, "loss": 0.9374, "step": 1365 },
    { "epoch": 0.8435960591133005, "grad_norm": 0.6424365034435878, "learning_rate": 1.454834570933884e-06, "loss": 0.899, "step": 1370 },
    { "epoch": 0.8466748768472906, "grad_norm": 0.7054107402438561, "learning_rate": 1.399483563585573e-06, "loss": 0.8976, "step": 1375 },
    { "epoch": 0.8497536945812808, "grad_norm": 0.7944345419145749, "learning_rate": 1.345126723982594e-06, "loss": 0.9645, "step": 1380 },
    { "epoch": 0.8528325123152709, "grad_norm": 0.7297116413505407, "learning_rate": 1.2917703354472467e-06, "loss": 0.9013, "step": 1385 },
    { "epoch": 0.8559113300492611, "grad_norm": 0.7016058795668882, "learning_rate": 1.2394205656557224e-06, "loss": 0.886, "step": 1390 },
    { "epoch": 0.8589901477832512, "grad_norm": 0.6817325792524672, "learning_rate": 1.1880834659251706e-06, "loss": 0.9106, "step": 1395 },
    { "epoch": 0.8620689655172413, "grad_norm": 0.7175638109399047, "learning_rate": 1.1377649705142012e-06, "loss": 0.9046, "step": 1400 },
    { "epoch": 0.8651477832512315, "grad_norm": 0.7325286340177533, "learning_rate": 1.0884708959369116e-06, "loss": 0.9344, "step": 1405 },
    { "epoch": 0.8682266009852216, "grad_norm": 0.6995057655558119, "learning_rate": 1.040206940290547e-06, "loss": 0.8886, "step": 1410 },
    { "epoch": 0.8713054187192119, "grad_norm": 0.6821188835840444, "learning_rate": 9.929786825968213e-07, "loss": 0.8976, "step": 1415 },
    { "epoch": 0.874384236453202, "grad_norm": 0.7376721682017519, "learning_rate": 9.467915821570228e-07, "loss": 0.9492, "step": 1420 },
    { "epoch": 0.8774630541871922, "grad_norm": 0.9328210953314316, "learning_rate": 9.016509779209536e-07, "loss": 0.9074, "step": 1425 },
    { "epoch": 0.8805418719211823, "grad_norm": 0.7340819327835291, "learning_rate": 8.575620878697744e-07, "loss": 0.8837, "step": 1430 },
    { "epoch": 0.8836206896551724, "grad_norm": 0.6845434866282742, "learning_rate": 8.145300084128349e-07, "loss": 0.9185, "step": 1435 },
    { "epoch": 0.8866995073891626, "grad_norm": 0.6628364676048413, "learning_rate": 7.725597137985741e-07, "loss": 0.8584, "step": 1440 },
    { "epoch": 0.8897783251231527, "grad_norm": 0.7208226072354926, "learning_rate": 7.316560555395069e-07, "loss": 0.8826, "step": 1445 },
    { "epoch": 0.8928571428571429, "grad_norm": 0.6773256291080844, "learning_rate": 6.918237618514378e-07, "loss": 0.856, "step": 1450 },
    { "epoch": 0.895935960591133, "grad_norm": 0.7034404298614597, "learning_rate": 6.530674371068946e-07, "loss": 0.9128, "step": 1455 },
    { "epoch": 0.8990147783251231, "grad_norm": 0.7210199225257817, "learning_rate": 6.153915613028915e-07, "loss": 0.9038, "step": 1460 },
    { "epoch": 0.9020935960591133, "grad_norm": 0.7406834860134297, "learning_rate": 5.788004895430799e-07, "loss": 0.8682, "step": 1465 },
    { "epoch": 0.9051724137931034, "grad_norm": 0.7117319738607917, "learning_rate": 5.43298451534312e-07, "loss": 0.9407, "step": 1470 },
    { "epoch": 0.9082512315270936, "grad_norm": 0.6909093195401991, "learning_rate": 5.088895510977154e-07, "loss": 0.8934, "step": 1475 },
    { "epoch": 0.9113300492610837, "grad_norm": 0.6395163285948796, "learning_rate": 4.755777656943239e-07, "loss": 0.9065, "step": 1480 },
    { "epoch": 0.9144088669950738, "grad_norm": 0.7727497202740083, "learning_rate": 4.433669459652945e-07, "loss": 0.8946, "step": 1485 },
    { "epoch": 0.9174876847290641, "grad_norm": 0.6727790753972084, "learning_rate": 4.1226081528680907e-07, "loss": 0.9206, "step": 1490 },
    { "epoch": 0.9205665024630542, "grad_norm": 0.6849296173953379, "learning_rate": 3.822629693396651e-07, "loss": 0.8824, "step": 1495 },
    { "epoch": 0.9236453201970444, "grad_norm": 0.7395506995675738, "learning_rate": 3.5337687569363734e-07, "loss": 0.9219, "step": 1500 },
    { "epoch": 0.9267241379310345, "grad_norm": 0.7042269789976827, "learning_rate": 3.2560587340665694e-07, "loss": 0.9093, "step": 1505 },
    { "epoch": 0.9298029556650246, "grad_norm": 0.7584121398933832, "learning_rate": 2.989531726388262e-07, "loss": 0.9112, "step": 1510 },
    { "epoch": 0.9328817733990148, "grad_norm": 0.7779893269073751, "learning_rate": 2.7342185428134604e-07, "loss": 0.9122, "step": 1515 },
    { "epoch": 0.9359605911330049, "grad_norm": 0.6361716458037133, "learning_rate": 2.4901486960039025e-07, "loss": 0.9178, "step": 1520 },
    { "epoch": 0.9390394088669951, "grad_norm": 0.7272001913384153, "learning_rate": 2.257350398959457e-07, "loss": 0.8953, "step": 1525 },
    { "epoch": 0.9421182266009852, "grad_norm": 0.6848577166162307, "learning_rate": 2.035850561756969e-07, "loss": 0.9155, "step": 1530 },
    { "epoch": 0.9451970443349754, "grad_norm": 0.7198842919309125, "learning_rate": 1.8256747884395577e-07, "loss": 0.9037, "step": 1535 },
    { "epoch": 0.9482758620689655, "grad_norm": 0.7115101379396237, "learning_rate": 1.6268473740569723e-07, "loss": 0.9647, "step": 1540 },
    { "epoch": 0.9513546798029556, "grad_norm": 0.7368827581632996, "learning_rate": 1.4393913018572182e-07, "loss": 0.8805, "step": 1545 },
    { "epoch": 0.9544334975369458, "grad_norm": 0.8079433748319568, "learning_rate": 1.2633282406298576e-07, "loss": 0.896, "step": 1550 },
    { "epoch": 0.9575123152709359, "grad_norm": 0.6587864044048146, "learning_rate": 1.0986785422011593e-07, "loss": 0.8944, "step": 1555 },
    { "epoch": 0.9605911330049262, "grad_norm": 0.6943529244689446, "learning_rate": 9.454612390816686e-08, "loss": 0.892, "step": 1560 },
    { "epoch": 0.9636699507389163, "grad_norm": 0.7475181568465236, "learning_rate": 8.036940422660345e-08, "loss": 0.9109, "step": 1565 },
    { "epoch": 0.9667487684729064, "grad_norm": 0.712566295810641, "learning_rate": 6.733933391858238e-08, "loss": 0.9387, "step": 1570 },
    { "epoch": 0.9698275862068966, "grad_norm": 0.6953912672416684, "learning_rate": 5.5457419181517145e-08, "loss": 0.9009, "step": 1575 },
    { "epoch": 0.9729064039408867, "grad_norm": 0.6683739761168683, "learning_rate": 4.472503349297497e-08, "loss": 0.8673, "step": 1580 },
    { "epoch": 0.9759852216748769, "grad_norm": 0.69079632147327, "learning_rate": 3.5143417451907195e-08, "loss": 0.8563, "step": 1585 },
    { "epoch": 0.979064039408867, "grad_norm": 0.708420869441777, "learning_rate": 2.671367863524732e-08, "loss": 0.9274, "step": 1590 },
    { "epoch": 0.9821428571428571, "grad_norm": 0.8174030990687192, "learning_rate": 1.943679146988009e-08, "loss": 0.9223, "step": 1595 },
    { "epoch": 0.9852216748768473, "grad_norm": 0.742245024367394, "learning_rate": 1.3313597120002686e-08, "loss": 0.9255, "step": 1600 },
    { "epoch": 0.9883004926108374, "grad_norm": 0.6809579676933898, "learning_rate": 8.34480338989141e-09, "loss": 0.9343, "step": 1605 },
    { "epoch": 0.9913793103448276, "grad_norm": 0.6818330666493024, "learning_rate": 4.530984642087122e-09, "loss": 0.8425, "step": 1610 },
    { "epoch": 0.9944581280788177, "grad_norm": 0.6517282911169013, "learning_rate": 1.872581730997247e-09, "loss": 0.9258, "step": 1615 },
    { "epoch": 0.9975369458128078, "grad_norm": 0.7361227785975518, "learning_rate": 3.699019519387559e-10, "loss": 0.8655, "step": 1620 },
    { "epoch": 1.0, "eval_loss": 0.9033653736114502, "eval_runtime": 1302.7533, "eval_samples_per_second": 17.739, "eval_steps_per_second": 0.555, "step": 1624 },
    { "epoch": 1.0, "step": 1624, "total_flos": 165422873640960.0, "train_loss": 0.9319284432040059, "train_runtime": 12380.9765, "train_samples_per_second": 4.197, "train_steps_per_second": 0.131 }
  ],
  "logging_steps": 5,
  "max_steps": 1624,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 165422873640960.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}