{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1661,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0006020469596628537,
      "grad_norm": 10.688992810202613,
      "learning_rate": 5.98802395209581e-08,
      "loss": 1.7931,
      "step": 1
    },
    {
      "epoch": 0.0030102347983142685,
      "grad_norm": 11.087545625778352,
      "learning_rate": 2.9940119760479047e-07,
      "loss": 1.7944,
      "step": 5
    },
    {
      "epoch": 0.006020469596628537,
      "grad_norm": 10.534678516725148,
      "learning_rate": 5.988023952095809e-07,
      "loss": 1.7878,
      "step": 10
    },
    {
      "epoch": 0.009030704394942806,
      "grad_norm": 9.574983566804878,
      "learning_rate": 8.982035928143713e-07,
      "loss": 1.7696,
      "step": 15
    },
    {
      "epoch": 0.012040939193257074,
      "grad_norm": 7.113460969940963,
      "learning_rate": 1.1976047904191619e-06,
      "loss": 1.6818,
      "step": 20
    },
    {
      "epoch": 0.015051173991571343,
      "grad_norm": 2.6776966029838385,
      "learning_rate": 1.4970059880239521e-06,
      "loss": 1.5898,
      "step": 25
    },
    {
      "epoch": 0.018061408789885613,
      "grad_norm": 2.2972572336933976,
      "learning_rate": 1.7964071856287426e-06,
      "loss": 1.4968,
      "step": 30
    },
    {
      "epoch": 0.02107164358819988,
      "grad_norm": 2.4202996961757077,
      "learning_rate": 2.095808383233533e-06,
      "loss": 1.4052,
      "step": 35
    },
    {
      "epoch": 0.024081878386514148,
      "grad_norm": 1.7252594455655237,
      "learning_rate": 2.3952095808383237e-06,
      "loss": 1.3382,
      "step": 40
    },
    {
      "epoch": 0.027092113184828417,
      "grad_norm": 1.247660016611823,
      "learning_rate": 2.694610778443114e-06,
      "loss": 1.2973,
      "step": 45
    },
    {
      "epoch": 0.030102347983142687,
      "grad_norm": 1.1871096528896479,
      "learning_rate": 2.9940119760479042e-06,
      "loss": 1.2516,
      "step": 50
    },
    {
      "epoch": 0.033112582781456956,
      "grad_norm": 1.062407029099313,
      "learning_rate": 3.2934131736526947e-06,
      "loss": 1.2233,
      "step": 55
    },
    {
      "epoch": 0.036122817579771226,
      "grad_norm": 1.074671822399871,
      "learning_rate": 3.592814371257485e-06,
      "loss": 1.2079,
      "step": 60
    },
    {
      "epoch": 0.03913305237808549,
      "grad_norm": 1.0045673658364525,
      "learning_rate": 3.892215568862276e-06,
      "loss": 1.1883,
      "step": 65
    },
    {
      "epoch": 0.04214328717639976,
      "grad_norm": 0.9990428932639726,
      "learning_rate": 4.191616766467066e-06,
      "loss": 1.192,
      "step": 70
    },
    {
      "epoch": 0.04515352197471403,
      "grad_norm": 1.041260547807295,
      "learning_rate": 4.4910179640718566e-06,
      "loss": 1.1724,
      "step": 75
    },
    {
      "epoch": 0.048163756773028296,
      "grad_norm": 1.0886242768356977,
      "learning_rate": 4.7904191616766475e-06,
      "loss": 1.1507,
      "step": 80
    },
    {
      "epoch": 0.051173991571342566,
      "grad_norm": 1.0469933132322005,
      "learning_rate": 5.0898203592814375e-06,
      "loss": 1.1648,
      "step": 85
    },
    {
      "epoch": 0.054184226369656835,
      "grad_norm": 1.0031768043333225,
      "learning_rate": 5.389221556886228e-06,
      "loss": 1.1557,
      "step": 90
    },
    {
      "epoch": 0.057194461167971104,
      "grad_norm": 1.0235059168601668,
      "learning_rate": 5.6886227544910184e-06,
      "loss": 1.1433,
      "step": 95
    },
    {
      "epoch": 0.060204695966285374,
      "grad_norm": 1.0810995112786321,
      "learning_rate": 5.9880239520958085e-06,
      "loss": 1.13,
      "step": 100
    },
    {
      "epoch": 0.06321493076459964,
      "grad_norm": 1.0676305065720928,
      "learning_rate": 6.2874251497005985e-06,
      "loss": 1.1232,
      "step": 105
    },
    {
      "epoch": 0.06622516556291391,
      "grad_norm": 1.1136702042740867,
      "learning_rate": 6.586826347305389e-06,
      "loss": 1.1148,
      "step": 110
    },
    {
      "epoch": 0.06923540036122817,
      "grad_norm": 1.0798487239001604,
      "learning_rate": 6.88622754491018e-06,
      "loss": 1.1084,
      "step": 115
    },
    {
      "epoch": 0.07224563515954245,
      "grad_norm": 1.12910359621852,
      "learning_rate": 7.18562874251497e-06,
      "loss": 1.1079,
      "step": 120
    },
    {
      "epoch": 0.07525586995785671,
      "grad_norm": 1.0750989910445923,
      "learning_rate": 7.485029940119761e-06,
      "loss": 1.1073,
      "step": 125
    },
    {
      "epoch": 0.07826610475617098,
      "grad_norm": 1.0718510986699241,
      "learning_rate": 7.784431137724551e-06,
      "loss": 1.0852,
      "step": 130
    },
    {
      "epoch": 0.08127633955448525,
      "grad_norm": 1.093833315661369,
      "learning_rate": 8.083832335329342e-06,
      "loss": 1.1007,
      "step": 135
    },
    {
      "epoch": 0.08428657435279951,
      "grad_norm": 1.089981936562526,
      "learning_rate": 8.383233532934131e-06,
      "loss": 1.0668,
      "step": 140
    },
    {
      "epoch": 0.08729680915111379,
      "grad_norm": 1.0716347647949742,
      "learning_rate": 8.682634730538922e-06,
      "loss": 1.0671,
      "step": 145
    },
    {
      "epoch": 0.09030704394942805,
      "grad_norm": 1.1760500582380917,
      "learning_rate": 8.982035928143713e-06,
      "loss": 1.052,
      "step": 150
    },
    {
      "epoch": 0.09331727874774233,
      "grad_norm": 1.1394001820505688,
      "learning_rate": 9.281437125748504e-06,
      "loss": 1.0786,
      "step": 155
    },
    {
      "epoch": 0.09632751354605659,
      "grad_norm": 1.1206173669500894,
      "learning_rate": 9.580838323353295e-06,
      "loss": 1.069,
      "step": 160
    },
    {
      "epoch": 0.09933774834437085,
      "grad_norm": 1.1225031587454042,
      "learning_rate": 9.880239520958084e-06,
      "loss": 1.0407,
      "step": 165
    },
    {
      "epoch": 0.10234798314268513,
      "grad_norm": 1.0802419173541655,
      "learning_rate": 9.999900509954779e-06,
      "loss": 1.0378,
      "step": 170
    },
    {
      "epoch": 0.1053582179409994,
      "grad_norm": 1.1285826979669467,
      "learning_rate": 9.999292529572152e-06,
      "loss": 1.0404,
      "step": 175
    },
    {
      "epoch": 0.10836845273931367,
      "grad_norm": 1.3030407887693638,
      "learning_rate": 9.998131908181262e-06,
      "loss": 1.0368,
      "step": 180
    },
    {
      "epoch": 0.11137868753762793,
      "grad_norm": 1.13304407504489,
      "learning_rate": 9.996418774081658e-06,
      "loss": 1.0194,
      "step": 185
    },
    {
      "epoch": 0.11438892233594221,
      "grad_norm": 1.148326778572453,
      "learning_rate": 9.994153316649769e-06,
      "loss": 1.0308,
      "step": 190
    },
    {
      "epoch": 0.11739915713425647,
      "grad_norm": 1.0879087960219742,
      "learning_rate": 9.991335786317964e-06,
      "loss": 1.0101,
      "step": 195
    },
    {
      "epoch": 0.12040939193257075,
      "grad_norm": 1.1573320998902041,
      "learning_rate": 9.987966494546873e-06,
      "loss": 0.9851,
      "step": 200
    },
    {
      "epoch": 0.12341962673088501,
      "grad_norm": 1.1225156773539111,
      "learning_rate": 9.984045813790959e-06,
      "loss": 0.9975,
      "step": 205
    },
    {
      "epoch": 0.12642986152919927,
      "grad_norm": 1.1163693583224434,
      "learning_rate": 9.979574177457337e-06,
      "loss": 0.9957,
      "step": 210
    },
    {
      "epoch": 0.12944009632751355,
      "grad_norm": 1.1054539548885067,
      "learning_rate": 9.974552079857873e-06,
      "loss": 0.9944,
      "step": 215
    },
    {
      "epoch": 0.13245033112582782,
      "grad_norm": 1.091493000603608,
      "learning_rate": 9.968980076154533e-06,
      "loss": 1.004,
      "step": 220
    },
    {
      "epoch": 0.13546056592414207,
      "grad_norm": 1.1518094795338838,
      "learning_rate": 9.962858782298023e-06,
      "loss": 0.9779,
      "step": 225
    },
    {
      "epoch": 0.13847080072245635,
      "grad_norm": 1.1074295258081415,
      "learning_rate": 9.956188874959686e-06,
      "loss": 0.9842,
      "step": 230
    },
    {
      "epoch": 0.14148103552077063,
      "grad_norm": 1.0975825113307536,
      "learning_rate": 9.948971091456715e-06,
      "loss": 0.9764,
      "step": 235
    },
    {
      "epoch": 0.1444912703190849,
      "grad_norm": 1.0992803599557273,
      "learning_rate": 9.941206229670634e-06,
      "loss": 0.9704,
      "step": 240
    },
    {
      "epoch": 0.14750150511739915,
      "grad_norm": 1.105958751777608,
      "learning_rate": 9.932895147959106e-06,
      "loss": 0.9568,
      "step": 245
    },
    {
      "epoch": 0.15051173991571343,
      "grad_norm": 1.1720050961084032,
      "learning_rate": 9.924038765061042e-06,
      "loss": 0.942,
      "step": 250
    },
    {
      "epoch": 0.1535219747140277,
      "grad_norm": 1.116935163255732,
      "learning_rate": 9.91463805999504e-06,
      "loss": 0.9577,
      "step": 255
    },
    {
      "epoch": 0.15653220951234195,
      "grad_norm": 1.1268188801983967,
      "learning_rate": 9.904694071951167e-06,
      "loss": 0.96,
      "step": 260
    },
    {
      "epoch": 0.15954244431065623,
      "grad_norm": 1.0461466251094855,
      "learning_rate": 9.894207900176074e-06,
      "loss": 0.9424,
      "step": 265
    },
    {
      "epoch": 0.1625526791089705,
      "grad_norm": 1.0867508676115896,
      "learning_rate": 9.883180703851488e-06,
      "loss": 0.948,
      "step": 270
    },
    {
      "epoch": 0.16556291390728478,
      "grad_norm": 1.1549604201551043,
      "learning_rate": 9.871613701966067e-06,
      "loss": 0.9444,
      "step": 275
    },
    {
      "epoch": 0.16857314870559903,
      "grad_norm": 1.0712495395377322,
      "learning_rate": 9.859508173180653e-06,
      "loss": 0.9245,
      "step": 280
    },
    {
      "epoch": 0.1715833835039133,
      "grad_norm": 1.0255493028911156,
      "learning_rate": 9.846865455686915e-06,
      "loss": 0.9309,
      "step": 285
    },
    {
      "epoch": 0.17459361830222758,
      "grad_norm": 1.0229903229977713,
      "learning_rate": 9.833686947059436e-06,
      "loss": 0.9247,
      "step": 290
    },
    {
      "epoch": 0.17760385310054183,
      "grad_norm": 1.0193028798235328,
      "learning_rate": 9.819974104101198e-06,
      "loss": 0.9252,
      "step": 295
    },
    {
      "epoch": 0.1806140878988561,
      "grad_norm": 1.026115519573417,
      "learning_rate": 9.80572844268256e-06,
      "loss": 0.9134,
      "step": 300
    },
    {
      "epoch": 0.18362432269717038,
      "grad_norm": 1.0506692055629476,
      "learning_rate": 9.790951537573686e-06,
      "loss": 0.9415,
      "step": 305
    },
    {
      "epoch": 0.18663455749548466,
      "grad_norm": 1.0790936545456895,
      "learning_rate": 9.775645022270448e-06,
      "loss": 0.9322,
      "step": 310
    },
    {
      "epoch": 0.1896447922937989,
      "grad_norm": 1.025793289075722,
      "learning_rate": 9.759810588813872e-06,
      "loss": 0.928,
      "step": 315
    },
    {
      "epoch": 0.19265502709211318,
      "grad_norm": 1.1339358401456807,
      "learning_rate": 9.743449987603082e-06,
      "loss": 0.9245,
      "step": 320
    },
    {
      "epoch": 0.19566526189042746,
      "grad_norm": 1.1124339419780245,
      "learning_rate": 9.726565027201813e-06,
      "loss": 0.9422,
      "step": 325
    },
    {
      "epoch": 0.1986754966887417,
      "grad_norm": 1.0501127986511691,
      "learning_rate": 9.70915757413847e-06,
      "loss": 0.934,
      "step": 330
    },
    {
      "epoch": 0.20168573148705599,
      "grad_norm": 1.036530336541463,
      "learning_rate": 9.691229552699817e-06,
      "loss": 0.913,
      "step": 335
    },
    {
      "epoch": 0.20469596628537026,
      "grad_norm": 1.0715370900298593,
      "learning_rate": 9.672782944718234e-06,
      "loss": 0.929,
      "step": 340
    },
    {
      "epoch": 0.20770620108368454,
      "grad_norm": 1.0842780971098096,
      "learning_rate": 9.65381978935266e-06,
      "loss": 0.9225,
      "step": 345
    },
    {
      "epoch": 0.2107164358819988,
      "grad_norm": 1.0776823644021207,
      "learning_rate": 9.634342182863163e-06,
      "loss": 0.8971,
      "step": 350
    },
    {
      "epoch": 0.21372667068031306,
      "grad_norm": 1.0787360433631494,
      "learning_rate": 9.614352278379217e-06,
      "loss": 0.9315,
      "step": 355
    },
    {
      "epoch": 0.21673690547862734,
      "grad_norm": 1.025638770495932,
      "learning_rate": 9.593852285661684e-06,
      "loss": 0.9302,
      "step": 360
    },
    {
      "epoch": 0.2197471402769416,
      "grad_norm": 1.0128686388946724,
      "learning_rate": 9.572844470858537e-06,
      "loss": 0.9166,
      "step": 365
    },
    {
      "epoch": 0.22275737507525586,
      "grad_norm": 1.1031191523153046,
      "learning_rate": 9.551331156254358e-06,
      "loss": 0.9229,
      "step": 370
    },
    {
      "epoch": 0.22576760987357014,
      "grad_norm": 1.0053017279332288,
      "learning_rate": 9.529314720013618e-06,
      "loss": 0.9125,
      "step": 375
    },
    {
      "epoch": 0.22877784467188442,
      "grad_norm": 1.0494040246870382,
      "learning_rate": 9.506797595917787e-06,
      "loss": 0.9195,
      "step": 380
    },
    {
      "epoch": 0.23178807947019867,
      "grad_norm": 1.0465974471535013,
      "learning_rate": 9.483782273096295e-06,
      "loss": 0.9135,
      "step": 385
    },
    {
      "epoch": 0.23479831426851294,
      "grad_norm": 1.0280771562627427,
      "learning_rate": 9.460271295751373e-06,
      "loss": 0.9283,
      "step": 390
    },
    {
      "epoch": 0.23780854906682722,
      "grad_norm": 1.0317863254361297,
      "learning_rate": 9.436267262876808e-06,
      "loss": 0.9139,
      "step": 395
    },
    {
      "epoch": 0.2408187838651415,
      "grad_norm": 1.006002207762667,
      "learning_rate": 9.411772827970642e-06,
      "loss": 0.911,
      "step": 400
    },
    {
      "epoch": 0.24382901866345574,
      "grad_norm": 1.0330236444943743,
      "learning_rate": 9.38679069874184e-06,
      "loss": 0.9129,
      "step": 405
    },
    {
      "epoch": 0.24683925346177002,
      "grad_norm": 1.0502639838743322,
      "learning_rate": 9.36132363681097e-06,
      "loss": 0.9161,
      "step": 410
    },
    {
      "epoch": 0.2498494882600843,
      "grad_norm": 1.0378581674586218,
      "learning_rate": 9.335374457404928e-06,
      "loss": 0.9241,
      "step": 415
    },
    {
      "epoch": 0.25285972305839854,
      "grad_norm": 1.0263844252114644,
      "learning_rate": 9.308946029045726e-06,
      "loss": 0.8987,
      "step": 420
    },
    {
      "epoch": 0.25586995785671285,
      "grad_norm": 1.032423826827243,
      "learning_rate": 9.282041273233402e-06,
      "loss": 0.894,
      "step": 425
    },
    {
      "epoch": 0.2588801926550271,
      "grad_norm": 1.026066371094123,
      "learning_rate": 9.254663164123052e-06,
      "loss": 0.8999,
      "step": 430
    },
    {
      "epoch": 0.26189042745334135,
      "grad_norm": 1.0694165068256545,
      "learning_rate": 9.226814728196072e-06,
      "loss": 0.8997,
      "step": 435
    },
    {
      "epoch": 0.26490066225165565,
      "grad_norm": 1.0389056305644437,
      "learning_rate": 9.198499043925591e-06,
      "loss": 0.903,
      "step": 440
    },
    {
      "epoch": 0.2679108970499699,
      "grad_norm": 1.0691386715819782,
      "learning_rate": 9.169719241436162e-06,
      "loss": 0.9077,
      "step": 445
    },
    {
      "epoch": 0.27092113184828415,
      "grad_norm": 1.0403573239959019,
      "learning_rate": 9.14047850215775e-06,
      "loss": 0.8856,
      "step": 450
    },
    {
      "epoch": 0.27393136664659845,
      "grad_norm": 1.0321572558705492,
      "learning_rate": 9.110780058474052e-06,
      "loss": 0.8884,
      "step": 455
    },
    {
      "epoch": 0.2769416014449127,
      "grad_norm": 1.0428857005082819,
      "learning_rate": 9.080627193365155e-06,
      "loss": 0.9014,
      "step": 460
    },
    {
      "epoch": 0.27995183624322695,
      "grad_norm": 1.0172513904579585,
      "learning_rate": 9.050023240044649e-06,
      "loss": 0.9041,
      "step": 465
    },
    {
      "epoch": 0.28296207104154125,
      "grad_norm": 1.036966322792995,
      "learning_rate": 9.018971581591141e-06,
      "loss": 0.8921,
      "step": 470
    },
    {
      "epoch": 0.2859723058398555,
      "grad_norm": 1.0042423042637907,
      "learning_rate": 8.987475650574289e-06,
      "loss": 0.9183,
      "step": 475
    },
    {
      "epoch": 0.2889825406381698,
      "grad_norm": 1.0812852684698064,
      "learning_rate": 8.955538928675343e-06,
      "loss": 0.8838,
      "step": 480
    },
    {
      "epoch": 0.29199277543648405,
      "grad_norm": 1.0364976212069876,
      "learning_rate": 8.923164946302274e-06,
      "loss": 0.9004,
      "step": 485
    },
    {
      "epoch": 0.2950030102347983,
      "grad_norm": 1.025560565799461,
      "learning_rate": 8.890357282199504e-06,
      "loss": 0.9082,
      "step": 490
    },
    {
      "epoch": 0.2980132450331126,
      "grad_norm": 1.0120033023068284,
      "learning_rate": 8.857119563052301e-06,
      "loss": 0.8803,
      "step": 495
    },
    {
      "epoch": 0.30102347983142685,
      "grad_norm": 1.0202841174928665,
      "learning_rate": 8.823455463085873e-06,
      "loss": 0.8996,
      "step": 500
    },
    {
      "epoch": 0.3040337146297411,
      "grad_norm": 1.0422192444275267,
      "learning_rate": 8.789368703659199e-06,
      "loss": 0.9037,
      "step": 505
    },
    {
      "epoch": 0.3070439494280554,
      "grad_norm": 1.039216841812148,
      "learning_rate": 8.754863052853658e-06,
      "loss": 0.8962,
      "step": 510
    },
    {
      "epoch": 0.31005418422636966,
      "grad_norm": 1.03300361427746,
      "learning_rate": 8.719942325056496e-06,
      "loss": 0.8751,
      "step": 515
    },
    {
      "epoch": 0.3130644190246839,
      "grad_norm": 1.056478959906002,
      "learning_rate": 8.68461038053916e-06,
      "loss": 0.8828,
      "step": 520
    },
    {
      "epoch": 0.3160746538229982,
      "grad_norm": 1.091546086658953,
      "learning_rate": 8.648871125030576e-06,
      "loss": 0.8854,
      "step": 525
    },
    {
      "epoch": 0.31908488862131246,
      "grad_norm": 1.0222370552684752,
      "learning_rate": 8.612728509285395e-06,
      "loss": 0.8798,
      "step": 530
    },
    {
      "epoch": 0.3220951234196267,
      "grad_norm": 0.9917875099200865,
      "learning_rate": 8.576186528647253e-06,
      "loss": 0.8982,
      "step": 535
    },
    {
      "epoch": 0.325105358217941,
      "grad_norm": 0.9754319321885829,
      "learning_rate": 8.53924922260712e-06,
      "loss": 0.8871,
      "step": 540
    },
    {
      "epoch": 0.32811559301625526,
      "grad_norm": 1.0272701098817443,
      "learning_rate": 8.501920674356755e-06,
      "loss": 0.8713,
      "step": 545
    },
    {
      "epoch": 0.33112582781456956,
      "grad_norm": 1.0815374085785938,
      "learning_rate": 8.46420501033733e-06,
      "loss": 0.8736,
      "step": 550
    },
    {
      "epoch": 0.3341360626128838,
      "grad_norm": 1.073357925922567,
      "learning_rate": 8.42610639978329e-06,
      "loss": 0.8802,
      "step": 555
    },
    {
      "epoch": 0.33714629741119806,
      "grad_norm": 1.0305445781682847,
      "learning_rate": 8.387629054261454e-06,
      "loss": 0.8758,
      "step": 560
    },
    {
      "epoch": 0.34015653220951236,
      "grad_norm": 1.038417001227087,
      "learning_rate": 8.348777227205462e-06,
      "loss": 0.875,
      "step": 565
    },
    {
      "epoch": 0.3431667670078266,
      "grad_norm": 1.072317877743011,
      "learning_rate": 8.309555213445583e-06,
      "loss": 0.8942,
      "step": 570
    },
    {
      "epoch": 0.34617700180614086,
      "grad_norm": 1.0587298691073423,
      "learning_rate": 8.269967348733947e-06,
      "loss": 0.869,
      "step": 575
    },
    {
      "epoch": 0.34918723660445516,
      "grad_norm": 1.0204733096808816,
      "learning_rate": 8.230018009265255e-06,
      "loss": 0.8814,
      "step": 580
    },
    {
      "epoch": 0.3521974714027694,
      "grad_norm": 1.0052226002834497,
      "learning_rate": 8.189711611193012e-06,
      "loss": 0.877,
      "step": 585
    },
    {
      "epoch": 0.35520770620108366,
      "grad_norm": 1.041633913402688,
      "learning_rate": 8.149052610141357e-06,
      "loss": 0.8655,
      "step": 590
    },
    {
      "epoch": 0.35821794099939797,
      "grad_norm": 1.0277280895997796,
      "learning_rate": 8.108045500712518e-06,
      "loss": 0.8827,
      "step": 595
    },
    {
      "epoch": 0.3612281757977122,
      "grad_norm": 1.0193255596334292,
      "learning_rate": 8.066694815989961e-06,
      "loss": 0.8842,
      "step": 600
    },
    {
      "epoch": 0.36423841059602646,
      "grad_norm": 1.0832182052623744,
      "learning_rate": 8.025005127037282e-06,
      "loss": 0.8823,
      "step": 605
    },
    {
      "epoch": 0.36724864539434077,
      "grad_norm": 1.0281183723809044,
      "learning_rate": 7.982981042392907e-06,
      "loss": 0.8714,
      "step": 610
    },
    {
      "epoch": 0.370258880192655,
      "grad_norm": 1.038122520133344,
      "learning_rate": 7.940627207560655e-06,
      "loss": 0.8657,
      "step": 615
    },
    {
      "epoch": 0.3732691149909693,
      "grad_norm": 1.024914409371675,
      "learning_rate": 7.897948304496189e-06,
      "loss": 0.8892,
      "step": 620
    },
    {
      "epoch": 0.37627934978928357,
      "grad_norm": 1.0218762677764746,
      "learning_rate": 7.854949051089467e-06,
      "loss": 0.8908,
      "step": 625
    },
    {
      "epoch": 0.3792895845875978,
      "grad_norm": 1.0227020273788703,
      "learning_rate": 7.811634200643202e-06,
      "loss": 0.8959,
      "step": 630
    },
    {
      "epoch": 0.3822998193859121,
      "grad_norm": 1.0082689155687972,
      "learning_rate": 7.768008541347423e-06,
      "loss": 0.8526,
      "step": 635
    },
    {
      "epoch": 0.38531005418422637,
      "grad_norm": 1.0462978268815175,
      "learning_rate": 7.72407689575016e-06,
      "loss": 0.8735,
      "step": 640
    },
    {
      "epoch": 0.3883202889825406,
      "grad_norm": 1.0709673134969584,
      "learning_rate": 7.67984412022434e-06,
      "loss": 0.8689,
      "step": 645
    },
    {
      "epoch": 0.3913305237808549,
      "grad_norm": 1.060848078120318,
      "learning_rate": 7.635315104430959e-06,
      "loss": 0.8661,
      "step": 650
    },
    {
      "epoch": 0.39434075857916917,
      "grad_norm": 1.0044743257959272,
      "learning_rate": 7.5904947707785434e-06,
      "loss": 0.8502,
      "step": 655
    },
    {
      "epoch": 0.3973509933774834,
      "grad_norm": 1.0265593650109908,
      "learning_rate": 7.545388073879018e-06,
      "loss": 0.8825,
      "step": 660
    },
    {
      "epoch": 0.4003612281757977,
      "grad_norm": 1.0627184843788156,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.8646,
      "step": 665
    },
    {
      "epoch": 0.40337146297411197,
      "grad_norm": 1.0318543066146353,
      "learning_rate": 7.454335566513603e-06,
      "loss": 0.8701,
      "step": 670
    },
    {
      "epoch": 0.4063816977724263,
      "grad_norm": 1.083589178780354,
      "learning_rate": 7.408399821341787e-06,
      "loss": 0.8764,
      "step": 675
    },
    {
      "epoch": 0.4093919325707405,
      "grad_norm": 1.0109683298031107,
      "learning_rate": 7.362197842398355e-06,
      "loss": 0.8595,
      "step": 680
    },
    {
      "epoch": 0.4124021673690548,
      "grad_norm": 1.047285422476373,
      "learning_rate": 7.315734737027612e-06,
      "loss": 0.8668,
      "step": 685
    },
    {
      "epoch": 0.4154124021673691,
      "grad_norm": 1.0406374636538134,
      "learning_rate": 7.2690156414397775e-06,
      "loss": 0.8501,
      "step": 690
    },
    {
      "epoch": 0.4184226369656833,
      "grad_norm": 1.0408378978282196,
      "learning_rate": 7.22204572014322e-06,
      "loss": 0.8541,
      "step": 695
    },
    {
      "epoch": 0.4214328717639976,
      "grad_norm": 1.0597504963355342,
      "learning_rate": 7.174830165373542e-06,
      "loss": 0.8652,
      "step": 700
    },
    {
      "epoch": 0.4244431065623119,
      "grad_norm": 1.0165015320978932,
      "learning_rate": 7.127374196519616e-06,
      "loss": 0.8552,
      "step": 705
    },
    {
      "epoch": 0.4274533413606261,
      "grad_norm": 1.0245034045179864,
      "learning_rate": 7.079683059546607e-06,
      "loss": 0.8647,
      "step": 710
    },
    {
      "epoch": 0.4304635761589404,
      "grad_norm": 1.0138247798670463,
      "learning_rate": 7.031762026416074e-06,
      "loss": 0.8508,
      "step": 715
    },
    {
      "epoch": 0.4334738109572547,
      "grad_norm": 1.0798214937909605,
      "learning_rate": 6.983616394503177e-06,
      "loss": 0.8674,
      "step": 720
    },
    {
      "epoch": 0.43648404575556893,
      "grad_norm": 1.0744753655006976,
      "learning_rate": 6.9352514860110876e-06,
      "loss": 0.8838,
      "step": 725
    },
    {
      "epoch": 0.4394942805538832,
      "grad_norm": 1.0193393304874436,
      "learning_rate": 6.886672647382653e-06,
      "loss": 0.8556,
      "step": 730
    },
    {
      "epoch": 0.4425045153521975,
      "grad_norm": 1.0241683235911196,
      "learning_rate": 6.837885248709386e-06,
      "loss": 0.8869,
      "step": 735
    },
    {
      "epoch": 0.44551475015051173,
      "grad_norm": 1.0560789050729424,
      "learning_rate": 6.788894683137822e-06,
      "loss": 0.8554,
      "step": 740
    },
    {
      "epoch": 0.44852498494882603,
      "grad_norm": 1.0320197701869642,
      "learning_rate": 6.739706366273346e-06,
      "loss": 0.8645,
      "step": 745
    },
    {
      "epoch": 0.4515352197471403,
      "grad_norm": 0.9970260367357067,
      "learning_rate": 6.690325735581532e-06,
      "loss": 0.8502,
      "step": 750
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 1.0077402138627367,
      "learning_rate": 6.640758249787067e-06,
      "loss": 0.867,
      "step": 755
    },
    {
      "epoch": 0.45755568934376883,
      "grad_norm": 1.0638747456346818,
      "learning_rate": 6.591009388270315e-06,
      "loss": 0.8586,
      "step": 760
    },
    {
      "epoch": 0.4605659241420831,
      "grad_norm": 1.0750019486293267,
      "learning_rate": 6.54108465046161e-06,
      "loss": 0.8629,
      "step": 765
    },
    {
      "epoch": 0.46357615894039733,
      "grad_norm": 1.0281089034578543,
      "learning_rate": 6.490989555233328e-06,
      "loss": 0.8567,
      "step": 770
    },
    {
      "epoch": 0.46658639373871164,
      "grad_norm": 1.0488620364160015,
      "learning_rate": 6.440729640289809e-06,
      "loss": 0.8528,
      "step": 775
    },
    {
      "epoch": 0.4695966285370259,
      "grad_norm": 1.0249636337962247,
      "learning_rate": 6.3903104615551956e-06,
      "loss": 0.8458,
      "step": 780
    },
    {
      "epoch": 0.47260686333534013,
      "grad_norm": 1.0419830712006013,
      "learning_rate": 6.3397375925592675e-06,
      "loss": 0.8307,
      "step": 785
    },
    {
      "epoch": 0.47561709813365444,
      "grad_norm": 1.0164769677067202,
      "learning_rate": 6.289016623821308e-06,
      "loss": 0.8565,
      "step": 790
    },
    {
      "epoch": 0.4786273329319687,
      "grad_norm": 1.0228613150621668,
      "learning_rate": 6.2381531622321234e-06,
      "loss": 0.8573,
      "step": 795
    },
    {
      "epoch": 0.481637567730283,
      "grad_norm": 1.075852284931363,
      "learning_rate": 6.18715283043422e-06,
      "loss": 0.8494,
      "step": 800
    },
    {
      "epoch": 0.48464780252859724,
      "grad_norm": 1.0349508902074944,
      "learning_rate": 6.136021266200271e-06,
      "loss": 0.8563,
      "step": 805
    },
    {
      "epoch": 0.4876580373269115,
      "grad_norm": 1.0924038198527715,
      "learning_rate": 6.084764121809878e-06,
      "loss": 0.8479,
      "step": 810
    },
    {
      "epoch": 0.4906682721252258,
      "grad_norm": 1.0471168501487311,
      "learning_rate": 6.033387063424765e-06,
      "loss": 0.8436,
      "step": 815
    },
    {
      "epoch": 0.49367850692354004,
      "grad_norm": 1.0224660906723329,
      "learning_rate": 5.9818957704624046e-06,
      "loss": 0.8654,
      "step": 820
    },
    {
      "epoch": 0.4966887417218543,
      "grad_norm": 1.0211655258926158,
      "learning_rate": 5.930295934968197e-06,
      "loss": 0.8463,
      "step": 825
    },
    {
      "epoch": 0.4996989765201686,
      "grad_norm": 1.0195137398082676,
      "learning_rate": 5.878593260986256e-06,
      "loss": 0.8465,
      "step": 830
    },
    {
      "epoch": 0.5027092113184829,
      "grad_norm": 1.0602817409712544,
      "learning_rate": 5.8267934639288525e-06,
      "loss": 0.8608,
      "step": 835
    },
    {
      "epoch": 0.5057194461167971,
      "grad_norm": 1.061046137706525,
      "learning_rate": 5.77490226994462e-06,
      "loss": 0.8609,
      "step": 840
    },
    {
      "epoch": 0.5087296809151114,
      "grad_norm": 1.051889967104703,
      "learning_rate": 5.722925415285555e-06,
      "loss": 0.8664,
      "step": 845
    },
    {
      "epoch": 0.5117399157134257,
      "grad_norm": 1.072317194927684,
      "learning_rate": 5.670868645672916e-06,
      "loss": 0.8583,
      "step": 850
    },
    {
      "epoch": 0.5147501505117399,
      "grad_norm": 1.0465774872570925,
      "learning_rate": 5.618737715662067e-06,
      "loss": 0.848,
      "step": 855
    },
    {
      "epoch": 0.5177603853100542,
      "grad_norm": 1.029456210016313,
      "learning_rate": 5.566538388006351e-06,
      "loss": 0.8515,
      "step": 860
    },
    {
      "epoch": 0.5207706201083685,
      "grad_norm": 1.0018249989098675,
      "learning_rate": 5.514276433020044e-06,
      "loss": 0.8426,
      "step": 865
    },
    {
      "epoch": 0.5237808549066827,
      "grad_norm": 1.0794333658356523,
      "learning_rate": 5.461957627940489e-06,
      "loss": 0.8411,
      "step": 870
    },
    {
      "epoch": 0.526791089704997,
      "grad_norm": 1.0658005957225543,
      "learning_rate": 5.409587756289462e-06,
      "loss": 0.8372,
      "step": 875
    },
    {
      "epoch": 0.5298013245033113,
      "grad_norm": 1.0361101879365158,
      "learning_rate": 5.357172607233831e-06,
      "loss": 0.8421,
      "step": 880
    },
    {
      "epoch": 0.5328115593016255,
      "grad_norm": 1.086943198789344,
      "learning_rate": 5.304717974945596e-06,
      "loss": 0.8403,
      "step": 885
    },
    {
      "epoch": 0.5358217940999398,
      "grad_norm": 1.0481795809429144,
      "learning_rate": 5.252229657961394e-06,
      "loss": 0.8441,
      "step": 890
    },
    {
      "epoch": 0.5388320288982541,
      "grad_norm": 1.0138631327859517,
      "learning_rate": 5.199713458541495e-06,
      "loss": 0.8506,
      "step": 895
    },
    {
      "epoch": 0.5418422636965683,
      "grad_norm": 1.0256732385631195,
      "learning_rate": 5.1471751820284e-06,
      "loss": 0.8347,
      "step": 900
    },
    {
      "epoch": 0.5448524984948826,
      "grad_norm": 1.0356944481021555,
      "learning_rate": 5.094620636205096e-06,
      "loss": 0.8384,
      "step": 905
    },
    {
      "epoch": 0.5478627332931969,
      "grad_norm": 1.022741189017134,
      "learning_rate": 5.042055630653042e-06,
      "loss": 0.8394,
      "step": 910
    },
    {
      "epoch": 0.5508729680915111,
      "grad_norm": 1.0919072566453618,
      "learning_rate": 4.98948597610996e-06,
      "loss": 0.834,
      "step": 915
    },
    {
      "epoch": 0.5538832028898254,
      "grad_norm": 1.037713436516078,
      "learning_rate": 4.936917483827483e-06,
      "loss": 0.8284,
      "step": 920
    },
    {
      "epoch": 0.5568934376881397,
      "grad_norm": 1.0400429976889096,
      "learning_rate": 4.884355964928767e-06,
      "loss": 0.8635,
      "step": 925
    },
    {
      "epoch": 0.5599036724864539,
      "grad_norm": 0.9928760988451867,
      "learning_rate": 4.831807229766101e-06,
      "loss": 0.8255,
      "step": 930
    },
    {
      "epoch": 0.5629139072847682,
      "grad_norm": 1.1246842464041702,
      "learning_rate": 4.779277087278615e-06,
      "loss": 0.8395,
      "step": 935
    },
    {
      "epoch": 0.5659241420830825,
      "grad_norm": 1.0309925607948338,
      "learning_rate": 4.7267713443501274e-06,
      "loss": 0.8544,
      "step": 940
    },
    {
      "epoch": 0.5689343768813967,
      "grad_norm": 1.0124528992084696,
      "learning_rate": 4.67429580516724e-06,
      "loss": 0.8344,
      "step": 945
    },
    {
      "epoch": 0.571944611679711,
      "grad_norm": 1.0217717889794504,
      "learning_rate": 4.6218562705777185e-06,
      "loss": 0.8295,
      "step": 950
    },
    {
      "epoch": 0.5749548464780253,
      "grad_norm": 1.0542480246959662,
      "learning_rate": 4.5694585374492314e-06,
      "loss": 0.8375,
      "step": 955
    },
    {
      "epoch": 0.5779650812763396,
      "grad_norm": 1.046871667593415,
      "learning_rate": 4.517108398028566e-06,
      "loss": 0.849,
      "step": 960
    },
    {
      "epoch": 0.5809753160746538,
      "grad_norm": 1.0378115278601967,
      "learning_rate": 4.464811639301314e-06,
      "loss": 0.8331,
      "step": 965
    },
    {
      "epoch": 0.5839855508729681,
      "grad_norm": 1.0560995260768353,
      "learning_rate": 4.412574042352156e-06,
      "loss": 0.8472,
      "step": 970
    },
    {
      "epoch": 0.5869957856712824,
      "grad_norm": 1.0687109447058267,
      "learning_rate": 4.360401381725806e-06,
      "loss": 0.8317,
      "step": 975
    },
    {
      "epoch": 0.5900060204695966,
      "grad_norm": 1.0523892138754463,
      "learning_rate": 4.308299424788667e-06,
      "loss": 0.85,
      "step": 980
    },
    {
      "epoch": 0.5930162552679109,
      "grad_norm": 1.0187828271643544,
      "learning_rate": 4.256273931091284e-06,
      "loss": 0.8395,
      "step": 985
    },
    {
      "epoch": 0.5960264900662252,
      "grad_norm": 1.0220180896984419,
      "learning_rate": 4.204330651731662e-06,
      "loss": 0.8329,
      "step": 990
    },
    {
      "epoch": 0.5990367248645394,
      "grad_norm": 1.0205596148401899,
      "learning_rate": 4.152475328719517e-06,
      "loss": 0.831,
      "step": 995
    },
    {
      "epoch": 0.6020469596628537,
      "grad_norm": 1.040912209584121,
      "learning_rate": 4.1007136943415325e-06,
      "loss": 0.8201,
      "step": 1000
    },
    {
      "epoch": 0.605057194461168,
      "grad_norm": 1.0001879021313027,
      "learning_rate": 4.049051470527692e-06,
      "loss": 0.8423,
      "step": 1005
    },
    {
      "epoch": 0.6080674292594822,
      "grad_norm": 1.0398076808150207,
      "learning_rate": 3.997494368218745e-06,
      "loss": 0.8309,
      "step": 1010
    },
    {
      "epoch": 0.6110776640577965,
      "grad_norm": 1.0959850286337298,
      "learning_rate": 3.946048086734921e-06,
      "loss": 0.8471,
      "step": 1015
    },
    {
      "epoch": 0.6140878988561108,
      "grad_norm": 1.074686489630975,
      "learning_rate": 3.894718313145873e-06,
      "loss": 0.8361,
      "step": 1020
    },
    {
      "epoch": 0.617098133654425,
      "grad_norm": 1.033345644977548,
      "learning_rate": 3.843510721642036e-06,
      "loss": 0.8228,
      "step": 1025
    },
    {
      "epoch": 0.6201083684527393,
      "grad_norm": 1.0159701920636455,
      "learning_rate": 3.7924309729073616e-06,
      "loss": 0.8281,
      "step": 1030
    },
    {
      "epoch": 0.6231186032510536,
      "grad_norm": 1.0886803763608572,
      "learning_rate": 3.7414847134935716e-06,
      "loss": 0.8282,
      "step": 1035
    },
    {
      "epoch": 0.6261288380493678,
      "grad_norm": 1.0677088993104988,
      "learning_rate": 3.6906775751959667e-06,
      "loss": 0.8305,
      "step": 1040
    },
    {
      "epoch": 0.6291390728476821,
      "grad_norm": 1.0672615560402277,
      "learning_rate": 3.640015174430864e-06,
      "loss": 0.8131,
      "step": 1045
    },
    {
      "epoch": 0.6321493076459964,
      "grad_norm": 1.0365030160987536,
      "learning_rate": 3.5895031116147355e-06,
      "loss": 0.829,
      "step": 1050
    },
    {
      "epoch": 0.6351595424443106,
      "grad_norm": 1.0441297603160167,
      "learning_rate": 3.539146970545124e-06,
      "loss": 0.8311,
      "step": 1055
    },
    {
      "epoch": 0.6381697772426249,
      "grad_norm": 1.0678910916370306,
      "learning_rate": 3.488952317783374e-06,
      "loss": 0.8251,
      "step": 1060
    },
    {
      "epoch": 0.6411800120409392,
      "grad_norm": 1.0182442454102976,
      "learning_rate": 3.438924702039301e-06,
      "loss": 0.824,
      "step": 1065
    },
    {
      "epoch": 0.6441902468392534,
      "grad_norm": 1.0362344445465297,
      "learning_rate": 3.389069653557805e-06,
      "loss": 0.8324,
      "step": 1070
    },
    {
      "epoch": 0.6472004816375677,
      "grad_norm": 1.0321140319577164,
      "learning_rate": 3.3393926835075307e-06,
      "loss": 0.8293,
      "step": 1075
    },
    {
      "epoch": 0.650210716435882,
      "grad_norm": 1.0518576177949832,
      "learning_rate": 3.289899283371657e-06,
      "loss": 0.8164,
      "step": 1080
    },
    {
      "epoch": 0.6532209512341962,
      "grad_norm": 1.117410399435437,
      "learning_rate": 3.240594924340835e-06,
      "loss": 0.8215,
      "step": 1085
    },
    {
      "epoch": 0.6562311860325105,
      "grad_norm": 1.0503940857197025,
      "learning_rate": 3.1914850567083866e-06,
      "loss": 0.8276,
      "step": 1090
    },
    {
      "epoch": 0.6592414208308248,
      "grad_norm": 1.0752717357492687,
      "learning_rate": 3.1425751092678064e-06,
      "loss": 0.8154,
      "step": 1095
    },
    {
      "epoch": 0.6622516556291391,
      "grad_norm": 1.0233560268626287,
      "learning_rate": 3.0938704887126425e-06,
      "loss": 0.8094,
      "step": 1100
    },
    {
      "epoch": 0.6652618904274533,
      "grad_norm": 1.011199570119972,
      "learning_rate": 3.045376579038821e-06,
      "loss": 0.812,
      "step": 1105
    },
    {
      "epoch": 0.6682721252257676,
      "grad_norm": 1.0515169328232772,
      "learning_rate": 2.9970987409494784e-06,
      "loss": 0.8456,
      "step": 1110
    },
    {
      "epoch": 0.6712823600240819,
      "grad_norm": 1.0464985847353594,
      "learning_rate": 2.9490423112623646e-06,
      "loss": 0.8166,
      "step": 1115
    },
    {
      "epoch": 0.6742925948223961,
      "grad_norm": 1.0390545724673643,
      "learning_rate": 2.9012126023198973e-06,
      "loss": 0.8237,
      "step": 1120
    },
    {
      "epoch": 0.6773028296207104,
      "grad_norm": 1.0422664657187024,
      "learning_rate": 2.853614901401909e-06,
      "loss": 0.8044,
      "step": 1125
    },
    {
      "epoch": 0.6803130644190247,
      "grad_norm": 1.027410279635157,
      "learning_rate": 2.806254470141174e-06,
      "loss": 0.8261,
      "step": 1130
    },
    {
      "epoch": 0.6833232992173389,
      "grad_norm": 1.0096212394827309,
      "learning_rate": 2.759136543941773e-06,
      "loss": 0.8002,
      "step": 1135
    },
    {
      "epoch": 0.6863335340156532,
      "grad_norm": 1.0314561754682732,
      "learning_rate": 2.712266331400332e-06,
      "loss": 0.8084,
      "step": 1140
    },
    {
      "epoch": 0.6893437688139675,
      "grad_norm": 1.0393657468292272,
      "learning_rate": 2.66564901373027e-06,
      "loss": 0.8203,
      "step": 1145
    },
    {
      "epoch": 0.6923540036122817,
      "grad_norm": 1.0321818133370786,
      "learning_rate": 2.6192897441890337e-06,
      "loss": 0.8104,
      "step": 1150
    },
    {
      "epoch": 0.695364238410596,
      "grad_norm": 1.0333683135026992,
      "learning_rate": 2.573193647508426e-06,
      "loss": 0.8135,
      "step": 1155
    },
    {
      "epoch": 0.6983744732089103,
      "grad_norm": 0.9971600689374603,
      "learning_rate": 2.5273658193281252e-06,
      "loss": 0.8014,
      "step": 1160
    },
    {
      "epoch": 0.7013847080072245,
      "grad_norm": 1.0714545318723498,
      "learning_rate": 2.4818113256323745e-06,
      "loss": 0.8178,
      "step": 1165
    },
    {
      "epoch": 0.7043949428055388,
      "grad_norm": 1.0688985938039246,
      "learning_rate": 2.4365352021899635e-06,
      "loss": 0.8204,
      "step": 1170
    },
    {
      "epoch": 0.7074051776038531,
      "grad_norm": 1.0354606458390736,
      "learning_rate": 2.391542453997578e-06,
      "loss": 0.8089,
      "step": 1175
    },
    {
      "epoch": 0.7104154124021673,
      "grad_norm": 1.0579010566201743,
      "learning_rate": 2.346838054726505e-06,
      "loss": 0.8093,
      "step": 1180
    },
    {
      "epoch": 0.7134256472004816,
      "grad_norm": 1.0289112089155492,
      "learning_rate": 2.302426946172836e-06,
      "loss": 0.8239,
      "step": 1185
    },
    {
      "epoch": 0.7164358819987959,
      "grad_norm": 1.0382382418654816,
      "learning_rate": 2.258314037711184e-06,
      "loss": 0.8158,
      "step": 1190
    },
    {
      "epoch": 0.7194461167971101,
      "grad_norm": 1.039809551524263,
      "learning_rate": 2.214504205751971e-06,
      "loss": 0.8324,
      "step": 1195
    },
    {
      "epoch": 0.7224563515954244,
      "grad_norm": 1.0044596359240077,
      "learning_rate": 2.1710022932023805e-06,
      "loss": 0.8181,
      "step": 1200
    },
    {
      "epoch": 0.7254665863937387,
      "grad_norm": 1.0210161092801369,
      "learning_rate": 2.127813108931007e-06,
      "loss": 0.819,
      "step": 1205
    },
    {
      "epoch": 0.7284768211920529,
      "grad_norm": 1.0190275126749935,
      "learning_rate": 2.084941427236245e-06,
      "loss": 0.8215,
      "step": 1210
    },
    {
      "epoch": 0.7314870559903672,
      "grad_norm": 1.0441883702294767,
      "learning_rate": 2.04239198731855e-06,
      "loss": 0.8108,
      "step": 1215
    },
    {
      "epoch": 0.7344972907886815,
      "grad_norm": 1.0305419985469728,
      "learning_rate": 2.000169492756523e-06,
      "loss": 0.821,
      "step": 1220
    },
    {
      "epoch": 0.7375075255869958,
      "grad_norm": 1.0474048248439474,
      "learning_rate": 1.9582786109869713e-06,
      "loss": 0.8173,
      "step": 1225
    },
    {
      "epoch": 0.74051776038531,
      "grad_norm": 1.047276514055598,
      "learning_rate": 1.9167239727889527e-06,
      "loss": 0.8182,
      "step": 1230
    },
    {
      "epoch": 0.7435279951836243,
      "grad_norm": 1.0650577544866475,
      "learning_rate": 1.875510171771865e-06,
      "loss": 0.8047,
      "step": 1235
    },
    {
      "epoch": 0.7465382299819386,
      "grad_norm": 1.039137337049237,
      "learning_rate": 1.8346417638676533e-06,
      "loss": 0.8004,
      "step": 1240
    },
    {
      "epoch": 0.7495484647802528,
      "grad_norm": 1.0943258261224829,
      "learning_rate": 1.7941232668271863e-06,
      "loss": 0.8223,
      "step": 1245
    },
    {
      "epoch": 0.7525586995785671,
      "grad_norm": 1.049969345568919,
      "learning_rate": 1.753959159720836e-06,
      "loss": 0.8124,
      "step": 1250
    },
    {
      "epoch": 0.7555689343768814,
      "grad_norm": 1.0323176464169084,
      "learning_rate": 1.7141538824433506e-06,
      "loss": 0.8163,
      "step": 1255
    },
    {
      "epoch": 0.7585791691751956,
      "grad_norm": 1.0713783017354832,
      "learning_rate": 1.6747118352230495e-06,
      "loss": 0.812,
      "step": 1260
    },
    {
      "epoch": 0.7615894039735099,
      "grad_norm": 1.0307746629765189,
      "learning_rate": 1.6356373781354058e-06,
      "loss": 0.7918,
      "step": 1265
    },
    {
      "epoch": 0.7645996387718242,
      "grad_norm": 1.0450820274010493,
      "learning_rate": 1.5969348306210692e-06,
      "loss": 0.8095,
      "step": 1270
    },
    {
      "epoch": 0.7676098735701384,
      "grad_norm": 1.0579929378641193,
      "learning_rate": 1.5586084710083737e-06,
      "loss": 0.8127,
      "step": 1275
    },
    {
      "epoch": 0.7706201083684527,
      "grad_norm": 1.0235264382567837,
      "learning_rate": 1.5206625360403943e-06,
      "loss": 0.8056,
      "step": 1280
    },
    {
      "epoch": 0.773630343166767,
      "grad_norm": 0.9995792397694476,
      "learning_rate": 1.4831012204066114e-06,
      "loss": 0.8165,
      "step": 1285
    },
    {
      "epoch": 0.7766405779650812,
      "grad_norm": 1.0207367210720288,
      "learning_rate": 1.445928676279199e-06,
      "loss": 0.7994,
      "step": 1290
    },
    {
      "epoch": 0.7796508127633955,
      "grad_norm": 1.0378813038666916,
      "learning_rate": 1.4091490128540374e-06,
      "loss": 0.7898,
      "step": 1295
    },
    {
      "epoch": 0.7826610475617098,
      "grad_norm": 1.0170522177446832,
      "learning_rate": 1.3727662958964627e-06,
      "loss": 0.8059,
      "step": 1300
    },
    {
      "epoch": 0.785671282360024,
      "grad_norm": 1.002759687451583,
      "learning_rate": 1.3367845472918272e-06,
      "loss": 0.8145,
      "step": 1305
    },
    {
      "epoch": 0.7886815171583383,
      "grad_norm": 1.0717784571676352,
      "learning_rate": 1.3012077446008969e-06,
      "loss": 0.8078,
      "step": 1310
    },
    {
      "epoch": 0.7916917519566526,
      "grad_norm": 1.0266680645611,
      "learning_rate": 1.266039820620159e-06,
      "loss": 0.8082,
      "step": 1315
    },
    {
      "epoch": 0.7947019867549668,
      "grad_norm": 1.0418056632952235,
      "learning_rate": 1.2312846629470826e-06,
      "loss": 0.7941,
      "step": 1320
    },
    {
      "epoch": 0.7977122215532811,
      "grad_norm": 1.0330602472030925,
      "learning_rate": 1.1969461135503573e-06,
      "loss": 0.8089,
      "step": 1325
    },
    {
      "epoch": 0.8007224563515954,
      "grad_norm": 1.0326801178315057,
      "learning_rate": 1.163027968345195e-06,
      "loss": 0.8092,
      "step": 1330
    },
    {
      "epoch": 0.8037326911499096,
      "grad_norm": 1.0596116976868275,
      "learning_rate": 1.1295339767737125e-06,
      "loss": 0.7975,
      "step": 1335
    },
    {
      "epoch": 0.8067429259482239,
      "grad_norm": 1.0438113622873293,
      "learning_rate": 1.0964678413904529e-06,
      "loss": 0.8107,
      "step": 1340
    },
    {
      "epoch": 0.8097531607465382,
      "grad_norm": 1.0219389795912814,
      "learning_rate": 1.0638332174530953e-06,
      "loss": 0.8192,
      "step": 1345
    },
    {
      "epoch": 0.8127633955448526,
      "grad_norm": 1.036771538286032,
      "learning_rate": 1.0316337125183817e-06,
      "loss": 0.8067,
      "step": 1350
    },
    {
      "epoch": 0.8157736303431667,
      "grad_norm": 1.025748011070849,
      "learning_rate": 9.998728860433277e-07,
      "loss": 0.804,
      "step": 1355
    },
    {
      "epoch": 0.818783865141481,
      "grad_norm": 1.0266024772426974,
      "learning_rate": 9.685542489917494e-07,
      "loss": 0.7877,
      "step": 1360
    },
    {
      "epoch": 0.8217940999397954,
      "grad_norm": 1.053826897283154,
      "learning_rate": 9.376812634461418e-07,
      "loss": 0.796,
      "step": 1365
    },
    {
      "epoch": 0.8248043347381095,
      "grad_norm": 1.0434406635611175,
      "learning_rate": 9.072573422249692e-07,
      "loss": 0.8172,
      "step": 1370
    },
    {
      "epoch": 0.8278145695364238,
      "grad_norm": 1.0537547493539212,
      "learning_rate": 8.772858485054042e-07,
      "loss": 0.8122,
      "step": 1375
    },
    {
      "epoch": 0.8308248043347382,
      "grad_norm": 1.0237142125748788,
      "learning_rate": 8.477700954515372e-07,
      "loss": 0.8084,
      "step": 1380
    },
    {
      "epoch": 0.8338350391330523,
      "grad_norm": 1.021758110729101,
      "learning_rate": 8.187133458481416e-07,
      "loss": 0.8096,
      "step": 1385
    },
    {
      "epoch": 0.8368452739313667,
      "grad_norm": 1.038850029152452,
      "learning_rate": 7.901188117399817e-07,
      "loss": 0.8146,
      "step": 1390
    },
    {
      "epoch": 0.839855508729681,
      "grad_norm": 1.056879030430651,
      "learning_rate": 7.619896540767435e-07,
      "loss": 0.8205,
      "step": 1395
    },
    {
      "epoch": 0.8428657435279951,
      "grad_norm": 1.0410235485563226,
      "learning_rate": 7.343289823636168e-07,
      "loss": 0.813,
      "step": 1400
    },
    {
      "epoch": 0.8458759783263095,
      "grad_norm": 1.059559749232041,
      "learning_rate": 7.0713985431755e-07,
      "loss": 0.811,
      "step": 1405
    },
    {
      "epoch": 0.8488862131246238,
      "grad_norm": 1.0070532616145602,
      "learning_rate": 6.804252755292429e-07,
      "loss": 0.7892,
      "step": 1410
    },
    {
      "epoch": 0.851896447922938,
      "grad_norm": 1.039370261590547,
      "learning_rate": 6.541881991309013e-07,
      "loss": 0.8055,
      "step": 1415
    },
    {
      "epoch": 0.8549066827212523,
      "grad_norm": 1.0424718469075611,
      "learning_rate": 6.284315254697726e-07,
      "loss": 0.8089,
      "step": 1420
    },
    {
      "epoch": 0.8579169175195666,
      "grad_norm": 1.032728933766913,
      "learning_rate": 6.031581017875482e-07,
      "loss": 0.8109,
      "step": 1425
    },
    {
      "epoch": 0.8609271523178808,
      "grad_norm": 1.0241874679127938,
      "learning_rate": 5.783707219056078e-07,
      "loss": 0.8056,
      "step": 1430
    },
    {
      "epoch": 0.863937387116195,
      "grad_norm": 1.0185177289645166,
      "learning_rate": 5.540721259161774e-07,
      "loss": 0.808,
      "step": 1435
    },
    {
      "epoch": 0.8669476219145094,
      "grad_norm": 1.0446166216421613,
      "learning_rate": 5.302649998794368e-07,
      "loss": 0.8157,
      "step": 1440
    },
    {
      "epoch": 0.8699578567128236,
      "grad_norm": 1.0138337149380894,
      "learning_rate": 5.0695197552659e-07,
      "loss": 0.7973,
      "step": 1445
    },
    {
      "epoch": 0.8729680915111379,
      "grad_norm": 1.0269724233473905,
      "learning_rate": 4.841356299689359e-07,
      "loss": 0.8094,
      "step": 1450
    },
    {
      "epoch": 0.8759783263094522,
      "grad_norm": 1.0545396346249256,
      "learning_rate": 4.618184854129981e-07,
      "loss": 0.8127,
      "step": 1455
    },
    {
      "epoch": 0.8789885611077664,
      "grad_norm": 1.047715081854748,
      "learning_rate": 4.4000300888169753e-07,
      "loss": 0.8051,
      "step": 1460
    },
    {
      "epoch": 0.8819987959060807,
      "grad_norm": 1.0395135440264711,
      "learning_rate": 4.1869161194164565e-07,
      "loss": 0.8084,
      "step": 1465
    },
    {
      "epoch": 0.885009030704395,
      "grad_norm": 1.0503567677831138,
      "learning_rate": 3.9788665043656083e-07,
      "loss": 0.8034,
      "step": 1470
    },
    {
      "epoch": 0.8880192655027093,
      "grad_norm": 1.0207611042877882,
      "learning_rate": 3.775904242268391e-07,
      "loss": 0.8212,
      "step": 1475
    },
    {
      "epoch": 0.8910295003010235,
      "grad_norm": 1.0191123841112446,
      "learning_rate": 3.578051769353219e-07,
      "loss": 0.8124,
      "step": 1480
    },
    {
      "epoch": 0.8940397350993378,
      "grad_norm": 1.0356740138027982,
      "learning_rate": 3.385330956992816e-07,
      "loss": 0.8072,
      "step": 1485
    },
    {
      "epoch": 0.8970499698976521,
      "grad_norm": 1.0311377001693454,
      "learning_rate": 3.1977631092863613e-07,
      "loss": 0.8078,
      "step": 1490
    },
    {
      "epoch": 0.9000602046959663,
      "grad_norm": 1.0192271510205593,
      "learning_rate": 3.015368960704584e-07,
      "loss": 0.792,
      "step": 1495
    },
    {
      "epoch": 0.9030704394942806,
      "grad_norm": 1.0341426167681713,
      "learning_rate": 2.8381686737975867e-07,
      "loss": 0.8119,
      "step": 1500
    },
    {
      "epoch": 0.9060806742925949,
      "grad_norm": 1.0298501396809976,
      "learning_rate": 2.666181836966053e-07,
      "loss": 0.8128,
      "step": 1505
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 1.0220156234933062,
      "learning_rate": 2.4994274622958726e-07,
      "loss": 0.8122,
      "step": 1510
    },
    {
      "epoch": 0.9121011438892234,
      "grad_norm": 1.0132135807820002,
      "learning_rate": 2.3379239834564526e-07,
      "loss": 0.8037,
      "step": 1515
    },
    {
      "epoch": 0.9151113786875377,
      "grad_norm": 1.0712029435414339,
      "learning_rate": 2.1816892536629775e-07,
      "loss": 0.8126,
      "step": 1520
    },
    {
      "epoch": 0.9181216134858519,
      "grad_norm": 1.0109661008831858,
      "learning_rate": 2.0307405437029027e-07,
      "loss": 0.7985,
      "step": 1525
    },
    {
      "epoch": 0.9211318482841662,
      "grad_norm": 1.0503987675093023,
      "learning_rate": 1.8850945400266994e-07,
      "loss": 0.8154,
      "step": 1530
    },
    {
      "epoch": 0.9241420830824805,
      "grad_norm": 1.0045462111154957,
      "learning_rate": 1.7447673429033361e-07,
      "loss": 0.7981,
      "step": 1535
    },
    {
      "epoch": 0.9271523178807947,
      "grad_norm": 1.010463238088656,
      "learning_rate": 1.6097744646404457e-07,
      "loss": 0.8032,
      "step": 1540
    },
    {
      "epoch": 0.930162552679109,
      "grad_norm": 1.041872770124601,
      "learning_rate": 1.4801308278695636e-07,
      "loss": 0.8071,
      "step": 1545
    },
    {
      "epoch": 0.9331727874774233,
      "grad_norm": 1.0364425032179982,
      "learning_rate": 1.3558507638965158e-07,
      "loss": 0.8098,
      "step": 1550
    },
    {
      "epoch": 0.9361830222757375,
      "grad_norm": 1.0561466427450865,
      "learning_rate": 1.2369480111171784e-07,
      "loss": 0.8052,
      "step": 1555
    },
    {
      "epoch": 0.9391932570740518,
      "grad_norm": 1.0225495666991897,
      "learning_rate": 1.1234357134987717e-07,
      "loss": 0.7969,
      "step": 1560
    },
    {
      "epoch": 0.9422034918723661,
      "grad_norm": 1.0242932807053133,
      "learning_rate": 1.0153264191269052e-07,
      "loss": 0.8135,
      "step": 1565
    },
    {
      "epoch": 0.9452137266706803,
      "grad_norm": 1.042306867935529,
      "learning_rate": 9.126320788184374e-08,
      "loss": 0.8007,
      "step": 1570
    },
    {
      "epoch": 0.9482239614689946,
      "grad_norm": 1.0312589182719134,
      "learning_rate": 8.153640448003875e-08,
      "loss": 0.7923,
      "step": 1575
    },
    {
      "epoch": 0.9512341962673089,
      "grad_norm": 1.0317620067093864,
      "learning_rate": 7.235330694550402e-08,
      "loss": 0.8166,
      "step": 1580
    },
    {
      "epoch": 0.9542444310656231,
      "grad_norm": 1.0205653409861422,
      "learning_rate": 6.371493041313126e-08,
      "loss": 0.7941,
      "step": 1585
    },
    {
      "epoch": 0.9572546658639374,
      "grad_norm": 1.0236513464237393,
      "learning_rate": 5.562222980225907e-08,
      "loss": 0.803,
      "step": 1590
    },
    {
      "epoch": 0.9602649006622517,
      "grad_norm": 1.0009271094917365,
      "learning_rate": 4.807609971111238e-08,
      "loss": 0.7929,
      "step": 1595
    },
    {
      "epoch": 0.963275135460566,
      "grad_norm": 1.0278498971825467,
      "learning_rate": 4.107737431791159e-08,
      "loss": 0.815,
      "step": 1600
    },
    {
      "epoch": 0.9662853702588802,
      "grad_norm": 1.0128171451294188,
      "learning_rate": 3.462682728865685e-08,
      "loss": 0.8038,
      "step": 1605
    },
    {
      "epoch": 0.9692956050571945,
      "grad_norm": 1.0361890758527714,
      "learning_rate": 2.8725171691605934e-08,
      "loss": 0.8124,
      "step": 1610
    },
    {
      "epoch": 0.9723058398555088,
      "grad_norm": 1.0128613768201065,
      "learning_rate": 2.3373059918448958e-08,
      "loss": 0.7996,
      "step": 1615
    },
    {
      "epoch": 0.975316074653823,
      "grad_norm": 1.0528001409961834,
      "learning_rate": 1.8571083612188845e-08,
      "loss": 0.8132,
      "step": 1620
    },
    {
      "epoch": 0.9783263094521373,
      "grad_norm": 1.0408652799513454,
      "learning_rate": 1.431977360173975e-08,
      "loss": 0.7996,
      "step": 1625
    },
    {
      "epoch": 0.9813365442504516,
      "grad_norm": 1.0463227914292674,
      "learning_rate": 1.0619599843249006e-08,
      "loss": 0.8056,
      "step": 1630
    },
    {
      "epoch": 0.9843467790487658,
      "grad_norm": 1.047367071297636,
      "learning_rate": 7.470971368142011e-09,
      "loss": 0.8129,
      "step": 1635
    },
    {
      "epoch": 0.9873570138470801,
      "grad_norm": 1.0050037805593828,
      "learning_rate": 4.874236237911723e-09,
      "loss": 0.8035,
      "step": 1640
    },
    {
      "epoch": 0.9903672486453944,
      "grad_norm": 1.044060059047643,
      "learning_rate": 2.8296815056377824e-09,
      "loss": 0.8164,
      "step": 1645
    },
    {
      "epoch": 0.9933774834437086,
      "grad_norm": 1.0565007043243317,
      "learning_rate": 1.3375331842574446e-09,
      "loss": 0.8285,
      "step": 1650
    },
    {
      "epoch": 0.9963877182420229,
      "grad_norm": 1.0168335467595218,
      "learning_rate": 3.9795622158111945e-10,
      "loss": 0.8084,
      "step": 1655
    },
    {
      "epoch": 0.9993979530403372,
      "grad_norm": 1.0278311447859119,
      "learning_rate": 1.1054482056405136e-11,
      "loss": 0.8055,
      "step": 1660
    },
    {
      "epoch": 1.0,
      "eval_runtime": 1.2973,
      "eval_samples_per_second": 7.708,
      "eval_steps_per_second": 2.313,
      "step": 1661
    },
    {
      "epoch": 1.0,
      "step": 1661,
      "total_flos": 161851279147008.0,
      "train_loss": 0.8951905027058812,
      "train_runtime": 4028.5705,
      "train_samples_per_second": 6.596,
      "train_steps_per_second": 0.412
    }
  ],
  "logging_steps": 5,
  "max_steps": 1661,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 161851279147008.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}