|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.11911940976332462,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.7443463702742214e-06, |
|
"loss": 0.9983, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.6776215359677665e-06, |
|
"loss": 0.9472, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.205772220091474e-06, |
|
"loss": 0.8982, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.5758099602275054e-06, |
|
"loss": 0.9054, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.860906353882052e-06, |
|
"loss": 0.8821, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 5.071517109647391e-06, |
|
"loss": 0.8922, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 5.270189845174036e-06, |
|
"loss": 0.9029, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 5.441554849783425e-06, |
|
"loss": 0.9273, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 5.592219913271158e-06, |
|
"loss": 0.8706, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 5.726651243437972e-06, |
|
"loss": 0.8855, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 5.848009334523169e-06, |
|
"loss": 0.8569, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 5.958613046622581e-06, |
|
"loss": 0.87, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 6.0602142268914325e-06, |
|
"loss": 0.8761, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 6.1541687207389654e-06, |
|
"loss": 0.8512, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 6.241547407784983e-06, |
|
"loss": 0.8554, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 6.323210857171233e-06, |
|
"loss": 0.8594, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.399861030745893e-06, |
|
"loss": 0.8442, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.465041342203772e-06, |
|
"loss": 0.8634, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.533685310665599e-06, |
|
"loss": 0.8689, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.598752410624751e-06, |
|
"loss": 0.908, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.660597016320156e-06, |
|
"loss": 0.8696, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.719523314515253e-06, |
|
"loss": 0.862, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.775794355733718e-06, |
|
"loss": 0.8188, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.829639152752426e-06, |
|
"loss": 0.8459, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.881258311058162e-06, |
|
"loss": 0.8276, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.930828541168008e-06, |
|
"loss": 0.8474, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.978506309122687e-06, |
|
"loss": 0.8767, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.024430815431458e-06, |
|
"loss": 0.846, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.064366909249716e-06, |
|
"loss": 0.8219, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.107292297340902e-06, |
|
"loss": 0.85, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.148791325892338e-06, |
|
"loss": 0.8484, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.188955746727152e-06, |
|
"loss": 0.8553, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.227868733817176e-06, |
|
"loss": 0.845, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.265605920301813e-06, |
|
"loss": 0.8831, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.302236283380594e-06, |
|
"loss": 0.8332, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.337822903097415e-06, |
|
"loss": 0.8109, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.3724236159865746e-06, |
|
"loss": 0.8536, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.406091580591952e-06, |
|
"loss": 0.8525, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.438875768743633e-06, |
|
"loss": 0.8468, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.47082139398946e-06, |
|
"loss": 0.8792, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.501970286588418e-06, |
|
"loss": 0.8434, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.53236122286999e-06, |
|
"loss": 0.8485, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.562030215465441e-06, |
|
"loss": 0.8467, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.59101076985994e-06, |
|
"loss": 0.8311, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.619334111849203e-06, |
|
"loss": 0.8438, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.647029389772635e-06, |
|
"loss": 0.875, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.674123854806827e-06, |
|
"loss": 0.8308, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.700643022115063e-06, |
|
"loss": 0.8445, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.72661081524147e-06, |
|
"loss": 0.8857, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.752049695797903e-06, |
|
"loss": 0.8448, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.776980780205255e-06, |
|
"loss": 0.827, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.801423945009691e-06, |
|
"loss": 0.7976, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.825397922089763e-06, |
|
"loss": 0.8469, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.848920384896975e-06, |
|
"loss": 0.8477, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.87200802672445e-06, |
|
"loss": 0.819, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.894676631871945e-06, |
|
"loss": 0.822, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.916941140467113e-06, |
|
"loss": 0.8273, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.938815707609735e-06, |
|
"loss": 0.8637, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.960313757425318e-06, |
|
"loss": 0.8359, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.98144803254492e-06, |
|
"loss": 0.8353, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.002230639467933e-06, |
|
"loss": 0.8416, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.022673090212183e-06, |
|
"loss": 0.8389, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.042786340610085e-06, |
|
"loss": 0.835, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.062580825569854e-06, |
|
"loss": 0.8712, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.082066491585883e-06, |
|
"loss": 0.8419, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.101252826751851e-06, |
|
"loss": 0.8674, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.120148888503286e-06, |
|
"loss": 0.8224, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.13876332929263e-06, |
|
"loss": 0.828, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.157104420378997e-06, |
|
"loss": 0.8568, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.17518007389634e-06, |
|
"loss": 0.8216, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.192997863347404e-06, |
|
"loss": 0.7905, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.210565042656325e-06, |
|
"loss": 0.8285, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.22788856389983e-06, |
|
"loss": 0.7934, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.244975093825531e-06, |
|
"loss": 0.8617, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.26183102925549e-06, |
|
"loss": 0.8155, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.278462511464196e-06, |
|
"loss": 0.7782, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.294875439611728e-06, |
|
"loss": 0.8354, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.311075483305743e-06, |
|
"loss": 0.8133, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.32706809435913e-06, |
|
"loss": 0.835, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.342858517804411e-06, |
|
"loss": 0.8339, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.358451802220541e-06, |
|
"loss": 0.8192, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.373852809422983e-06, |
|
"loss": 0.8187, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.389066223563605e-06, |
|
"loss": 0.8202, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.404096559683005e-06, |
|
"loss": 0.8373, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.418948171754334e-06, |
|
"loss": 0.8085, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.433625260254492e-06, |
|
"loss": 0.8292, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.448131879295593e-06, |
|
"loss": 0.8149, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.462471943347044e-06, |
|
"loss": 0.8174, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.476649233576078e-06, |
|
"loss": 0.822, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.49066740383246e-06, |
|
"loss": 0.8188, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.504529986301062e-06, |
|
"loss": 0.8184, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.518240396844167e-06, |
|
"loss": 0.8405, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.531801940053751e-06, |
|
"loss": 0.8127, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.545217814032373e-06, |
|
"loss": 0.8087, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.558491114920063e-06, |
|
"loss": 0.7969, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.571624841183157e-06, |
|
"loss": 0.7868, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.58462189767999e-06, |
|
"loss": 0.803, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.597485099517221e-06, |
|
"loss": 0.8116, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.61021717570959e-06, |
|
"loss": 0.8084, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.622820772655021e-06, |
|
"loss": 0.8238, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.635298457436133e-06, |
|
"loss": 0.8182, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.647652720958468e-06, |
|
"loss": 0.8431, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.659885980935016e-06, |
|
"loss": 0.8601, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.67200058472601e-06, |
|
"loss": 0.8029, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.683998812042295e-06, |
|
"loss": 0.8336, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.695882877520106e-06, |
|
"loss": 0.8158, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.70765493317449e-06, |
|
"loss": 0.7954, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.719317070738179e-06, |
|
"loss": 0.8156, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.73087132389229e-06, |
|
"loss": 0.826, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.742319670394788e-06, |
|
"loss": 0.8407, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.753664034112258e-06, |
|
"loss": 0.8149, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.764906286960285e-06, |
|
"loss": 0.7951, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.776048250757231e-06, |
|
"loss": 0.8036, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.787091698996089e-06, |
|
"loss": 0.8013, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.798038358538676e-06, |
|
"loss": 0.8109, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.808889911236233e-06, |
|
"loss": 0.8091, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.819647995480226e-06, |
|
"loss": 0.799, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.83031420768692e-06, |
|
"loss": 0.8179, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.840890103719111e-06, |
|
"loss": 0.7954, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.851377200248169e-06, |
|
"loss": 0.8444, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.861776976059358e-06, |
|
"loss": 0.8211, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.872090873303281e-06, |
|
"loss": 0.8336, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.88232029869606e-06, |
|
"loss": 0.8114, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.892466624670776e-06, |
|
"loss": 0.7884, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.90253119048252e-06, |
|
"loss": 0.7746, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.912515303269253e-06, |
|
"loss": 0.8045, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.922420239070625e-06, |
|
"loss": 0.8118, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.932247243806712e-06, |
|
"loss": 0.8386, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.941997534218523e-06, |
|
"loss": 0.7903, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.951672298772117e-06, |
|
"loss": 0.803, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.961272698527948e-06, |
|
"loss": 0.7786, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.97079986797706e-06, |
|
"loss": 0.8029, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.980254915845623e-06, |
|
"loss": 0.8261, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.98963892586927e-06, |
|
"loss": 0.7883, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 8.99895295753853e-06, |
|
"loss": 0.82, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.008198046816687e-06, |
|
"loss": 0.8107, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.017375206831278e-06, |
|
"loss": 0.8036, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.026485428540338e-06, |
|
"loss": 0.7946, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.035529681374568e-06, |
|
"loss": 0.8063, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.044508913856369e-06, |
|
"loss": 0.8009, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.053424054196832e-06, |
|
"loss": 0.8255, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.06227601087153e-06, |
|
"loss": 0.831, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.07106567317608e-06, |
|
"loss": 0.8133, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.079793911762282e-06, |
|
"loss": 0.8305, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.088461579155646e-06, |
|
"loss": 0.8283, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.097069510255097e-06, |
|
"loss": 0.815, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.105618522815576e-06, |
|
"loss": 0.8059, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.114109417914234e-06, |
|
"loss": 0.7878, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.122542980400885e-06, |
|
"loss": 0.8045, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.130919979333354e-06, |
|
"loss": 0.8232, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 9.139241168398338e-06, |
|
"loss": 0.812, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.14668313197706e-06, |
|
"loss": 0.8241, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.154900305423636e-06, |
|
"loss": 0.8143, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.163063771470579e-06, |
|
"loss": 0.8081, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.171174227623984e-06, |
|
"loss": 0.8189, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.179232357889644e-06, |
|
"loss": 0.8298, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.187238833119211e-06, |
|
"loss": 0.7892, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.195194311345337e-06, |
|
"loss": 0.7882, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.203099438106217e-06, |
|
"loss": 0.8157, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.21095484675992e-06, |
|
"loss": 0.7796, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.21876115878889e-06, |
|
"loss": 0.8145, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.226518984095009e-06, |
|
"loss": 0.8507, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.234228921285531e-06, |
|
"loss": 0.8343, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.241891557950258e-06, |
|
"loss": 0.8324, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.249507470930265e-06, |
|
"loss": 0.8095, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.257077226578481e-06, |
|
"loss": 0.8039, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.264601381012412e-06, |
|
"loss": 0.8153, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.272080480359307e-06, |
|
"loss": 0.8062, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.279515060994034e-06, |
|
"loss": 0.8195, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.286905649769893e-06, |
|
"loss": 0.8274, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.294252764242674e-06, |
|
"loss": 0.8118, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.301556912888123e-06, |
|
"loss": 0.8065, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.308818595313135e-06, |
|
"loss": 0.8255, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.3160383024608e-06, |
|
"loss": 0.7981, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.323216516809576e-06, |
|
"loss": 0.7966, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.330353712566771e-06, |
|
"loss": 0.7948, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.33745035585653e-06, |
|
"loss": 0.8022, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.344506904902502e-06, |
|
"loss": 0.7971, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.35152381020538e-06, |
|
"loss": 0.83, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.358501514715491e-06, |
|
"loss": 0.8204, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.365440454000577e-06, |
|
"loss": 0.7879, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.372341056408958e-06, |
|
"loss": 0.8028, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.379203743228185e-06, |
|
"loss": 0.812, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.386028928839394e-06, |
|
"loss": 0.7885, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.39281702086744e-06, |
|
"loss": 0.8224, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.399568420326976e-06, |
|
"loss": 0.8072, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.406283521764617e-06, |
|
"loss": 0.8267, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.41296271339729e-06, |
|
"loss": 0.8174, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.419606377246914e-06, |
|
"loss": 0.7966, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.42621488927151e-06, |
|
"loss": 0.8085, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.432788619492866e-06, |
|
"loss": 0.7715, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.439327932120854e-06, |
|
"loss": 0.7899, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.445833185674515e-06, |
|
"loss": 0.8234, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.45230473310001e-06, |
|
"loss": 0.7594, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.45874292188551e-06, |
|
"loss": 0.7989, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.465148094173177e-06, |
|
"loss": 0.8325, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.471520586868255e-06, |
|
"loss": 0.784, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.477860731745404e-06, |
|
"loss": 0.7925, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.484168855552354e-06, |
|
"loss": 0.7905, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.490445280110947e-06, |
|
"loss": 0.839, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.496690322415662e-06, |
|
"loss": 0.7983, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.502904294729667e-06, |
|
"loss": 0.7627, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.509087504678532e-06, |
|
"loss": 0.7854, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.515240255341591e-06, |
|
"loss": 0.8019, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.521362845341097e-06, |
|
"loss": 0.8063, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.527455568929186e-06, |
|
"loss": 0.7965, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.533518716072731e-06, |
|
"loss": 0.7828, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.539552572536162e-06, |
|
"loss": 0.7979, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.545557419962264e-06, |
|
"loss": 0.7961, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.551533535951078e-06, |
|
"loss": 0.8017, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.5574811941369e-06, |
|
"loss": 0.7808, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.56340066426346e-06, |
|
"loss": 0.8078, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.569292212257332e-06, |
|
"loss": 0.7956, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.575156100299618e-06, |
|
"loss": 0.792, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.580992586895953e-06, |
|
"loss": 0.8122, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.586801926944894e-06, |
|
"loss": 0.7944, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.592584371804719e-06, |
|
"loss": 0.8075, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 9.598340169358675e-06, |
|
"loss": 0.8205, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.604069564078745e-06, |
|
"loss": 0.8142, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.609772797087972e-06, |
|
"loss": 0.8124, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.615450106221318e-06, |
|
"loss": 0.8008, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.621101726085202e-06, |
|
"loss": 0.8091, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.626727888115666e-06, |
|
"loss": 0.7764, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.632328820635236e-06, |
|
"loss": 0.8022, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.637904748908531e-06, |
|
"loss": 0.8228, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.643455895196622e-06, |
|
"loss": 0.7955, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.648982478810187e-06, |
|
"loss": 0.7859, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.654484716161512e-06, |
|
"loss": 0.7956, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.659962820815331e-06, |
|
"loss": 0.8379, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.665417003538574e-06, |
|
"loss": 0.7958, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.670847472349013e-06, |
|
"loss": 0.8078, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.676254432562877e-06, |
|
"loss": 0.7764, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.681638086841426e-06, |
|
"loss": 0.7853, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.686998635236535e-06, |
|
"loss": 0.7811, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.69233627523529e-06, |
|
"loss": 0.7867, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.697651201803651e-06, |
|
"loss": 0.8049, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.70294360742919e-06, |
|
"loss": 0.7858, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.708213682162902e-06, |
|
"loss": 0.8043, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.713461613660172e-06, |
|
"loss": 0.7544, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.718687587220866e-06, |
|
"loss": 0.7901, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.723891785828587e-06, |
|
"loss": 0.8242, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.729074390189116e-06, |
|
"loss": 0.8037, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.734235578768077e-06, |
|
"loss": 0.7992, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.739375527827789e-06, |
|
"loss": 0.8321, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.744494411463412e-06, |
|
"loss": 0.7946, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.749592401638308e-06, |
|
"loss": 0.8284, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.754669668218715e-06, |
|
"loss": 0.8193, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.759726379007711e-06, |
|
"loss": 0.7918, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.764762699778483e-06, |
|
"loss": 0.7733, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.76977879430694e-06, |
|
"loss": 0.7545, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.774774824403668e-06, |
|
"loss": 0.7995, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.779750949945259e-06, |
|
"loss": 0.822, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.784707328904993e-06, |
|
"loss": 0.7891, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.789644117382946e-06, |
|
"loss": 0.8081, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.794561469635479e-06, |
|
"loss": 0.7719, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.799459538104166e-06, |
|
"loss": 0.7964, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.804338473444142e-06, |
|
"loss": 0.7849, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.80919842455192e-06, |
|
"loss": 0.7943, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.814039538592641e-06, |
|
"loss": 0.8179, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.81886196102683e-06, |
|
"loss": 0.7809, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.823665835636607e-06, |
|
"loss": 0.8191, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.82845130455142e-06, |
|
"loss": 0.774, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.83321850827327e-06, |
|
"loss": 0.8062, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.83796758570147e-06, |
|
"loss": 0.7829, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.842698674156915e-06, |
|
"loss": 0.7831, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.847411909405928e-06, |
|
"loss": 0.7677, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.852107425683615e-06, |
|
"loss": 0.8183, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.856785355716825e-06, |
|
"loss": 0.7847, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.861445830746643e-06, |
|
"loss": 0.7636, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.866088980550497e-06, |
|
"loss": 0.8071, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.870714933463837e-06, |
|
"loss": 0.8048, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.875323816401418e-06, |
|
"loss": 0.7643, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.879915754878191e-06, |
|
"loss": 0.8177, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.884490873029821e-06, |
|
"loss": 0.8113, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.889049293632811e-06, |
|
"loss": 0.8018, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.893591138124284e-06, |
|
"loss": 0.7857, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.898116526621384e-06, |
|
"loss": 0.7321, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.902625577940345e-06, |
|
"loss": 0.7987, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.907118409615204e-06, |
|
"loss": 0.795, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.911595137916184e-06, |
|
"loss": 0.7824, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.916055877867754e-06, |
|
"loss": 0.7657, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.920500743266357e-06, |
|
"loss": 0.7808, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.924929846697829e-06, |
|
"loss": 0.7936, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.929343299554512e-06, |
|
"loss": 0.7835, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 9.933741212052067e-06, |
|
"loss": 0.7982, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.938123693245983e-06, |
|
"loss": 0.7795, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.9424908510478e-06, |
|
"loss": 0.7723, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.94684279224107e-06, |
|
"loss": 0.7899, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.951179622497001e-06, |
|
"loss": 0.7733, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.955501446389869e-06, |
|
"loss": 0.7995, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.959808367412143e-06, |
|
"loss": 0.8119, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.964100487989352e-06, |
|
"loss": 0.8068, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.968377909494707e-06, |
|
"loss": 0.7862, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.97264073226346e-06, |
|
"loss": 0.7889, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.976889055607038e-06, |
|
"loss": 0.7854, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.981122977826906e-06, |
|
"loss": 0.7908, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.985342596228237e-06, |
|
"loss": 0.761, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.989548007133313e-06, |
|
"loss": 0.806, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.993739305894728e-06, |
|
"loss": 0.7941, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.997916586908357e-06, |
|
"loss": 0.8242, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8142, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8038, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7903, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7369, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7631, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8063, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7876, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7775, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8087, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7963, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7648, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7854, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8144, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7879, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7973, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7661, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8059, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7926, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8048, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8069, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7716, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7904, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7995, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8062, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7894, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7912, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7735, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7838, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7779, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7471, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7632, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8012, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7941, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7791, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8017, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8195, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7782, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7576, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7975, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7682, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7719, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7625, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7799, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7617, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.762, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8049, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7971, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8157, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7716, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7746, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7614, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7895, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8076, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7599, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7956, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7751, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7697, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7982, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7622, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7716, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7761, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8177, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7722, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7809, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8001, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7641, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7582, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7644, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7709, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7986, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7853, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7782, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.747, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7427, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7813, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7691, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7912, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7849, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7848, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7946, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7852, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7828, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7714, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8218, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.8017, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7707, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7678, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7662, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7681, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7641, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7553, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7591, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7863, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7862, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7807, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.777, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7829, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7952, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7829, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7675, |
|
"step": 4000 |
|
} |
|
  ],
  "max_steps": 33579,
  "num_train_epochs": 1,
  "total_flos": 8.143508134258803e+18,
  "trial_name": null,
  "trial_params": null
}