{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 101.0,
  "global_step": 5959,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.08, "learning_rate": 0.0001347830758006758, "loss": 2.4476, "step": 5},
    {"epoch": 0.17, "learning_rate": 0.0001277026099836151, "loss": 2.2935, "step": 10},
    {"epoch": 0.25, "learning_rate": 0.00011645752234985997, "loss": 2.0005, "step": 15},
    {"epoch": 0.34, "learning_rate": 0.0001018401897048198, "loss": 2.1957, "step": 20},
    {"epoch": 0.42, "learning_rate": 8.488061161115849e-05, "loss": 2.028, "step": 25},
    {"epoch": 0.51, "learning_rate": 6.677383222936865e-05, "loss": 1.9452, "step": 30},
    {"epoch": 0.59, "learning_rate": 4.8795732412502015e-05, "loss": 1.9326, "step": 35},
    {"epoch": 0.68, "learning_rate": 3.221312570298567e-05, "loss": 2.1213, "step": 40},
    {"epoch": 0.76, "learning_rate": 1.81944932507936e-05, "loss": 2.0756, "step": 45},
    {"epoch": 0.85, "learning_rate": 7.7276476511192e-06, "loss": 1.9295, "step": 50},
    {"epoch": 0.93, "learning_rate": 1.550127458494346e-06, "loss": 1.7807, "step": 55},
    {"epoch": 1.02, "learning_rate": 9.722707657413062e-08, "loss": 2.2416, "step": 60},
    {"epoch": 1.1, "learning_rate": 3.4713240590690364e-06, "loss": 2.036, "step": 65},
    {"epoch": 1.19, "learning_rate": 1.1434665150939459e-05, "loss": 1.9217, "step": 70},
    {"epoch": 1.27, "learning_rate": 2.3426119396138327e-05, "loss": 1.8372, "step": 75},
    {"epoch": 1.36, "learning_rate": 3.8600717816522507e-05, "loss": 1.784, "step": 80},
    {"epoch": 1.44, "learning_rate": 5.5889193527623905e-05, "loss": 1.8846, "step": 85},
    {"epoch": 1.53, "learning_rate": 7.407332684094147e-05, "loss": 1.8889, "step": 90},
    {"epoch": 1.61, "learning_rate": 9.187178621569686e-05, "loss": 1.8718, "step": 95},
    {"epoch": 1.69, "learning_rate": 0.00010803041634070827, "loss": 1.8726, "step": 100},
    {"epoch": 1.78, "learning_rate": 0.00012141061126329156, "loss": 1.8162, "step": 105},
    {"epoch": 1.86, "learning_rate": 0.00013106954541784705, "loss": 1.7323, "step": 110},
    {"epoch": 1.95, "learning_rate": 0.00013632660913388424, "loss": 1.8048, "step": 115},
    {"epoch": 2.03, "learning_rate": 0.0001368113672944154, "loss": 1.9135, "step": 120},
    {"epoch": 2.12, "learning_rate": 0.00013248966177323044, "loss": 1.726, "step": 125},
    {"epoch": 2.2, "learning_rate": 0.00012366601836206413, "loss": 1.6579, "step": 130},
    {"epoch": 2.29, "learning_rate": 0.00011096218858530879, "loss": 1.6411, "step": 135},
    {"epoch": 2.37, "learning_rate": 9.527333843746984e-05, "loss": 1.8306, "step": 140},
    {"epoch": 2.46, "learning_rate": 7.77049711716633e-05, "loss": 1.8107, "step": 145},
    {"epoch": 2.54, "learning_rate": 5.949502882833675e-05, "loss": 1.7814, "step": 150},
    {"epoch": 2.63, "learning_rate": 4.192666156253025e-05, "loss": 1.654, "step": 155},
    {"epoch": 2.71, "learning_rate": 2.6237811414691256e-05, "loss": 1.7283, "step": 160},
    {"epoch": 2.8, "learning_rate": 1.3533981637935892e-05, "loss": 1.8214, "step": 165},
    {"epoch": 2.88, "learning_rate": 4.710338226769622e-06, "loss": 1.7687, "step": 170},
    {"epoch": 2.97, "learning_rate": 3.886327055845878e-07, "loss": 1.7183, "step": 175},
    {"epoch": 3.05, "learning_rate": 8.733908661157559e-07, "loss": 1.6793, "step": 180},
    {"epoch": 3.14, "learning_rate": 6.130454582152937e-06, "loss": 1.6161, "step": 185},
    {"epoch": 3.22, "learning_rate": 1.5789388736708423e-05, "loss": 1.6901, "step": 190},
    {"epoch": 3.31, "learning_rate": 2.9169583659291692e-05, "loss": 1.7063, "step": 195},
    {"epoch": 3.39, "learning_rate": 4.5328213784303035e-05, "loss": 1.7222, "step": 200},
    {"epoch": 3.47, "learning_rate": 6.312667315905842e-05, "loss": 1.7548, "step": 205},
    {"epoch": 3.56, "learning_rate": 8.1310806472376e-05, "loss": 1.7133, "step": 210},
    {"epoch": 3.64, "learning_rate": 9.859928218347747e-05, "loss": 1.5574, "step": 215},
    {"epoch": 3.73, "learning_rate": 0.00011377388060386165, "loss": 1.6825, "step": 220},
    {"epoch": 3.81, "learning_rate": 0.00012576533484906052, "loss": 1.6971, "step": 225},
    {"epoch": 3.9, "learning_rate": 0.00013372867594093092, "loss": 1.6711, "step": 230},
    {"epoch": 3.98, "learning_rate": 0.00013710277292342587, "loss": 1.6385, "step": 235},
    {"epoch": 4.07, "learning_rate": 0.00013564987254150566, "loss": 1.6699, "step": 240},
    {"epoch": 4.15, "learning_rate": 0.00012947235234888086, "loss": 1.7898, "step": 245},
    {"epoch": 4.24, "learning_rate": 0.00011900550674920642, "loss": 1.578, "step": 250},
    {"epoch": 4.32, "learning_rate": 0.00010498687429701432, "loss": 1.6301, "step": 255},
    {"epoch": 4.41, "learning_rate": 8.840426758749807e-05, "loss": 1.5338, "step": 260},
    {"epoch": 4.49, "learning_rate": 7.042616777063153e-05, "loss": 1.6108, "step": 265},
    {"epoch": 4.58, "learning_rate": 5.231938838884156e-05, "loss": 1.5748, "step": 270},
    {"epoch": 4.66, "learning_rate": 3.535981029518021e-05, "loss": 1.647, "step": 275},
    {"epoch": 4.75, "learning_rate": 2.0742477650140126e-05, "loss": 1.5056, "step": 280},
    {"epoch": 4.83, "learning_rate": 9.497390016384942e-06, "loss": 1.5544, "step": 285},
    {"epoch": 4.92, "learning_rate": 2.416924199324192e-06, "loss": 1.6663, "step": 290},
    {"epoch": 5.0, "learning_rate": 0.0, "loss": 1.5545, "step": 295},
    {"epoch": 5.08, "learning_rate": 2.416924199324169e-06, "loss": 1.5904, "step": 300},
    {"epoch": 5.17, "learning_rate": 9.497390016384903e-06, "loss": 1.4718, "step": 305},
    {"epoch": 5.25, "learning_rate": 2.074247765013998e-05, "loss": 1.6686, "step": 310},
    {"epoch": 5.34, "learning_rate": 3.535981029518024e-05, "loss": 1.4802, "step": 315},
    {"epoch": 5.42, "learning_rate": 5.231938838884147e-05, "loss": 1.5853, "step": 320},
    {"epoch": 5.51, "learning_rate": 7.042616777063145e-05, "loss": 1.4866, "step": 325},
    {"epoch": 5.59, "learning_rate": 8.8404267587498e-05, "loss": 1.5326, "step": 330},
    {"epoch": 5.68, "learning_rate": 0.00010498687429701424, "loss": 1.5453, "step": 335},
    {"epoch": 5.76, "learning_rate": 0.00011900550674920627, "loss": 1.5737, "step": 340},
    {"epoch": 5.85, "learning_rate": 0.00012947235234888078, "loss": 1.5864, "step": 345},
    {"epoch": 5.93, "learning_rate": 0.00013564987254150568, "loss": 1.5359, "step": 350},
    {"epoch": 6.02, "learning_rate": 0.00013710277292342587, "loss": 1.5031, "step": 355},
    {"epoch": 6.1, "learning_rate": 0.000133728675940931, "loss": 1.5171, "step": 360},
    {"epoch": 6.19, "learning_rate": 0.00012576533484906052, "loss": 1.4829, "step": 365},
    {"epoch": 6.27, "learning_rate": 0.00011377388060386172, "loss": 1.535, "step": 370},
    {"epoch": 6.36, "learning_rate": 9.859928218347764e-05, "loss": 1.6041, "step": 375},
    {"epoch": 6.44, "learning_rate": 8.131080647237608e-05, "loss": 1.5994, "step": 380},
    {"epoch": 6.53, "learning_rate": 6.31266731590584e-05, "loss": 1.4118, "step": 385},
    {"epoch": 6.61, "learning_rate": 4.532821378430311e-05, "loss": 1.5561, "step": 390},
    {"epoch": 6.69, "learning_rate": 2.916958365929176e-05, "loss": 1.469, "step": 395},
    {"epoch": 6.78, "learning_rate": 1.578938873670855e-05, "loss": 1.522, "step": 400},
    {"epoch": 6.86, "learning_rate": 6.130454582152975e-06, "loss": 1.4166, "step": 405},
    {"epoch": 6.95, "learning_rate": 8.733908661157864e-07, "loss": 1.365, "step": 410},
    {"epoch": 7.03, "learning_rate": 3.8863270558459543e-07, "loss": 1.4054, "step": 415},
    {"epoch": 7.12, "learning_rate": 4.7103382267695455e-06, "loss": 1.4166, "step": 420},
    {"epoch": 7.2, "learning_rate": 1.3533981637935914e-05, "loss": 1.4485, "step": 425},
    {"epoch": 7.29, "learning_rate": 2.6237811414691188e-05, "loss": 1.4459, "step": 430},
    {"epoch": 7.37, "learning_rate": 4.192666156253006e-05, "loss": 1.4346, "step": 435},
    {"epoch": 7.46, "learning_rate": 5.9495028828336655e-05, "loss": 1.3644, "step": 440},
    {"epoch": 7.54, "learning_rate": 7.770497117166322e-05, "loss": 1.5092, "step": 445},
    {"epoch": 7.63, "learning_rate": 9.527333843746983e-05, "loss": 1.4805, "step": 450},
    {"epoch": 7.71, "learning_rate": 0.00011096218858530872, "loss": 1.4888, "step": 455},
    {"epoch": 7.8, "learning_rate": 0.00012366601836206402, "loss": 1.3819, "step": 460},
    {"epoch": 7.88, "learning_rate": 0.0001324896617732304, "loss": 1.4309, "step": 465},
    {"epoch": 7.97, "learning_rate": 0.0001368113672944154, "loss": 1.4564, "step": 470},
    {"epoch": 8.05, "learning_rate": 0.00013632660913388424, "loss": 1.4574, "step": 475},
    {"epoch": 8.14, "learning_rate": 0.00013106954541784708, "loss": 1.449, "step": 480},
    {"epoch": 8.22, "learning_rate": 0.00012141061126329153, "loss": 1.3888, "step": 485},
    {"epoch": 8.31, "learning_rate": 0.00010803041634070854, "loss": 1.3585, "step": 490},
    {"epoch": 8.39, "learning_rate": 9.1871786215697e-05, "loss": 1.3667, "step": 495},
    {"epoch": 8.47, "learning_rate": 7.40733268409415e-05, "loss": 1.4703, "step": 500},
    {"epoch": 8.56, "learning_rate": 5.588919352762405e-05, "loss": 1.4113, "step": 505},
    {"epoch": 8.64, "learning_rate": 3.860071781652248e-05, "loss": 1.447, "step": 510},
    {"epoch": 8.73, "learning_rate": 2.3426119396138205e-05, "loss": 1.4628, "step": 515},
    {"epoch": 8.81, "learning_rate": 1.1434665150939559e-05, "loss": 1.3236, "step": 520},
    {"epoch": 8.9, "learning_rate": 3.471324059069044e-06, "loss": 1.3998, "step": 525},
    {"epoch": 8.98, "learning_rate": 9.722707657414585e-08, "loss": 1.3596, "step": 530},
    {"epoch": 9.07, "learning_rate": 1.550127458494308e-06, "loss": 1.4176, "step": 535},
    {"epoch": 9.15, "learning_rate": 7.727647651119162e-06, "loss": 1.3178, "step": 540},
    {"epoch": 9.24, "learning_rate": 1.8194493250793477e-05, "loss": 1.2094, "step": 545},
    {"epoch": 9.32, "learning_rate": 3.221312570298565e-05, "loss": 1.2475, "step": 550},
    {"epoch": 9.41, "learning_rate": 4.879573241250212e-05, "loss": 1.3111, "step": 555},
    {"epoch": 9.49, "learning_rate": 6.677383222936843e-05, "loss": 1.3479, "step": 560},
    {"epoch": 9.58, "learning_rate": 8.488061161115841e-05, "loss": 1.3577, "step": 565},
    {"epoch": 9.66, "learning_rate": 0.00010184018970481966, "loss": 1.3625, "step": 570},
    {"epoch": 9.75, "learning_rate": 0.00011645752234985993, "loss": 1.4285, "step": 575},
    {"epoch": 9.83, "learning_rate": 0.00012770260998361515, "loss": 1.3736, "step": 580},
    {"epoch": 9.92, "learning_rate": 0.00013478307580067574, "loss": 1.3182, "step": 585},
    {"epoch": 10.0, "learning_rate": 0.0001372, "loss": 1.3859, "step": 590},
    {"epoch": 10.08, "learning_rate": 0.00013478307580067577, "loss": 1.3133, "step": 595},
    {"epoch": 10.17, "learning_rate": 0.00012770260998361523, "loss": 1.2928, "step": 600},
    {"epoch": 10.25, "learning_rate": 0.00011645752234986005, "loss": 1.3042, "step": 605},
    {"epoch": 10.34, "learning_rate": 0.00010184018970481979, "loss": 1.2487, "step": 610},
    {"epoch": 10.42, "learning_rate": 8.488061161115881e-05, "loss": 1.3463, "step": 615},
    {"epoch": 10.51, "learning_rate": 6.677383222936884e-05, "loss": 1.3703, "step": 620},
    {"epoch": 10.59, "learning_rate": 4.879573241250204e-05, "loss": 1.3632, "step": 625},
    {"epoch": 10.68, "learning_rate": 3.221312570298559e-05, "loss": 1.3018, "step": 630},
    {"epoch": 10.76, "learning_rate": 1.8194493250793422e-05, "loss": 1.309, "step": 635},
    {"epoch": 10.85, "learning_rate": 7.727647651119238e-06, "loss": 1.2896, "step": 640},
    {"epoch": 10.93, "learning_rate": 1.5501274584943384e-06, "loss": 1.2617, "step": 645},
    {"epoch": 11.02, "learning_rate": 9.722707657413823e-08, "loss": 1.2505, "step": 650},
    {"epoch": 11.1, "learning_rate": 3.4713240590689907e-06, "loss": 1.2368, "step": 655},
    {"epoch": 11.19, "learning_rate": 1.1434665150939459e-05, "loss": 1.2241, "step": 660},
    {"epoch": 11.27, "learning_rate": 2.3426119396138076e-05, "loss": 1.2006, "step": 665},
    {"epoch": 11.36, "learning_rate": 3.8600717816522324e-05, "loss": 1.2346, "step": 670},
    {"epoch": 11.44, "learning_rate": 5.5889193527623885e-05, "loss": 1.2934, "step": 675},
    {"epoch": 11.53, "learning_rate": 7.407332684094109e-05, "loss": 1.2511, "step": 680},
    {"epoch": 11.61, "learning_rate": 9.187178621569662e-05, "loss": 1.291, "step": 685},
    {"epoch": 11.69, "learning_rate": 0.0001080304163407082, "loss": 1.2377, "step": 690},
    {"epoch": 11.78, "learning_rate": 0.00012141061126329158, "loss": 1.3313, "step": 695},
    {"epoch": 11.86, "learning_rate": 0.0001310695454178471, "loss": 1.2796, "step": 700},
    {"epoch": 11.95, "learning_rate": 0.0001363266091338842, "loss": 1.172, "step": 705},
    {"epoch": 12.03, "learning_rate": 0.0001368113672944154, "loss": 1.1964, "step": 710},
    {"epoch": 12.12, "learning_rate": 0.00013248966177323038, "loss": 1.1786, "step": 715},
    {"epoch": 12.2, "learning_rate": 0.00012366601836206426, "loss": 1.2633, "step": 720},
    {"epoch": 12.29, "learning_rate": 0.00011096218858530884, "loss": 1.3484, "step": 725},
    {"epoch": 12.37, "learning_rate": 9.527333843746975e-05, "loss": 1.2342, "step": 730},
    {"epoch": 12.46, "learning_rate": 7.770497117166362e-05, "loss": 1.2577, "step": 735},
    {"epoch": 12.54, "learning_rate": 5.9495028828336825e-05, "loss": 1.1856, "step": 740},
    {"epoch": 12.63, "learning_rate": 4.192666156253067e-05, "loss": 1.1636, "step": 745},
    {"epoch": 12.71, "learning_rate": 2.6237811414691517e-05, "loss": 1.2436, "step": 750},
    {"epoch": 12.8, "learning_rate": 1.3533981637936021e-05, "loss": 1.1413, "step": 755},
    {"epoch": 12.88, "learning_rate": 4.7103382267696065e-06, "loss": 1.2202, "step": 760},
    {"epoch": 12.97, "learning_rate": 3.886327055845878e-07, "loss": 1.1975, "step": 765},
    {"epoch": 13.05, "learning_rate": 8.73390866115794e-07, "loss": 1.2147, "step": 770},
    {"epoch": 13.14, "learning_rate": 6.130454582152906e-06, "loss": 1.1828, "step": 775},
    {"epoch": 13.22, "learning_rate": 1.5789388736708446e-05, "loss": 1.1669, "step": 780},
    {"epoch": 13.31, "learning_rate": 2.9169583659291425e-05, "loss": 1.0396, "step": 785},
    {"epoch": 13.39, "learning_rate": 4.5328213784302954e-05, "loss": 1.144, "step": 790},
    {"epoch": 13.47, "learning_rate": 6.312667315905846e-05, "loss": 1.1023, "step": 795},
    {"epoch": 13.56, "learning_rate": 8.131080647237566e-05, "loss": 1.159, "step": 800},
    {"epoch": 13.64, "learning_rate": 9.859928218347728e-05, "loss": 1.1336, "step": 805},
    {"epoch": 13.73, "learning_rate": 0.00011377388060386158, "loss": 1.1809, "step": 810},
    {"epoch": 13.81, "learning_rate": 0.00012576533484906028, "loss": 1.1879, "step": 815},
    {"epoch": 13.9, "learning_rate": 0.00013372867594093086, "loss": 1.2067, "step": 820},
    {"epoch": 13.98, "learning_rate": 0.00013710277292342587, "loss": 1.1995, "step": 825},
    {"epoch": 14.07, "learning_rate": 0.00013564987254150566, "loss": 1.0728, "step": 830},
    {"epoch": 14.15, "learning_rate": 0.00012947235234888075, "loss": 1.1226, "step": 835},
    {"epoch": 14.24, "learning_rate": 0.00011900550674920657, "loss": 1.0724, "step": 840},
    {"epoch": 14.32, "learning_rate": 0.00010498687429701438, "loss": 1.2117, "step": 845},
    {"epoch": 14.41, "learning_rate": 8.840426758749793e-05, "loss": 1.1809, "step": 850},
    {"epoch": 14.49, "learning_rate": 7.042616777063161e-05, "loss": 1.1632, "step": 855},
    {"epoch": 14.58, "learning_rate": 5.2319388388841635e-05, "loss": 1.1441, "step": 860},
    {"epoch": 14.66, "learning_rate": 3.53598102951806e-05, "loss": 1.0918, "step": 865},
    {"epoch": 14.75, "learning_rate": 2.0742477650140278e-05, "loss": 1.1435, "step": 870},
    {"epoch": 14.83, "learning_rate": 9.497390016384988e-06, "loss": 1.1574, "step": 875},
    {"epoch": 14.92, "learning_rate": 2.416924199324215e-06, "loss": 1.137, "step": 880},
    {"epoch": 15.0, "learning_rate": 0.0, "loss": 1.1283, "step": 885},
    {"epoch": 15.08, "learning_rate": 2.4169241993241463e-06, "loss": 1.1263, "step": 890},
    {"epoch": 15.17, "learning_rate": 9.497390016384857e-06, "loss": 1.0131, "step": 895},
    {"epoch": 15.25, "learning_rate": 2.0742477650140095e-05, "loss": 1.0513, "step": 900},
    {"epoch": 15.34, "learning_rate": 3.535981029518038e-05, "loss": 0.9842, "step": 905},
    {"epoch": 15.42, "learning_rate": 5.231938838884139e-05, "loss": 1.0041, "step": 910},
    {"epoch": 15.51, "learning_rate": 7.042616777063135e-05, "loss": 1.0598, "step": 915},
    {"epoch": 15.59, "learning_rate": 8.840426758749768e-05, "loss": 0.9953, "step": 920},
    {"epoch": 15.68, "learning_rate": 0.00010498687429701418, "loss": 1.1262, "step": 925},
    {"epoch": 15.76, "learning_rate": 0.00011900550674920638, "loss": 1.1427, "step": 930},
    {"epoch": 15.85, "learning_rate": 0.00012947235234888062, "loss": 1.0945, "step": 935},
    {"epoch": 15.93, "learning_rate": 0.0001356498725415056, "loss": 1.1189, "step": 940},
    {"epoch": 16.02, "learning_rate": 0.00013710277292342587, "loss": 1.1328, "step": 945},
    {"epoch": 16.1, "learning_rate": 0.00013372867594093094, "loss": 0.9658, "step": 950},
    {"epoch": 16.19, "learning_rate": 0.00012576533484906068, "loss": 1.0325, "step": 955},
    {"epoch": 16.27, "learning_rate": 0.00011377388060386179, "loss": 1.0981, "step": 960},
    {"epoch": 16.36, "learning_rate": 9.85992821834775e-05, "loss": 1.0437, "step": 965},
    {"epoch": 16.44, "learning_rate": 8.131080647237592e-05, "loss": 1.0232, "step": 970},
    {"epoch": 16.53, "learning_rate": 6.312667315905823e-05, "loss": 1.0634, "step": 975},
    {"epoch": 16.61, "learning_rate": 4.532821378430365e-05, "loss": 1.17, "step": 980},
    {"epoch": 16.69, "learning_rate": 2.9169583659292028e-05, "loss": 1.0492, "step": 985},
    {"epoch": 16.78, "learning_rate": 1.5789388736708606e-05, "loss": 1.0114, "step": 990},
    {"epoch": 16.86, "learning_rate": 6.130454582153005e-06, "loss": 1.0472, "step": 995},
    {"epoch": 16.95, "learning_rate": 8.733908661157635e-07, "loss": 0.9773, "step": 1000},
    {"epoch": 17.03, "learning_rate": 3.886327055846107e-07, "loss": 1.0456, "step": 1005},
    {"epoch": 17.12, "learning_rate": 4.710338226769515e-06, "loss": 1.0436, "step": 1010},
    {"epoch": 17.2, "learning_rate": 1.3533981637935869e-05, "loss": 0.916, "step": 1015},
    {"epoch": 17.29, "learning_rate": 2.6237811414691317e-05, "loss": 0.9274, "step": 1020},
    {"epoch": 17.37, "learning_rate": 4.1926661562530436e-05, "loss": 1.0476, "step": 1025},
    {"epoch": 17.46, "learning_rate": 5.949502882833706e-05, "loss": 1.0242, "step": 1030},
    {"epoch": 17.54, "learning_rate": 7.770497117166289e-05, "loss": 1.001, "step": 1035},
    {"epoch": 17.63, "learning_rate": 9.527333843746952e-05, "loss": 0.9579, "step": 1040},
    {"epoch": 17.71, "learning_rate": 0.00011096218858530865, "loss": 0.9389, "step": 1045},
    {"epoch": 17.8, "learning_rate": 0.0001236660183620641, "loss": 0.9494, "step": 1050},
    {"epoch": 17.88, "learning_rate": 0.00013248966177323046, "loss": 1.0333, "step": 1055},
    {"epoch": 17.97, "learning_rate": 0.00013681136729441534, "loss": 0.991, "step": 1060},
    {"epoch": 18.05, "learning_rate": 0.00013632660913388432, "loss": 1.0615, "step": 1065},
    {"epoch": 18.14, "learning_rate": 0.0001310695454178472, "loss": 1.0309, "step": 1070},
    {"epoch": 18.22, "learning_rate": 0.00012141061126329175, "loss": 0.9265, "step": 1075},
    {"epoch": 18.31, "learning_rate": 0.00010803041634070842, "loss": 1.0156, "step": 1080},
    {"epoch": 18.39, "learning_rate": 9.187178621569685e-05, "loss": 0.8855, "step": 1085},
    {"epoch": 18.47, "learning_rate": 7.407332684094182e-05, "loss": 0.9234, "step": 1090},
    {"epoch": 18.56, "learning_rate": 5.588919352762413e-05, "loss": 1.0271, "step": 1095},
    {"epoch": 18.64, "learning_rate": 3.8600717816522554e-05, "loss": 0.8368, "step": 1100},
    {"epoch": 18.73, "learning_rate": 2.3426119396138266e-05, "loss": 0.9153, "step": 1105},
    {"epoch": 18.81, "learning_rate": 1.143466515093933e-05, "loss": 0.9869, "step": 1110},
    {"epoch": 18.9, "learning_rate": 3.471324059069227e-06, "loss": 0.9458, "step": 1115},
    {"epoch": 18.98, "learning_rate": 9.722707657415346e-08, "loss": 0.9596, "step": 1120},
    {"epoch": 19.07, "learning_rate": 1.5501274584942853e-06, "loss": 0.9193, "step": 1125},
    {"epoch": 19.15, "learning_rate": 7.727647651119123e-06, "loss": 0.8474, "step": 1130},
    {"epoch": 19.24, "learning_rate": 1.8194493250793585e-05, "loss": 0.9306, "step": 1135},
    {"epoch": 19.32, "learning_rate": 3.221312570298537e-05, "loss": 0.8185, "step": 1140},
    {"epoch": 19.41, "learning_rate": 4.879573241250181e-05, "loss": 0.8653, "step": 1145},
    {"epoch": 19.49, "learning_rate": 6.67738322293686e-05, "loss": 0.8867, "step": 1150},
    {"epoch": 19.58, "learning_rate": 8.488061161115857e-05, "loss": 0.9633, "step": 1155},
    {"epoch": 19.66, "learning_rate": 0.00010184018970482001, "loss": 0.9252, "step": 1160},
    {"epoch": 19.75, "learning_rate": 0.00011645752234986022, "loss": 0.8119, "step": 1165},
    {"epoch": 19.83, "learning_rate": 0.00012770260998361488, "loss": 0.9919, "step": 1170},
    {"epoch": 19.92, "learning_rate": 0.00013478307580067571, "loss": 1.0657, "step": 1175},
    {"epoch": 20.0, "learning_rate": 0.0001372, "loss": 0.8867, "step": 1180},
    {"epoch": 20.08, "learning_rate": 0.0001347830758006758, "loss": 0.8722, "step": 1185},
    {"epoch": 20.17, "learning_rate": 0.00012770260998361504, "loss": 0.9027, "step": 1190},
    {"epoch": 20.25, "learning_rate": 0.00011645752234986047, "loss": 0.9142, "step": 1195},
    {"epoch": 20.34, "learning_rate": 0.00010184018970482031, "loss": 0.9368, "step": 1200},
    {"epoch": 20.42, "learning_rate": 8.488061161115841e-05, "loss": 0.9328, "step": 1205},
    {"epoch": 20.51, "learning_rate": 6.677383222936893e-05, "loss": 0.9455, "step": 1210},
    {"epoch": 20.59, "learning_rate": 4.879573241250166e-05, "loss": 0.8107, "step": 1215},
    {"epoch": 20.68, "learning_rate": 3.221312570298566e-05, "loss": 0.8213, "step": 1220},
    {"epoch": 20.76, "learning_rate": 1.8194493250793812e-05, "loss": 0.8629, "step": 1225},
    {"epoch": 20.85, "learning_rate": 7.727647651119504e-06, "loss": 0.8399, "step": 1230},
    {"epoch": 20.93, "learning_rate": 1.5501274584943613e-06, "loss": 0.8685, "step": 1235},
    {"epoch": 21.02, "learning_rate": 9.722707657410776e-08, "loss": 0.8757, "step": 1240},
    {"epoch": 21.1, "learning_rate": 3.4713240590691203e-06, "loss": 0.7704, "step": 1245},
    {"epoch": 21.19, "learning_rate": 1.1434665150939413e-05, "loss": 0.773, "step": 1250},
    {"epoch": 21.27, "learning_rate": 2.3426119396138015e-05, "loss": 0.768, "step": 1255},
    {"epoch": 21.36, "learning_rate": 3.860071781652269e-05, "loss": 0.8515, "step": 1260},
    {"epoch": 21.44, "learning_rate": 5.5889193527623803e-05, "loss": 0.7945, "step": 1265},
    {"epoch": 21.53, "learning_rate": 7.407332684094197e-05, "loss": 0.8015, "step": 1270},
    {"epoch": 21.61, "learning_rate": 9.187178621569608e-05, "loss": 0.8248, "step": 1275},
    {"epoch": 21.69, "learning_rate": 0.00010803041634070815, "loss": 0.8062, "step": 1280},
    {"epoch": 21.78, "learning_rate": 0.00012141061126329121, "loss": 0.85, "step": 1285},
    {"epoch": 21.86, "learning_rate": 0.00013106954541784708, "loss": 0.8513, "step": 1290},
    {"epoch": 21.95, "learning_rate": 0.00013632660913388418, "loss": 0.9707, "step": 1295},
    {"epoch": 22.03, "learning_rate": 0.00013681136729441537, "loss": 0.8592, "step": 1300},
    {"epoch": 22.12, "learning_rate": 0.0001324896617732304, "loss": 0.8644, "step": 1305},
    {"epoch": 22.2, "learning_rate": 0.00012366601836206432, "loss": 0.8634, "step": 1310},
    {"epoch": 22.29, "learning_rate": 0.00011096218858530853, "loss": 0.8125, "step": 1315},
    {"epoch": 22.37, "learning_rate": 9.527333843746984e-05, "loss": 0.864, "step": 1320},
    {"epoch": 22.46, "learning_rate": 7.770497117166273e-05, "loss": 0.8875, "step": 1325},
    {"epoch": 22.54, "learning_rate": 5.9495028828337394e-05, "loss": 0.7048, "step": 1330},
    {"epoch": 22.63, "learning_rate": 4.1926661562530294e-05, "loss": 0.8041, "step": 1335},
    {"epoch": 22.71, "learning_rate": 2.6237811414691578e-05, "loss": 0.8016, "step": 1340},
    {"epoch": 22.8, "learning_rate": 1.3533981637935777e-05, "loss": 0.7686, "step": 1345},
    {"epoch": 22.88, "learning_rate": 4.710338226769637e-06, "loss": 0.7585, "step": 1350},
    {"epoch": 22.97, "learning_rate": 3.8863270558464874e-07, "loss": 0.837, "step": 1355},
    {"epoch": 23.05, "learning_rate": 8.733908661156265e-07, "loss": 0.8028, "step": 1360},
    {"epoch": 23.14, "learning_rate": 6.130454582152869e-06, "loss": 0.6584, "step": 1365},
    {"epoch": 23.22, "learning_rate": 1.578938873670808e-05, "loss": 0.6132, "step": 1370},
    {"epoch": 23.31, "learning_rate": 2.9169583659291753e-05, "loss": 0.693, "step": 1375},
    {"epoch": 23.39, "learning_rate": 4.532821378430287e-05, "loss": 0.7615, "step": 1380},
    {"epoch": 23.47, "learning_rate": 6.31266731590579e-05, "loss": 0.7913, "step": 1385},
    {"epoch": 23.56, "learning_rate": 8.131080647237607e-05, "loss": 0.7448, "step": 1390},
    {"epoch": 23.64, "learning_rate": 9.859928218347718e-05, "loss": 0.7617, "step": 1395},
    {"epoch": 23.73, "learning_rate": 0.0001137738806038619, "loss": 0.719, "step": 1400},
    {"epoch": 23.81, "learning_rate": 0.00012576533484906052, "loss": 0.8524, "step": 1405},
    {"epoch": 23.9, "learning_rate": 0.00013372867594093084, "loss": 0.8465, "step": 1410},
    {"epoch": 23.98, "learning_rate": 0.00013710277292342584, "loss": 0.8268, "step": 1415},
    {"epoch": 24.07, "learning_rate": 0.00013564987254150568, "loss": 0.6952, "step": 1420},
    {"epoch": 24.15, "learning_rate": 0.000129472352348881, "loss": 0.7045, "step": 1425},
    {"epoch": 24.24, "learning_rate": 0.0001190055067492063, "loss": 0.8174, "step": 1430},
    {"epoch": 24.32, "learning_rate": 0.00010498687429701445, "loss": 0.8089, "step": 1435},
    {"epoch": 24.41, "learning_rate": 8.840426758749845e-05, "loss": 0.732, "step": 1440},
    {"epoch": 24.49, "learning_rate": 7.04261677706312e-05, "loss": 0.7616, "step": 1445},
    {"epoch": 24.58, "learning_rate": 5.2319388388841716e-05, "loss": 0.7403, "step": 1450},
    {"epoch": 24.66, "learning_rate": 3.5359810295179815e-05, "loss": 0.7289, "step": 1455},
    {"epoch": 24.75, "learning_rate": 2.0742477650139987e-05, "loss": 0.7526, "step": 1460},
    {"epoch": 24.83, "learning_rate": 9.497390016385033e-06, "loss": 0.6211, "step": 1465},
    {"epoch": 24.92, "learning_rate": 2.4169241993243597e-06, "loss": 0.7174, "step": 1470},
    {"epoch": 25.0, "learning_rate": 0.0, "loss": 0.7111, "step": 1475},
    {"epoch": 25.08, "learning_rate": 2.4169241993241234e-06, "loss": 0.6603, "step": 1480},
    {"epoch": 25.17, "learning_rate": 9.497390016385064e-06, "loss": 0.6645, "step": 1485},
    {"epoch": 25.25, "learning_rate": 2.0742477650139333e-05, "loss": 0.6387, "step": 1490},
    {"epoch": 25.34, "learning_rate": 3.5359810295179876e-05, "loss": 0.6582, "step": 1495},
    {"epoch": 25.42, "learning_rate": 5.2319388388840835e-05, "loss": 0.7156, "step": 1500},
    {"epoch": 25.51, "learning_rate": 7.042616777063128e-05, "loss": 0.6633, "step": 1505},
    {"epoch": 25.59, "learning_rate": 8.84042675874976e-05, "loss": 0.6631, "step": 1510},
    {"epoch": 25.68, "learning_rate": 0.0001049868742970145, "loss": 0.7089, "step": 1515},
    {"epoch": 25.76, "learning_rate": 0.00011900550674920632, "loss": 0.6112, "step": 1520},
    {"epoch": 25.85, "learning_rate": 0.0001294723523488806, "loss": 0.7087, "step": 1525},
    {"epoch": 25.93, "learning_rate": 0.00013564987254150568, "loss": 0.7079, "step": 1530},
    {"epoch": 26.02, "learning_rate": 0.00013710277292342587, "loss": 0.7044, "step": 1535},
    {"epoch": 26.1, "learning_rate": 0.0001337286759409308, "loss": 0.6848, "step": 1540},
    {"epoch": 26.19, "learning_rate": 0.000125765334849061, "loss": 0.6899, "step": 1545},
    {"epoch": 26.27, "learning_rate": 0.00011377388060386184, "loss": 0.6688, "step": 1550},
    {"epoch": 26.36, "learning_rate": 9.859928218347801e-05, "loss": 0.6701, "step": 1555},
    {"epoch": 26.44, "learning_rate": 8.1310806472376e-05, "loss": 0.6325, "step": 1560},
    {"epoch": 26.53, "learning_rate": 6.31266731590588e-05, "loss": 0.723, "step": 1565},
    {"epoch": 26.61, "learning_rate": 4.5328213784303727e-05, "loss": 0.6405, "step": 1570},
    {"epoch": 26.69, "learning_rate": 2.91695836592917e-05, "loss": 0.693, "step": 1575},
    {"epoch": 26.78, "learning_rate": 1.5789388736708656e-05, "loss": 0.7138, "step": 1580},
    {"epoch": 26.86, "learning_rate": 6.130454582152846e-06, "loss": 0.6322, "step": 1585},
    {"epoch": 26.95, "learning_rate": 8.733908661157712e-07, "loss": 0.6351, "step": 1590},
    {"epoch": 27.03, "learning_rate": 3.8863270558464874e-07, "loss": 0.7019, "step": 1595},
    {"epoch": 27.12, "learning_rate": 4.710338226769309e-06, "loss": 0.5841, "step": 1600},
    {"epoch": 27.2, "learning_rate": 1.3533981637935816e-05, "loss": 0.6115, "step": 1605},
    {"epoch": 27.29, "learning_rate": 2.623781141469087e-05, "loss": 0.6035, "step": 1610},
    {"epoch": 27.37, "learning_rate": 4.1926661562530355e-05, "loss": 0.563, "step": 1615},
    {"epoch": 27.46, "learning_rate": 5.949502882833649e-05, "loss": 0.5205, "step": 1620},
    {"epoch": 27.54, "learning_rate": 7.770497117166281e-05, "loss": 0.6091, "step": 1625},
    {"epoch": 27.63, "learning_rate": 9.5273338437469e-05, "loss": 0.5453, "step": 1630},
    {"epoch": 27.71, "learning_rate": 0.00011096218858530857, "loss": 0.6827, "step": 1635},
    {"epoch": 27.8, "learning_rate": 0.00012366601836206377, "loss": 0.6791, "step": 1640},
    {"epoch": 27.88, "learning_rate": 0.00013248966177323044, "loss": 0.6586, "step": 1645},
    {"epoch": 27.97, "learning_rate": 0.00013681136729441537, "loss": 0.6288, "step": 1650},
    {"epoch": 28.05, "learning_rate": 0.00013632660913388432, "loss": 0.6334, "step": 1655},
    {"epoch": 28.14, "learning_rate": 0.00013106954541784705, "loss": 0.5685, "step": 1660},
    {"epoch": 28.22, "learning_rate": 0.00012141061126329179, "loss": 0.6338, "step": 1665},
    {"epoch": 28.31, "learning_rate": 0.00010803041634070808, "loss": 0.6357, "step": 1670},
    {"epoch": 28.39, "learning_rate": 9.187178621569693e-05, "loss": 0.6479, "step": 1675},
    {"epoch": 28.47, "learning_rate": 7.407332684094189e-05, "loss": 0.6516, "step": 1680},
    {"epoch": 28.56, "learning_rate": 5.588919352762469e-05, "loss": 0.5449, "step": 1685},
    {"epoch": 28.64, "learning_rate": 3.860071781652263e-05, "loss": 0.5642, "step": 1690},
    {"epoch": 28.73, "learning_rate": 2.34261193961387e-05, "loss": 0.5598, "step": 1695},
    {"epoch": 28.81, "learning_rate": 1.1434665150939376e-05, "loss": 0.5501, "step": 1700},
    {"epoch": 28.9, "learning_rate": 3.4713240590690974e-06, "loss": 0.6496, "step": 1705},
    {"epoch": 28.98, "learning_rate": 9.722707657416107e-08, "loss": 0.6054, "step": 1710},
    {"epoch": 29.07, "learning_rate": 1.550127458494369e-06, "loss": 0.5238, "step": 1715},
    {"epoch": 29.15, "learning_rate": 7.727647651119086e-06, "loss": 0.5338, "step": 1720},
    {"epoch": 29.24, "learning_rate": 1.8194493250793856e-05, "loss": 0.5257, "step": 1725},
    {"epoch": 29.32, "learning_rate": 3.2213125702984884e-05, "loss": 0.4891, "step": 1730},
    {"epoch": 29.41, "learning_rate": 4.8795732412501724e-05, "loss": 0.5434, "step": 1735},
    {"epoch": 29.49, "learning_rate": 6.677383222936803e-05, "loss": 0.4987, "step": 1740},
    {"epoch": 29.58, "learning_rate": 8.488061161115847e-05, "loss": 0.5118, "step": 1745},
    {"epoch": 29.66, "learning_rate": 0.0001018401897048195, "loss": 0.5495, "step": 1750},
    {"epoch": 29.75, "learning_rate": 0.00011645752234986016, "loss": 0.5927, "step": 1755},
    {"epoch": 29.83, "learning_rate": 0.00012770260998361507, "loss": 0.6367, "step": 1760},
    {"epoch": 29.92, "learning_rate": 0.0001347830758006757, "loss": 0.6231, "step": 1765},
    {"epoch": 30.0, "learning_rate": 0.0001372, "loss": 0.5553, "step": 1770},
    {"epoch": 30.08, "learning_rate": 0.00013478307580067582, "loss": 0.6387, "step": 1775},
    {"epoch": 30.17, "learning_rate": 0.0001277026099836153, "loss": 0.5631, "step": 1780},
    {"epoch": 30.25, "learning_rate": 0.00011645752234986052, "loss": 0.5864, "step": 1785},
    {"epoch": 30.34, "learning_rate": 0.00010184018970481994, "loss": 0.5652, "step": 1790},
    {"epoch": 30.42, "learning_rate": 8.488061161115898e-05, "loss": 0.5867, "step": 1795},
    {"epoch": 30.51, "learning_rate": 6.677383222936853e-05, "loss": 0.5299, "step": 1800},
    {"epoch": 30.59, "learning_rate": 4.879573241250221e-05, "loss": 0.4777, "step": 1805},
    {"epoch": 30.68, "learning_rate": 3.221312570298531e-05, "loss": 0.5385, "step": 1810},
    {"epoch": 30.76, "learning_rate": 1.8194493250794202e-05, "loss": 0.5095, "step": 1815},
    {"epoch": 30.85, "learning_rate": 7.727647651119314e-06, "loss": 0.5411, "step": 1820},
    {"epoch": 30.93, "learning_rate": 1.5501274584944757e-06, "loss": 0.4905, "step": 1825},
    {"epoch": 31.02, "learning_rate": 9.722707657413062e-08, "loss": 0.4989, "step": 1830},
    {"epoch": 31.1, "learning_rate": 3.4713240590689373e-06, "loss": 0.4341, "step": 1835},
    {"epoch": 31.19, "learning_rate": 1.1434665150939101e-05, "loss": 0.4691, "step": 1840},
    {"epoch": 31.27, "learning_rate": 2.3426119396138317e-05, "loss": 0.4898, "step": 1845},
    {"epoch": 31.36, "learning_rate": 3.8600717816522174e-05, "loss": 0.4434, "step": 1850},
    {"epoch": 31.44, "learning_rate": 5.5889193527624196e-05, "loss": 0.4916, "step": 1855},
    {"epoch": 31.53, "learning_rate": 7.40733268409414e-05, "loss": 0.4605, "step": 1860},
    {"epoch": 31.61, "learning_rate": 9.187178621569647e-05, "loss": 0.4772, "step": 1865},
    {"epoch": 31.69, "learning_rate": 0.00010803041634070767, "loss": 0.4936, "step": 1870},
    {"epoch": 31.78, "learning_rate": 0.00012141061126329147, "loss": 0.5163, "step": 1875},
    {"epoch": 31.86, "learning_rate": 0.00013106954541784683, "loss": 0.494, "step": 1880},
    {"epoch": 31.95, "learning_rate": 0.00013632660913388427, "loss": 0.5058, "step": 1885},
    {"epoch": 32.03, "learning_rate": 0.00013681136729441542, "loss": 0.5373, "step": 1890},
    {"epoch": 32.12, "learning_rate": 0.00013248966177323063, "loss": 0.532, "step": 1895},
    {"epoch": 32.2, "learning_rate": 0.00012366601836206407, "loss": 0.4622, "step": 1900},
    {"epoch": 32.29, "learning_rate": 0.00011096218858530896, "loss": 0.5211, "step": 1905},
    {"epoch": 32.37, "learning_rate": 9.527333843747036e-05, "loss": 0.4867, "step": 1910},
    {"epoch": 32.46, "learning_rate": 7.770497117166331e-05, "loss": 0.5372, "step": 1915},
    {"epoch": 32.54, "learning_rate": 5.9495028828336994e-05, "loss": 0.4491, "step": 1920},
    {"epoch": 32.63, "learning_rate": 4.1926661562529915e-05, "loss": 0.514, "step": 1925},
    {"epoch": 32.71, "learning_rate": 2.6237811414691263e-05, "loss": 0.4827, "step": 1930},
    {"epoch": 32.8, "learning_rate": 1.3533981637936699e-05, "loss": 0.5054, "step": 1935},
    {"epoch": 32.88, "learning_rate": 4.710338226769492e-06, "loss": 0.5075, "step": 1940},
    {"epoch": 32.97, "learning_rate": 3.886327055847097e-07, "loss": 0.4416, "step": 1945},
    {"epoch": 33.05, "learning_rate": 8.733908661158474e-07, "loss": 0.3942, "step": 1950},
    {"epoch": 33.14, "learning_rate": 6.130454582152633e-06, "loss": 0.4056, "step": 1955},
    {"epoch": 33.22, "learning_rate": 1.5789388736707714e-05, "loss": 0.4238, "step": 1960},
    {"epoch": 33.31, "learning_rate": 2.916958365929129e-05, "loss": 0.4191, "step": 1965},
    {"epoch": 33.39, "learning_rate": 4.5328213784302344e-05, "loss": 0.4243, "step": 1970},
    {"epoch": 33.47, "learning_rate": 6.31266731590583e-05, "loss": 0.4473, "step": 1975},
    {"epoch": 33.56, "learning_rate": 8.131080647237551e-05, "loss": 0.4325, "step": 1980},
    {"epoch": 33.64, "learning_rate": 9.859928218347668e-05, "loss": 0.4047, "step": 1985},
    {"epoch": 33.73, "learning_rate": 0.00011377388060386145, "loss": 0.4184, "step": 1990},
    {"epoch": 33.81, "learning_rate": 0.0001257653348490602, "loss": 0.4285, "step": 1995},
    {"epoch": 33.9, "learning_rate": 0.00013372867594093094, "loss": 0.5034, "step": 2000},
    {"epoch": 33.98, "learning_rate": 0.00013710277292342587, "loss": 0.5398, "step": 2005},
    {"epoch": 34.07, "learning_rate": 0.0001356498725415056, "loss": 0.4968, "step": 2010},
    {"epoch": 34.15, "learning_rate": 0.00012947235234888084, "loss": 0.4584, "step": 2015},
    {"epoch": 34.24, "learning_rate": 0.00011900550674920667, "loss": 0.4254, "step": 2020},
    {"epoch": 34.32, "learning_rate": 0.00010498687429701411, "loss": 0.472, "step": 2025},
    {"epoch": 34.41, "learning_rate": 8.840426758749807e-05, "loss": 0.4033, "step": 2030},
    {"epoch": 34.49, "learning_rate": 7.042616777063081e-05, "loss": 0.4878, "step": 2035},
    {"epoch": 34.58, "learning_rate": 5.231938838884132e-05, "loss": 0.3808, "step": 2040},
    {"epoch": 34.66, "learning_rate": 3.535981029518117e-05, "loss": 0.4661, "step": 2045},
    {"epoch": 34.75, "learning_rate": 2.07424776501397e-05, "loss": 0.4579, "step": 2050},
    {"epoch": 34.83, "learning_rate": 9.497390016385323e-06, "loss": 0.4363, "step": 2055},
    {"epoch": 34.92, "learning_rate": 2.416924199324002e-06, "loss": 0.3991, "step": 2060},
    {"epoch": 35.0, "learning_rate": 0.0, "loss": 0.3743, "step": 2065},
    {"epoch": 35.08, "learning_rate": 2.4169241993239714e-06, "loss": 0.3955, "step": 2070},
    {"epoch": 35.17, "learning_rate": 9.497390016384774e-06, "loss": 0.3526, "step": 2075},
    {"epoch": 35.25, "learning_rate": 2.074247765013962e-05, "loss": 0.3186, "step": 2080},
    {"epoch": 35.34, "learning_rate": 3.535981029518023e-05, "loss": 0.361, "step": 2085},
    {"epoch": 35.42, "learning_rate": 5.231938838884122e-05, "loss": 0.3574, "step": 2090},
    {"epoch": 35.51, "learning_rate": 7.04261677706307e-05, "loss": 0.3685, "step": 2095},
    {"epoch": 35.59, "learning_rate": 8.840426758749799e-05, "loss": 0.3853, "step": 2100},
    {"epoch": 35.68, "learning_rate": 0.00010498687429701402, "loss": 0.3843, "step": 2105},
    {"epoch": 35.76, "learning_rate": 0.00011900550674920659, "loss": 0.4264, "step": 2110},
    {"epoch": 35.85, "learning_rate": 0.00012947235234888078, "loss": 0.439, "step": 2115},
    {"epoch": 35.93, "learning_rate": 0.00013564987254150536, "loss": 0.4012, "step": 2120},
    {"epoch": 36.02, "learning_rate": 0.00013710277292342587, "loss": 0.4393, "step": 2125},
    {"epoch": 36.1, "learning_rate": 0.0001337286759409313, "loss": 0.3709, "step": 2130},
    {"epoch": 36.19, "learning_rate": 0.00012576533484906025, "loss": 0.3955, "step": 2135},
    {"epoch": 36.27, "learning_rate": 0.00011377388060386227, "loss": 0.4227, "step": 2140},
    {"epoch": 36.36, "learning_rate": 9.859928218347676e-05, "loss": 0.4189, "step": 2145},
    {"epoch": 36.44, "learning_rate": 8.131080647237657e-05, "loss": 0.362, "step": 2150},
    {"epoch": 36.53, "learning_rate": 6.312667315905937e-05, "loss": 0.4009, "step": 2155},
    {"epoch": 36.61, "learning_rate": 4.532821378430335e-05, "loss": 0.3754, "step": 2160},
    {"epoch": 36.69, "learning_rate": 2.9169583659292163e-05, "loss": 0.4087, "step": 2165},
    {"epoch": 36.78, "learning_rate": 1.57893887367084e-05, "loss": 0.3784, "step": 2170},
    {"epoch": 36.86, "learning_rate": 6.1304545821530815e-06, "loss": 0.3498, "step": 2175},
    {"epoch": 36.95, "learning_rate": 8.733908661158625e-07, "loss": 0.3977, "step": 2180},
    {"epoch": 37.03, "learning_rate": 3.8863270558459543e-07, "loss": 0.3301, "step": 2185},
    {"epoch": 37.12, "learning_rate": 4.710338226769454e-06, "loss": 0.3393, "step": 2190},
    {"epoch": 37.2, "learning_rate": 1.353398163793606e-05, "loss": 0.336, "step": 2195},
    {"epoch": 37.29, "learning_rate": 2.623781141469118e-05, "loss": 0.304, "step": 2200},
    {"epoch": 37.37, "learning_rate": 4.1926661562528925e-05, "loss": 0.3332, "step": 2205},
    {"epoch": 37.46, "learning_rate": 5.949502882833689e-05, "loss": 0.3265, "step": 2210},
    {"epoch": 37.54, "learning_rate": 7.770497117166225e-05, "loss": 0.2978, "step": 2215},
    {"epoch": 37.63, "learning_rate": 9.527333843747026e-05, "loss": 0.3156, "step": 2220},
    {"epoch": 37.71, "learning_rate": 0.00011096218858530812, "loss": 0.3648, "step": 2225},
    {"epoch": 37.8, "learning_rate": 0.00012366601836206342, "loss": 0.3573, "step": 2230},
    {"epoch": 37.88, "learning_rate": 0.00013248966177323022, "loss": 0.3774, "step": 2235},
    {"epoch": 37.97, "learning_rate": 0.00013681136729441532, "loss": 0.3772, "step": 2240},
    {"epoch": 38.05, "learning_rate": 0.00013632660913388427, "loss": 0.3823, "step": 2245},
    {"epoch": 38.14, "learning_rate": 0.0001310695454178473, "loss": 0.328, "step": 2250},
    {"epoch": 38.22, "learning_rate": 0.00012141061126329216, "loss": 0.3486, "step": 2255},
    {"epoch": 38.31, "learning_rate": 0.00010803041634070855, "loss": 0.3668, "step": 2260},
    {"epoch": 38.39, "learning_rate": 9.187178621569747e-05, "loss": 0.3798, "step": 2265},
    {"epoch": 38.47, "learning_rate": 7.40733268409415e-05, "loss": 0.4158, "step": 2270},
    {"epoch": 38.56, "learning_rate": 5.58891935276243e-05, "loss": 0.3535, "step": 2275},
    {"epoch": 38.64, "learning_rate": 3.8600717816523143e-05, "loss": 0.3979, "step": 2280},
    {"epoch": 38.73, "learning_rate": 2.3426119396138395e-05, "loss": 0.3366, "step": 2285},
    {"epoch": 38.81, "learning_rate": 1.1434665150939696e-05, "loss": 0.3252, "step": 2290},
    {"epoch": 38.9, "learning_rate": 3.471324059068968e-06, "loss": 0.2936, "step": 2295},
    {"epoch": 38.98, "learning_rate": 9.722707657413823e-08, "loss": 0.3053, "step": 2300},
    {"epoch": 39.07, "learning_rate": 1.5501274584944604e-06, "loss": 0.2954, "step": 2305},
    {"epoch": 39.15, "learning_rate": 7.727647651119269e-06, "loss": 0.3342, "step": 2310},
    {"epoch": 39.24, "learning_rate": 1.8194493250792806e-05, "loss": 0.2785, "step": 2315},
    {"epoch": 39.32, "learning_rate": 3.221312570298606e-05, "loss": 0.2683, "step": 2320},
    {"epoch": 39.41, "learning_rate": 4.8795732412501175e-05, "loss": 0.2651, "step": 2325},
    {"epoch": 39.49, "learning_rate": 6.67738322293694e-05, "loss": 0.2762, "step": 2330},
    {"epoch": 39.58, "learning_rate": 8.488061161115792e-05, "loss": 0.3293, "step": 2335},
    {"epoch": 39.66, "learning_rate": 0.00010184018970481899, "loss": 0.33, "step": 2340},
    {"epoch": 39.75, "learning_rate": 0.00011645752234985975, "loss": 0.3273, "step": 2345},
    {"epoch": 39.83, "learning_rate": 0.00012770260998361477, "loss": 0.3042, "step": 2350},
    {"epoch": 39.92, "learning_rate": 0.0001347830758006758, "loss": 0.3306, "step": 2355},
    {"epoch": 40.0, "learning_rate": 0.0001372, "loss": 0.3255, "step": 2360},
    {"epoch": 40.08, "learning_rate": 0.00013478307580067598, "loss": 0.3174, "step": 2365},
    {"epoch": 40.17, "learning_rate": 0.00012770260998361512, "loss": 0.3026, "step": 2370},
    {"epoch": 40.25, "learning_rate": 0.00011645752234986024, "loss": 0.3501, "step": 2375},
    {"epoch": 40.34, "learning_rate": 0.00010184018970481959, "loss": 0.3238, "step": 2380},
    {"epoch": 40.42, "learning_rate": 8.488061161115858e-05, "loss": 0.308, "step": 2385},
    {"epoch": 40.51, "learning_rate": 6.677383222937007e-05, "loss": 0.3262, "step": 2390},
    {"epoch": 40.59, "learning_rate": 4.8795732412501825e-05, "loss": 0.3656, "step": 2395},
    {"epoch": 40.68, "learning_rate": 3.221312570298663e-05, "loss": 0.2899, "step": 2400},
    {"epoch": 40.76, "learning_rate": 1.8194493250793927e-05, "loss": 0.3111, "step": 2405},
    {"epoch": 40.85, "learning_rate": 7.727647651119131e-06, "loss": 0.3155, "step": 2410},
    {"epoch": 40.93, "learning_rate": 1.5501274584945974e-06, "loss": 0.245, "step": 2415},
    {"epoch": 41.02, "learning_rate": 9.722707657410015e-08, "loss": 0.2667, "step": 2420},
    {"epoch": 41.1, "learning_rate": 3.471324059069067e-06, "loss": 0.2208, "step": 2425},
    {"epoch": 41.19, "learning_rate": 1.1434665150939863e-05, "loss": 0.2605, "step": 2430},
    {"epoch": 41.27, "learning_rate": 2.3426119396137883e-05, "loss": 0.2173, "step": 2435},
    {"epoch": 41.36, "learning_rate": 3.860071781652254e-05, "loss": 0.2611, "step": 2440},
    {"epoch": 41.44, "learning_rate": 5.5889193527624596e-05, "loss": 0.235, "step": 2445},
    {"epoch": 41.53, "learning_rate": 7.407332684094083e-05, "loss": 0.2731, "step": 2450},
    {"epoch": 41.61, "learning_rate": 9.187178621569684e-05, "loss": 0.2705, "step": 2455},
    {"epoch": 41.69, "learning_rate": 0.0001080304163407072, "loss": 0.2805, "step": 2460},
    {"epoch": 41.78, "learning_rate": 0.00012141061126329236, "loss": 0.2565, "step": 2465},
    {"epoch": 41.86, "learning_rate": 0.000131069545417847, "loss": 0.3185, "step": 2470},
    {"epoch": 41.95, "learning_rate": 0.00013632660913388402, "loss": 0.3083, "step": 2475},
    {"epoch": 42.03, "learning_rate": 0.0001368113672944155, "loss": 0.3283, "step": 2480},
    {"epoch": 42.12, "learning_rate": 0.00013248966177323046, "loss": 0.282, "step": 2485},
    {"epoch": 42.2, "learning_rate": 0.00012366601836206383, "loss": 0.2761, "step": 2490},
    {"epoch": 42.29, "learning_rate": 0.00011096218858530942, "loss": 0.2853, "step": 2495},
    {"epoch": 42.37, "learning_rate": 9.527333843746999e-05, "loss": 0.3187, "step": 2500},
    {"epoch": 42.46, "learning_rate": 7.77049711716629e-05, "loss": 0.3139, "step": 2505},
    {"epoch": 42.54, "learning_rate": 5.949502882833756e-05, "loss": 0.2639, "step": 2510},
    {"epoch": 42.63, "learning_rate": 4.192666156253045e-05, "loss": 0.2431, "step": 2515},
    {"epoch": 42.71, "learning_rate": 2.6237811414690944e-05, "loss": 0.259, "step": 2520},
    {"epoch": 42.8, "learning_rate": 1.3533981637936456e-05, "loss": 0.276, "step": 2525},
    {"epoch": 42.88, "learning_rate": 4.710338226769698e-06, "loss": 0.2709, "step": 2530},
    {"epoch": 42.97, "learning_rate": 3.886327055847706e-07, "loss": 0.2713, "step": 2535},
    {"epoch": 43.05, "learning_rate": 8.733908661159159e-07, "loss": 0.2457, "step": 2540},
    {"epoch": 43.14, "learning_rate": 6.1304545821528e-06, "loss": 0.2204, "step": 2545},
    {"epoch": 43.22, "learning_rate": 1.578938873670735e-05, "loss": 0.2285, "step": 2550},
    {"epoch": 43.31, "learning_rate": 2.9169583659290815e-05, "loss": 0.2334, "step": 2555},
    {"epoch": 43.39, "learning_rate": 4.532821378430272e-05, "loss": 0.2263, "step": 2560},
    {"epoch": 43.47, "learning_rate": 6.31266731590587e-05, "loss": 0.2353, "step": 2565},
    {"epoch": 43.56, "learning_rate": 8.131080647237496e-05, "loss": 0.2352, "step": 2570},
    {"epoch": 43.64, "learning_rate": 9.859928218347703e-05, "loss": 0.2328, "step": 2575},
    {"epoch": 43.73, "learning_rate": 0.00011377388060386175, "loss": 0.2444, "step": 2580},
    {"epoch": 43.81, "learning_rate": 0.0001257653348490599, "loss": 0.2268, "step": 2585},
    {"epoch": 43.9, "learning_rate": 0.00013372867594093078, "loss": 0.2535, "step": 2590},
    {"epoch": 43.98, "learning_rate": 0.00013710277292342587, "loss": 0.2902, "step": 2595},
    {"epoch": 44.07, "learning_rate": 0.0001356498725415055, "loss": 0.2786, "step": 2600},
    {"epoch": 44.15, "learning_rate": 0.00012947235234888108, "loss": 0.2634, "step": 2605},
    {"epoch": 44.24, "learning_rate": 0.00011900550674920639, "loss": 0.2493, "step": 2610},
    {"epoch": 44.32, "learning_rate": 0.00010498687429701377, "loss": 0.2681, "step": 2615},
    {"epoch": 44.41, "learning_rate": 8.840426758749863e-05, "loss": 0.2863, "step": 2620},
    {"epoch": 44.49, "learning_rate": 7.042616777063136e-05, "loss": 0.2442, "step": 2625},
    {"epoch": 44.58, "learning_rate": 5.231938838884094e-05, "loss": 0.2379, "step": 2630},
    {"epoch": 44.66, "learning_rate": 3.535981029518082e-05, "loss": 0.2382, "step": 2635},
    {"epoch": 44.75, "learning_rate": 2.074247765014011e-05, "loss": 0.24, "step": 2640},
    {"epoch": 44.83, "learning_rate": 9.497390016385613e-06, "loss": 0.217, "step": 2645},
    {"epoch": 44.92, "learning_rate": 2.416924199323895e-06, "loss": 0.2417, "step": 2650},
    {"epoch": 45.0, "learning_rate": 0.0, "loss": 0.2366, "step": 2655},
    {"epoch": 45.08, "learning_rate": 2.4169241993238265e-06,
|
"loss": 0.207, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 45.17, |
|
"learning_rate": 9.497390016384484e-06, |
|
"loss": 0.1959, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 45.25, |
|
"learning_rate": 2.0742477650139912e-05, |
|
"loss": 0.2013, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 45.34, |
|
"learning_rate": 3.535981029518058e-05, |
|
"loss": 0.1915, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 45.42, |
|
"learning_rate": 5.231938838884067e-05, |
|
"loss": 0.2045, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 45.51, |
|
"learning_rate": 7.042616777063111e-05, |
|
"loss": 0.2009, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 45.59, |
|
"learning_rate": 8.840426758749837e-05, |
|
"loss": 0.2131, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 45.68, |
|
"learning_rate": 0.00010498687429701354, |
|
"loss": 0.2117, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 45.76, |
|
"learning_rate": 0.00011900550674920621, |
|
"loss": 0.2232, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 45.85, |
|
"learning_rate": 0.00012947235234888097, |
|
"loss": 0.2327, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 45.93, |
|
"learning_rate": 0.00013564987254150544, |
|
"loss": 0.2621, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 46.02, |
|
"learning_rate": 0.0001371027729234259, |
|
"loss": 0.2101, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 46.1, |
|
"learning_rate": 0.0001337286759409315, |
|
"loss": 0.2214, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 46.19, |
|
"learning_rate": 0.00012576533484906003, |
|
"loss": 0.2503, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 46.27, |
|
"learning_rate": 0.00011377388060386196, |
|
"loss": 0.216, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 46.36, |
|
"learning_rate": 9.859928218347728e-05, |
|
"loss": 0.2487, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 46.44, |
|
"learning_rate": 8.131080647237713e-05, |
|
"loss": 0.2208, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 46.53, |
|
"learning_rate": 6.312667315905896e-05, |
|
"loss": 0.2208, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 46.61, |
|
"learning_rate": 4.532821378430297e-05, |
|
"loss": 0.2331, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 46.69, |
|
"learning_rate": 2.9169583659292638e-05, |
|
"loss": 0.2307, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 46.78, |
|
"learning_rate": 1.5789388736708765e-05, |
|
"loss": 0.2043, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 46.86, |
|
"learning_rate": 6.130454582152914e-06, |
|
"loss": 0.2309, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 46.95, |
|
"learning_rate": 8.73390866115954e-07, |
|
"loss": 0.211, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 47.03, |
|
"learning_rate": 3.886327055845345e-07, |
|
"loss": 0.2177, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 47.12, |
|
"learning_rate": 4.710338226769599e-06, |
|
"loss": 0.1786, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 47.2, |
|
"learning_rate": 1.3533981637936296e-05, |
|
"loss": 0.1993, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 47.29, |
|
"learning_rate": 2.623781141469073e-05, |
|
"loss": 0.1859, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 47.37, |
|
"learning_rate": 4.1926661562528403e-05, |
|
"loss": 0.1549, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 47.46, |
|
"learning_rate": 5.94950288283373e-05, |
|
"loss": 0.1598, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 47.54, |
|
"learning_rate": 7.770497117166265e-05, |
|
"loss": 0.1624, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 47.63, |
|
"learning_rate": 9.527333843746975e-05, |
|
"loss": 0.1806, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 47.71, |
|
"learning_rate": 0.00011096218858530769, |
|
"loss": 0.219, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 47.8, |
|
"learning_rate": 0.00012366601836206366, |
|
"loss": 0.2425, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 47.88, |
|
"learning_rate": 0.00013248966177323036, |
|
"loss": 0.1936, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 47.97, |
|
"learning_rate": 0.00013681136729441526, |
|
"loss": 0.2217, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 48.05, |
|
"learning_rate": 0.00013632660913388435, |
|
"loss": 0.2092, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 48.14, |
|
"learning_rate": 0.0001310695454178471, |
|
"loss": 0.2336, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 48.22, |
|
"learning_rate": 0.00012141061126329252, |
|
"loss": 0.225, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 48.31, |
|
"learning_rate": 0.00010803041634070903, |
|
"loss": 0.2351, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 48.39, |
|
"learning_rate": 9.18717862156971e-05, |
|
"loss": 0.1936, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 48.47, |
|
"learning_rate": 7.40733268409411e-05, |
|
"loss": 0.2027, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 48.56, |
|
"learning_rate": 5.588919352762486e-05, |
|
"loss": 0.1996, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 48.64, |
|
"learning_rate": 3.860071781652278e-05, |
|
"loss": 0.2031, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 48.73, |
|
"learning_rate": 2.342611939613809e-05, |
|
"loss": 0.1813, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 48.81, |
|
"learning_rate": 1.1434665150940007e-05, |
|
"loss": 0.1967, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 48.9, |
|
"learning_rate": 3.4713240590691508e-06, |
|
"loss": 0.1893, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 48.98, |
|
"learning_rate": 9.722707657411538e-08, |
|
"loss": 0.1778, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 49.07, |
|
"learning_rate": 1.550127458494544e-06, |
|
"loss": 0.1728, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 49.15, |
|
"learning_rate": 7.72764765111901e-06, |
|
"loss": 0.1562, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 49.24, |
|
"learning_rate": 1.8194493250792426e-05, |
|
"loss": 0.1671, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 49.32, |
|
"learning_rate": 3.22131257029864e-05, |
|
"loss": 0.162, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 49.41, |
|
"learning_rate": 4.879573241250157e-05, |
|
"loss": 0.1621, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 49.49, |
|
"learning_rate": 6.677383222936883e-05, |
|
"loss": 0.1595, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 49.58, |
|
"learning_rate": 8.488061161115738e-05, |
|
"loss": 0.1635, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 49.66, |
|
"learning_rate": 0.00010184018970481936, |
|
"loss": 0.1674, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 49.75, |
|
"learning_rate": 0.00011645752234986003, |
|
"loss": 0.1613, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 49.83, |
|
"learning_rate": 0.0001277026099836145, |
|
"loss": 0.188, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 49.92, |
|
"learning_rate": 0.00013478307580067566, |
|
"loss": 0.1844, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 50.0, |
|
"learning_rate": 0.0001372, |
|
"loss": 0.1913, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 50.08, |
|
"learning_rate": 0.00013478307580067612, |
|
"loss": 0.1985, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 50.17, |
|
"learning_rate": 0.00012770260998361542, |
|
"loss": 0.1961, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 50.25, |
|
"learning_rate": 0.00011645752234985994, |
|
"loss": 0.2094, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 50.34, |
|
"learning_rate": 0.00010184018970481924, |
|
"loss": 0.1942, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 50.42, |
|
"learning_rate": 8.488061161115913e-05, |
|
"loss": 0.1878, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 50.51, |
|
"learning_rate": 6.677383222937064e-05, |
|
"loss": 0.1811, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 50.59, |
|
"learning_rate": 4.879573241250144e-05, |
|
"loss": 0.1911, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 50.68, |
|
"learning_rate": 3.221312570298629e-05, |
|
"loss": 0.1697, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 50.76, |
|
"learning_rate": 1.8194493250793653e-05, |
|
"loss": 0.1849, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 50.85, |
|
"learning_rate": 7.727647651119846e-06, |
|
"loss": 0.1565, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 50.93, |
|
"learning_rate": 1.5501274584945138e-06, |
|
"loss": 0.1677, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 51.02, |
|
"learning_rate": 9.7227076574123e-08, |
|
"loss": 0.1654, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 51.1, |
|
"learning_rate": 3.4713240590685794e-06, |
|
"loss": 0.1566, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 51.19, |
|
"learning_rate": 1.143466515093901e-05, |
|
"loss": 0.1451, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 51.27, |
|
"learning_rate": 2.3426119396138188e-05, |
|
"loss": 0.1363, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 51.36, |
|
"learning_rate": 3.86007178165229e-05, |
|
"loss": 0.1452, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 51.44, |
|
"learning_rate": 5.588919352762307e-05, |
|
"loss": 0.1347, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 51.53, |
|
"learning_rate": 7.407332684094123e-05, |
|
"loss": 0.1572, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 51.61, |
|
"learning_rate": 9.187178621569722e-05, |
|
"loss": 0.1435, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 51.69, |
|
"learning_rate": 0.00010803041634070754, |
|
"loss": 0.1511, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 51.78, |
|
"learning_rate": 0.00012141061126329137, |
|
"loss": 0.1698, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 51.86, |
|
"learning_rate": 0.00013106954541784719, |
|
"loss": 0.1696, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 51.95, |
|
"learning_rate": 0.00013632660913388408, |
|
"loss": 0.1841, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 52.03, |
|
"learning_rate": 0.00013681136729441545, |
|
"loss": 0.1958, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 52.12, |
|
"learning_rate": 0.00013248966177323033, |
|
"loss": 0.1647, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 52.2, |
|
"learning_rate": 0.00012366601836206358, |
|
"loss": 0.1537, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 52.29, |
|
"learning_rate": 0.00011096218858530911, |
|
"loss": 0.1952, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 52.37, |
|
"learning_rate": 9.527333843747141e-05, |
|
"loss": 0.1742, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 52.46, |
|
"learning_rate": 7.770497117166251e-05, |
|
"loss": 0.1563, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 52.54, |
|
"learning_rate": 5.949502882833716e-05, |
|
"loss": 0.1734, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 52.63, |
|
"learning_rate": 4.192666156253007e-05, |
|
"loss": 0.1661, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 52.71, |
|
"learning_rate": 2.6237811414692164e-05, |
|
"loss": 0.1631, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 52.8, |
|
"learning_rate": 1.353398163793622e-05, |
|
"loss": 0.1648, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 52.88, |
|
"learning_rate": 4.710338226769553e-06, |
|
"loss": 0.1587, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 52.97, |
|
"learning_rate": 3.8863270558472487e-07, |
|
"loss": 0.1612, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 53.05, |
|
"learning_rate": 8.733908661156646e-07, |
|
"loss": 0.1572, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 53.14, |
|
"learning_rate": 6.130454582152967e-06, |
|
"loss": 0.1406, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 53.22, |
|
"learning_rate": 1.5789388736707606e-05, |
|
"loss": 0.1475, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 53.31, |
|
"learning_rate": 2.916958365929115e-05, |
|
"loss": 0.1196, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 53.39, |
|
"learning_rate": 4.5328213784303096e-05, |
|
"loss": 0.1415, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 53.47, |
|
"learning_rate": 6.31266731590591e-05, |
|
"loss": 0.1302, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 53.56, |
|
"learning_rate": 8.131080647237535e-05, |
|
"loss": 0.1331, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 53.64, |
|
"learning_rate": 9.859928218347565e-05, |
|
"loss": 0.1277, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 53.73, |
|
"learning_rate": 0.00011377388060386206, |
|
"loss": 0.1383, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 53.81, |
|
"learning_rate": 0.0001257653348490601, |
|
"loss": 0.1486, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 53.9, |
|
"learning_rate": 0.00013372867594093092, |
|
"loss": 0.1657, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 53.98, |
|
"learning_rate": 0.0001371027729234258, |
|
"loss": 0.1587, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 54.07, |
|
"learning_rate": 0.0001356498725415054, |
|
"loss": 0.1642, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 54.15, |
|
"learning_rate": 0.0001294723523488809, |
|
"loss": 0.1701, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 54.24, |
|
"learning_rate": 0.00011900550674920745, |
|
"loss": 0.1712, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 54.32, |
|
"learning_rate": 0.00010498687429701509, |
|
"loss": 0.1495, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 54.41, |
|
"learning_rate": 8.840426758749825e-05, |
|
"loss": 0.1551, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 54.49, |
|
"learning_rate": 7.042616777063097e-05, |
|
"loss": 0.1707, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 54.58, |
|
"learning_rate": 5.2319388388842434e-05, |
|
"loss": 0.1731, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 54.66, |
|
"learning_rate": 3.535981029518047e-05, |
|
"loss": 0.1576, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 54.75, |
|
"learning_rate": 2.074247765013982e-05, |
|
"loss": 0.1289, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 54.83, |
|
"learning_rate": 9.497390016385406e-06, |
|
"loss": 0.1336, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 54.92, |
|
"learning_rate": 2.4169241993242987e-06, |
|
"loss": 0.1407, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 55.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.1442, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 55.08, |
|
"learning_rate": 2.4169241993239332e-06, |
|
"loss": 0.1158, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 55.17, |
|
"learning_rate": 9.497390016384691e-06, |
|
"loss": 0.1171, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 55.25, |
|
"learning_rate": 2.0742477650138808e-05, |
|
"loss": 0.1272, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 55.34, |
|
"learning_rate": 3.535981029518094e-05, |
|
"loss": 0.1239, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 55.42, |
|
"learning_rate": 5.2319388388841065e-05, |
|
"loss": 0.1224, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 55.51, |
|
"learning_rate": 7.042616777062956e-05, |
|
"loss": 0.1238, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 55.59, |
|
"learning_rate": 8.84042675874969e-05, |
|
"loss": 0.1305, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 55.68, |
|
"learning_rate": 0.00010498687429701388, |
|
"loss": 0.1418, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 55.76, |
|
"learning_rate": 0.00011900550674920648, |
|
"loss": 0.1285, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 55.85, |
|
"learning_rate": 0.00012947235234888027, |
|
"loss": 0.1404, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 55.93, |
|
"learning_rate": 0.00013564987254150552, |
|
"loss": 0.1524, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 56.02, |
|
"learning_rate": 0.00013710277292342587, |
|
"loss": 0.1522, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 56.1, |
|
"learning_rate": 0.00013372867594093135, |
|
"loss": 0.1521, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 56.19, |
|
"learning_rate": 0.0001257653348490609, |
|
"loss": 0.1473, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 56.27, |
|
"learning_rate": 0.00011377388060386166, |
|
"loss": 0.152, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 56.36, |
|
"learning_rate": 9.859928218347691e-05, |
|
"loss": 0.1528, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 56.44, |
|
"learning_rate": 8.131080647237673e-05, |
|
"loss": 0.1561, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 56.53, |
|
"learning_rate": 6.312667315905857e-05, |
|
"loss": 0.1366, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 56.61, |
|
"learning_rate": 4.532821378430259e-05, |
|
"loss": 0.1394, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 56.69, |
|
"learning_rate": 2.9169583659292302e-05, |
|
"loss": 0.1275, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 56.78, |
|
"learning_rate": 1.5789388736708507e-05, |
|
"loss": 0.1409, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 56.86, |
|
"learning_rate": 6.130454582152747e-06, |
|
"loss": 0.1339, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 56.95, |
|
"learning_rate": 8.73390866115893e-07, |
|
"loss": 0.1218, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 57.03, |
|
"learning_rate": 3.8863270558457256e-07, |
|
"loss": 0.1235, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 57.12, |
|
"learning_rate": 4.710338226769036e-06, |
|
"loss": 0.1012, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 57.2, |
|
"learning_rate": 1.353398163793654e-05, |
|
"loss": 0.1109, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 57.29, |
|
"learning_rate": 2.6237811414691053e-05, |
|
"loss": 0.117, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 57.37, |
|
"learning_rate": 4.1926661562528776e-05, |
|
"loss": 0.1091, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 57.46, |
|
"learning_rate": 5.9495028828335754e-05, |
|
"loss": 0.1172, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 57.54, |
|
"learning_rate": 7.770497117166304e-05, |
|
"loss": 0.1116, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 57.63, |
|
"learning_rate": 9.527333843747011e-05, |
|
"loss": 0.1244, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 57.71, |
|
"learning_rate": 0.00011096218858530799, |
|
"loss": 0.1134, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 57.8, |
|
"learning_rate": 0.0001236660183620639, |
|
"loss": 0.1238, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 57.88, |
|
"learning_rate": 0.00013248966177323052, |
|
"loss": 0.1272, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 57.97, |
|
"learning_rate": 0.0001368113672944153, |
|
"loss": 0.1467, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 58.05, |
|
"learning_rate": 0.0001363266091338843, |
|
"loss": 0.1349, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 58.14, |
|
"learning_rate": 0.00013106954541784694, |
|
"loss": 0.1379, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 58.22, |
|
"learning_rate": 0.00012141061126329227, |
|
"loss": 0.1474, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 58.31, |
|
"learning_rate": 0.00010803041634070869, |
|
"loss": 0.1547, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 58.39, |
|
"learning_rate": 9.187178621569854e-05, |
|
"loss": 0.1392, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 58.47, |
|
"learning_rate": 7.40733268409407e-05, |
|
"loss": 0.1571, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 58.56, |
|
"learning_rate": 5.588919352762446e-05, |
|
"loss": 0.1301, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 58.64, |
|
"learning_rate": 3.860071781652417e-05, |
|
"loss": 0.1265, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 58.73, |
|
"learning_rate": 2.3426119396139255e-05, |
|
"loss": 0.1262, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 58.81, |
|
"learning_rate": 1.1434665150939787e-05, |
|
"loss": 0.1344, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 58.9, |
|
"learning_rate": 3.471324059069021e-06, |
|
"loss": 0.1164, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 58.98, |
|
"learning_rate": 9.722707657419916e-08, |
|
"loss": 0.118, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 59.07, |
|
"learning_rate": 1.5501274584942167e-06, |
|
"loss": 0.117, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 59.15, |
|
"learning_rate": 7.727647651119192e-06, |
|
"loss": 0.1019, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 59.24, |
|
"learning_rate": 1.8194493250792694e-05, |
|
"loss": 0.1007, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 59.32, |
|
"learning_rate": 3.2213125702985094e-05, |
|
"loss": 0.1053, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 59.41, |
|
"learning_rate": 4.8795732412501954e-05, |
|
"loss": 0.0923, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 59.49, |
|
"learning_rate": 6.677383222936923e-05, |
|
"loss": 0.0997, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 59.58, |
|
"learning_rate": 8.488061161115776e-05, |
|
"loss": 0.091, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 59.66, |
|
"learning_rate": 0.00010184018970481971, |
|
"loss": 0.106, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 59.75, |
|
"learning_rate": 0.00011645752234986032, |
|
"loss": 0.12, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 59.83, |
|
"learning_rate": 0.0001277026099836147, |
|
"loss": 0.1184, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 59.92, |
|
"learning_rate": 0.00013478307580067577, |
|
"loss": 0.1445, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 60.0, |
|
"learning_rate": 0.0001372, |
|
"loss": 0.1241, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 60.08, |
|
"learning_rate": 0.000134783075800676, |
|
"loss": 0.1203, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 60.17, |
|
"learning_rate": 0.0001277026099836152, |
|
"loss": 0.1347, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 60.25, |
|
"learning_rate": 0.00011645752234986105, |
|
"loss": 0.1409, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 60.34, |
|
"learning_rate": 0.00010184018970482059, |
|
"loss": 0.125, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 60.42, |
|
"learning_rate": 8.488061161115875e-05, |
|
"loss": 0.128, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 60.51, |
|
"learning_rate": 6.677383222937024e-05, |
|
"loss": 0.131, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 60.59, |
|
"learning_rate": 4.8795732412502916e-05, |
|
"loss": 0.1158, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 60.68, |
|
"learning_rate": 3.221312570298595e-05, |
|
"loss": 0.1166, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 60.76, |
|
"learning_rate": 1.819449325079338e-05, |
|
"loss": 0.1199, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 60.85, |
|
"learning_rate": 7.727647651119657e-06, |
|
"loss": 0.1088, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 60.93, |
|
"learning_rate": 1.55012745849443e-06, |
|
"loss": 0.1035, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 61.02, |
|
"learning_rate": 9.722707657414585e-08, |
|
"loss": 0.1119, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 61.1, |
|
"learning_rate": 3.471324059068709e-06, |
|
"loss": 0.1008, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 61.19, |
|
"learning_rate": 1.143466515093923e-05, |
|
"loss": 0.0991, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 61.27, |
|
"learning_rate": 2.3426119396138493e-05, |
|
"loss": 0.0946, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 61.36, |
|
"learning_rate": 3.860071781652325e-05, |
|
"loss": 0.0931, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 61.44, |
|
"learning_rate": 5.588919352762347e-05, |
|
"loss": 0.0919, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 61.53, |
|
"learning_rate": 7.407332684093968e-05, |
|
"loss": 0.0903, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 61.61, |
|
"learning_rate": 9.18717862156976e-05, |
|
"loss": 0.0997, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 61.69, |
|
"learning_rate": 0.00010803041634070786, |
|
"loss": 0.0884, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 61.78, |
|
"learning_rate": 0.00012141061126329163, |
|
"loss": 0.1136, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 61.86, |
|
"learning_rate": 0.00013106954541784654, |
|
"loss": 0.1208, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 61.95, |
|
"learning_rate": 0.00013632660913388416, |
|
"loss": 0.1254, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 62.03, |
|
"learning_rate": 0.0001368113672944154, |
|
"loss": 0.1188, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 62.12, |
|
"learning_rate": 0.00013248966177323087, |
|
"loss": 0.1197, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 62.2, |
|
"learning_rate": 0.0001236660183620645, |
|
"loss": 0.1203, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 62.29, |
|
"learning_rate": 0.00011096218858530879, |
|
"loss": 0.1246, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 62.37, |
|
"learning_rate": 9.527333843747105e-05, |
|
"loss": 0.1079, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 62.46, |
|
"learning_rate": 7.770497117166404e-05, |
|
"loss": 0.1164, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 62.54, |
|
"learning_rate": 5.9495028828336764e-05, |
|
"loss": 0.1245, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 62.63, |
|
"learning_rate": 4.1926661562529704e-05, |
|
"loss": 0.1111, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 62.71, |
|
"learning_rate": 2.6237811414691842e-05, |
|
"loss": 0.1016, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 62.8, |
|
"learning_rate": 1.3533981637937141e-05, |
|
"loss": 0.102, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 62.88, |
|
"learning_rate": 4.710338226769408e-06, |
|
"loss": 0.1049, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 62.97, |
|
"learning_rate": 3.886327055846792e-07, |
|
"loss": 0.1034, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 63.05, |
|
"learning_rate": 8.733908661157332e-07, |
|
"loss": 0.0972, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 63.14, |
|
"learning_rate": 6.130454582152328e-06, |
|
"loss": 0.1074, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 63.22, |
|
"learning_rate": 1.5789388736707867e-05, |
|
"loss": 0.0942, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 63.31, |
|
"learning_rate": 2.916958365929148e-05, |
|
"loss": 0.099, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 63.39, |
|
"learning_rate": 4.532821378430164e-05, |
|
"loss": 0.0895, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 63.47, |
|
"learning_rate": 6.312667315905757e-05, |
|
"loss": 0.0808, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 63.56, |
|
"learning_rate": 8.131080647237574e-05, |
|
"loss": 0.0872, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 63.64, |
|
"learning_rate": 9.859928218347602e-05, |
|
"loss": 0.0956, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 63.73, |
|
"learning_rate": 0.00011377388060386089, |
|
"loss": 0.0938, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 63.81, |
|
"learning_rate": 0.00012576533484906033, |
|
"loss": 0.0916, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 63.9, |
|
"learning_rate": 0.00013372867594093105, |
|
"loss": 0.1239, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 63.98, |
|
"learning_rate": 0.0001371027729234258, |
|
"loss": 0.1043, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 64.07, |
|
"learning_rate": 0.00013564987254150574, |
|
"loss": 0.122, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 64.15, |
|
"learning_rate": 0.00012947235234888073, |
|
"loss": 0.1096, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 64.24, |
|
"learning_rate": 0.00011900550674920717, |
|
"loss": 0.1126, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 64.32, |
|
"learning_rate": 0.0001049868742970164, |
|
"loss": 0.1065, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 64.41, |
|
"learning_rate": 8.840426758749784e-05, |
|
"loss": 0.1101, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 64.49, |
|
"learning_rate": 7.042616777063057e-05, |
|
"loss": 0.1059, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 64.58, |
|
"learning_rate": 5.231938838884205e-05, |
|
"loss": 0.1075, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 64.66, |
|
"learning_rate": 3.535981029518182e-05, |
|
"loss": 0.1053, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 64.75, |
|
"learning_rate": 2.0742477650140925e-05, |
|
"loss": 0.1078, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 64.83, |
|
"learning_rate": 9.49739001638421e-06, |
|
"loss": 0.1014, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 64.92, |
|
"learning_rate": 2.416924199324192e-06, |
|
"loss": 0.1026, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 65.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.0956, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 65.08, |
|
"learning_rate": 2.4169241993240324e-06, |
|
"loss": 0.0804, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 65.17, |
|
"learning_rate": 9.497390016383905e-06, |
|
"loss": 0.0904, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 65.25, |
|
"learning_rate": 2.074247765014049e-05, |
|
"loss": 0.098, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 65.34, |
|
"learning_rate": 3.5359810295179585e-05, |
|
"loss": 0.0852, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 65.42, |
|
"learning_rate": 5.231938838884145e-05, |
|
"loss": 0.0784, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 65.51, |
|
"learning_rate": 7.042616777062997e-05, |
|
"loss": 0.077, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 65.59, |
|
"learning_rate": 8.840426758749542e-05, |
|
"loss": 0.0785, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 65.68, |
|
"learning_rate": 0.00010498687429701424, |
|
"loss": 0.0817, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 65.76, |
|
"learning_rate": 0.00011900550674920675, |
|
"loss": 0.098, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 65.85, |
|
"learning_rate": 0.00012947235234888046, |
|
"loss": 0.0944, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 65.93, |
|
"learning_rate": 0.0001356498725415052, |
|
"loss": 0.1064, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 66.02, |
|
"learning_rate": 0.00013710277292342595, |
|
"loss": 0.1177, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 66.1, |
|
"learning_rate": 0.00013372867594093062, |
|
"loss": 0.0901, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 66.19, |
|
"learning_rate": 0.00012576533484906066, |
|
"loss": 0.1034, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 66.27, |
|
"learning_rate": 0.00011377388060386282, |
|
"loss": 0.1063, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 66.36, |
|
"learning_rate": 9.859928218347832e-05, |
|
"loss": 0.1063, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 66.44, |
|
"learning_rate": 8.131080647237825e-05, |
|
"loss": 0.1231, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 66.53, |
|
"learning_rate": 6.312667315905816e-05, |
|
"loss": 0.1059, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 66.61, |
|
"learning_rate": 4.532821378430405e-05, |
|
"loss": 0.102, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 66.69, |
|
"learning_rate": 2.9169583659291973e-05, |
|
"loss": 0.0961, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 66.78, |
|
"learning_rate": 1.5789388736709497e-05, |
|
"loss": 0.0941, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 66.86, |
|
"learning_rate": 6.130454582154194e-06, |
|
"loss": 0.0914, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 66.95, |
|
"learning_rate": 8.733908661158245e-07, |
|
"loss": 0.0927, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 67.03, |
|
"learning_rate": 3.8863270558461825e-07, |
|
"loss": 0.096, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 67.12, |
|
"learning_rate": 4.710338226769187e-06, |
|
"loss": 0.0744, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 67.2, |
|
"learning_rate": 1.3533981637934452e-05, |
|
"loss": 0.0924, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 67.29, |
|
"learning_rate": 2.6237811414689833e-05, |
|
"loss": 0.0853, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 67.37, |
|
"learning_rate": 4.192666156253094e-05, |
|
"loss": 0.072, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 67.46, |
|
"learning_rate": 5.949502882833616e-05, |
|
"loss": 0.0702, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 67.54, |
|
"learning_rate": 7.77049711716615e-05, |
|
"loss": 0.0825, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 67.63, |
|
"learning_rate": 9.527333843746868e-05, |
|
"loss": 0.0771, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 67.71, |
|
"learning_rate": 0.00011096218858530984, |
|
"loss": 0.0807, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 67.8, |
|
"learning_rate": 0.00012366601836206413, |
|
"loss": 0.0884, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 67.88, |
|
"learning_rate": 0.00013248966177322998, |
|
"loss": 0.087, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 67.97, |
|
"learning_rate": 0.00013681136729441534, |
|
"loss": 0.0841, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 68.05, |
|
"learning_rate": 0.00013632660913388454, |
|
"loss": 0.106, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 68.14, |
|
"learning_rate": 0.00013106954541784678, |
|
"loss": 0.0954, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 68.22, |
|
"learning_rate": 0.00012141061126329201, |
|
"loss": 0.0902, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 68.31, |
|
"learning_rate": 0.00010803041634070835, |
|
"loss": 0.1001, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 68.39, |
|
"learning_rate": 9.187178621569818e-05, |
|
"loss": 0.0991, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 68.47, |
|
"learning_rate": 7.407332684094223e-05, |
|
"loss": 0.0972, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 68.56, |
|
"learning_rate": 5.588919352762407e-05, |
|
"loss": 0.0961, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 68.64, |
|
"learning_rate": 3.860071781652205e-05, |
|
"loss": 0.0918, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 68.73, |
|
"learning_rate": 2.342611939613895e-05, |
|
"loss": 0.0951, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 68.81, |
|
"learning_rate": 1.1434665150939565e-05, |
|
"loss": 0.0848, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 68.9, |
|
"learning_rate": 3.4713240590695086e-06, |
|
"loss": 0.0889, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 68.98, |
|
"learning_rate": 9.722707657406968e-08, |
|
"loss": 0.0915, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 69.07, |
|
"learning_rate": 1.5501274584943003e-06, |
|
"loss": 0.0866, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 69.15, |
|
"learning_rate": 7.727647651119382e-06, |
|
"loss": 0.0738, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 69.24, |
|
"learning_rate": 1.8194493250792965e-05, |
|
"loss": 0.0928, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 69.32, |
|
"learning_rate": 3.221312570298377e-05, |
|
"loss": 0.0694, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 69.41, |
|
"learning_rate": 4.879573241250233e-05, |
|
"loss": 0.0678, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 69.49, |
|
"learning_rate": 6.677383222936964e-05, |
|
"loss": 0.0659, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 69.58, |
|
"learning_rate": 8.488061161115815e-05, |
|
"loss": 0.0785, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 69.66, |
|
"learning_rate": 0.00010184018970481836, |
|
"loss": 0.0811, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 69.75, |
|
"learning_rate": 0.00011645752234985924, |
|
"loss": 0.0769, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 69.83, |
|
"learning_rate": 0.00012770260998361588, |
|
"loss": 0.0816, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 69.92, |
|
"learning_rate": 0.00013478307580067585, |
|
"loss": 0.0849, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 70.0, |
|
"learning_rate": 0.0001372, |
|
"loss": 0.0908, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 70.08, |
|
"learning_rate": 0.00013478307580067593, |
|
"loss": 0.0923, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 70.17, |
|
"learning_rate": 0.000127702609983616, |
|
"loss": 0.0909, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 70.25, |
|
"learning_rate": 0.00011645752234985937, |
|
"loss": 0.102, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 70.34, |
|
"learning_rate": 0.00010184018970482025, |
|
"loss": 0.0877, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 70.42, |
|
"learning_rate": 8.488061161115835e-05, |
|
"loss": 0.0968, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 70.51, |
|
"learning_rate": 6.677383222936983e-05, |
|
"loss": 0.0911, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 70.59, |
|
"learning_rate": 4.87957324125044e-05, |
|
"loss": 0.0904, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 70.68, |
|
"learning_rate": 3.22131257029856e-05, |
|
"loss": 0.0836, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 70.76, |
|
"learning_rate": 1.8194493250793104e-05, |
|
"loss": 0.0777, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 70.85, |
|
"learning_rate": 7.727647651119474e-06, |
|
"loss": 0.0776, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 70.93, |
|
"learning_rate": 1.5501274584947573e-06, |
|
"loss": 0.0893, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 71.02, |
|
"learning_rate": 9.722707657406206e-08, |
|
"loss": 0.0805, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 71.1, |
|
"learning_rate": 3.4713240590694476e-06, |
|
"loss": 0.0848, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 71.19, |
|
"learning_rate": 1.1434665150939452e-05, |
|
"loss": 0.0684, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 71.27, |
|
"learning_rate": 2.3426119396137327e-05, |
|
"loss": 0.0728, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 71.36, |
|
"learning_rate": 3.860071781652187e-05, |
|
"loss": 0.067, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 71.44, |
|
"learning_rate": 5.5889193527621953e-05, |
|
"loss": 0.0669, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 71.53, |
|
"learning_rate": 7.407332684094203e-05, |
|
"loss": 0.0603, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 71.61, |
|
"learning_rate": 9.187178621569615e-05, |
|
"loss": 0.0772, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 71.69, |
|
"learning_rate": 0.0001080304163407082, |
|
"loss": 0.0701, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 71.78, |
|
"learning_rate": 0.00012141061126329064, |
|
"loss": 0.0767, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 71.86, |
|
"learning_rate": 0.00013106954541784589, |
|
"loss": 0.0867, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 71.95, |
|
"learning_rate": 0.0001363266091338842, |
|
"loss": 0.0768, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 72.03, |
|
"learning_rate": 0.00013681136729441537, |
|
"loss": 0.0877, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 72.12, |
|
"learning_rate": 0.00013248966177323074, |
|
"loss": 0.0935, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 72.2, |
|
"learning_rate": 0.00012366601836206543, |
|
"loss": 0.0872, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 72.29, |
|
"learning_rate": 0.00011096218858531, |
|
"loss": 0.0952, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 72.37, |
|
"learning_rate": 9.527333843746887e-05, |
|
"loss": 0.092, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 72.46, |
|
"learning_rate": 7.770497117166364e-05, |
|
"loss": 0.0833, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 72.54, |
|
"learning_rate": 5.9495028828338295e-05, |
|
"loss": 0.0857, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 72.63, |
|
"learning_rate": 4.192666156253113e-05, |
|
"loss": 0.0812, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 72.71, |
|
"learning_rate": 2.6237811414689992e-05, |
|
"loss": 0.0871, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 72.8, |
|
"learning_rate": 1.353398163793574e-05, |
|
"loss": 0.078, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 72.88, |
|
"learning_rate": 4.7103382267699724e-06, |
|
"loss": 0.0784, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 72.97, |
|
"learning_rate": 3.886327055846411e-07, |
|
"loss": 0.0798, |
|
"step": 4305 |
|
}, |
|
{ |
|
"epoch": 73.05, |
|
"learning_rate": 8.733908661154818e-07, |
|
"loss": 0.0737, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 73.14, |
|
"learning_rate": 6.130454582153303e-06, |
|
"loss": 0.071, |
|
"step": 4315 |
|
}, |
|
{ |
|
"epoch": 73.22, |
|
"learning_rate": 1.5789388736708124e-05, |
|
"loss": 0.0664, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 73.31, |
|
"learning_rate": 2.9169583659291807e-05, |
|
"loss": 0.0646, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 73.39, |
|
"learning_rate": 4.5328213784302026e-05, |
|
"loss": 0.0692, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 73.47, |
|
"learning_rate": 6.312667315905602e-05, |
|
"loss": 0.0647, |
|
"step": 4335 |
|
}, |
|
{ |
|
"epoch": 73.56, |
|
"learning_rate": 8.131080647237614e-05, |
|
"loss": 0.0666, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 73.64, |
|
"learning_rate": 9.859928218347813e-05, |
|
"loss": 0.074, |
|
"step": 4345 |
|
}, |
|
{ |
|
"epoch": 73.73, |
|
"learning_rate": 0.0001137738806038612, |
|
"loss": 0.0691, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 73.81, |
|
"learning_rate": 0.00012576533484905946, |
|
"loss": 0.0743, |
|
"step": 4355 |
|
}, |
|
{ |
|
"epoch": 73.9, |
|
"learning_rate": 0.00013372867594093057, |
|
"loss": 0.0751, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 73.98, |
|
"learning_rate": 0.00013710277292342595, |
|
"loss": 0.0803, |
|
"step": 4365 |
|
}, |
|
{ |
|
"epoch": 74.07, |
|
"learning_rate": 0.00013564987254150566, |
|
"loss": 0.0772, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 74.15, |
|
"learning_rate": 0.0001294723523488814, |
|
"loss": 0.0829, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 74.24, |
|
"learning_rate": 0.0001190055067492069, |
|
"loss": 0.086, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 74.32, |
|
"learning_rate": 0.00010498687429701605, |
|
"loss": 0.0936, |
|
"step": 4385 |
|
}, |
|
{ |
|
"epoch": 74.41, |
|
"learning_rate": 8.840426758749746e-05, |
|
"loss": 0.0903, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 74.49, |
|
"learning_rate": 7.042616777063211e-05, |
|
"loss": 0.0929, |
|
"step": 4395 |
|
}, |
|
{ |
|
"epoch": 74.58, |
|
"learning_rate": 5.231938838884165e-05, |
|
"loss": 0.0843, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 74.66, |
|
"learning_rate": 3.535981029518147e-05, |
|
"loss": 0.0722, |
|
"step": 4405 |
|
}, |
|
{ |
|
"epoch": 74.75, |
|
"learning_rate": 2.074247765014203e-05, |
|
"loss": 0.0749, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 74.83, |
|
"learning_rate": 9.497390016384994e-06, |
|
"loss": 0.0694, |
|
"step": 4415 |
|
}, |
|
{ |
|
"epoch": 74.92, |
|
"learning_rate": 2.4169241993240857e-06, |
|
"loss": 0.0763, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 75.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.0681, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 75.08, |
|
"learning_rate": 2.4169241993236287e-06, |
|
"loss": 0.0652, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 75.17, |
|
"learning_rate": 9.497390016384112e-06, |
|
"loss": 0.0767, |
|
"step": 4435 |
|
}, |
|
{ |
|
"epoch": 75.25, |
|
"learning_rate": 2.074247765014078e-05, |
|
"loss": 0.0677, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 75.34, |
|
"learning_rate": 3.535981029517994e-05, |
|
"loss": 0.0722, |
|
"step": 4445 |
|
}, |
|
{ |
|
"epoch": 75.42, |
|
"learning_rate": 5.2319388388839954e-05, |
|
"loss": 0.058, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 75.51, |
|
"learning_rate": 7.042616777063036e-05, |
|
"loss": 0.0625, |
|
"step": 4455 |
|
}, |
|
{ |
|
"epoch": 75.59, |
|
"learning_rate": 8.84042675874958e-05, |
|
"loss": 0.0646, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 75.68, |
|
"learning_rate": 0.00010498687429701457, |
|
"loss": 0.0574, |
|
"step": 4465 |
|
}, |
|
{ |
|
"epoch": 75.76, |
|
"learning_rate": 0.00011900550674920571, |
|
"loss": 0.0658, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 75.85, |
|
"learning_rate": 0.00012947235234888062, |
|
"loss": 0.0753, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 75.93, |
|
"learning_rate": 0.00013564987254150528, |
|
"loss": 0.0723, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 76.02, |
|
"learning_rate": 0.00013710277292342592, |
|
"loss": 0.0786, |
|
"step": 4485 |
|
}, |
|
{ |
|
"epoch": 76.1, |
|
"learning_rate": 0.0001337286759409311, |
|
"loss": 0.068, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 76.19, |
|
"learning_rate": 0.00012576533484906044, |
|
"loss": 0.0919, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 76.27, |
|
"learning_rate": 0.00011377388060386252, |
|
"loss": 0.0829, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 76.36, |
|
"learning_rate": 9.859928218347796e-05, |
|
"loss": 0.0836, |
|
"step": 4505 |
|
}, |
|
{ |
|
"epoch": 76.44, |
|
"learning_rate": 8.131080647237784e-05, |
|
"loss": 0.081, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 76.53, |
|
"learning_rate": 6.312667315905776e-05, |
|
"loss": 0.0725, |
|
"step": 4515 |
|
}, |
|
{ |
|
"epoch": 76.61, |
|
"learning_rate": 4.5328213784303666e-05, |
|
"loss": 0.0789, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 76.69, |
|
"learning_rate": 2.9169583659291645e-05, |
|
"loss": 0.0796, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 76.78, |
|
"learning_rate": 1.5789388736709236e-05, |
|
"loss": 0.075, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 76.86, |
|
"learning_rate": 6.130454582154026e-06, |
|
"loss": 0.0759, |
|
"step": 4535 |
|
}, |
|
{ |
|
"epoch": 76.95, |
|
"learning_rate": 8.733908661157635e-07, |
|
"loss": 0.0688, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 77.03, |
|
"learning_rate": 3.8863270558465636e-07, |
|
"loss": 0.0699, |
|
"step": 4545 |
|
}, |
|
{ |
|
"epoch": 77.12, |
|
"learning_rate": 4.710338226769332e-06, |
|
"loss": 0.0637, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 77.2, |
|
"learning_rate": 1.3533981637934696e-05, |
|
"loss": 0.0541, |
|
"step": 4555 |
|
}, |
|
{ |
|
"epoch": 77.29, |
|
"learning_rate": 2.623781141469015e-05, |
|
"loss": 0.058, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 77.37, |
|
"learning_rate": 4.192666156253132e-05, |
|
"loss": 0.0671, |
|
"step": 4565 |
|
}, |
|
{ |
|
"epoch": 77.46, |
|
"learning_rate": 5.949502882833657e-05, |
|
"loss": 0.0629, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 77.54, |
|
"learning_rate": 7.770497117166192e-05, |
|
"loss": 0.0566, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 77.63, |
|
"learning_rate": 9.527333843746906e-05, |
|
"loss": 0.059, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 77.71, |
|
"learning_rate": 0.00011096218858531017, |
|
"loss": 0.0611, |
|
"step": 4585 |
|
}, |
|
{ |
|
"epoch": 77.8, |
|
"learning_rate": 0.00012366601836206437, |
|
"loss": 0.067, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 77.88, |
|
"learning_rate": 0.0001324896617732301, |
|
"loss": 0.0716, |
|
"step": 4595 |
|
}, |
|
{ |
|
"epoch": 77.97, |
|
"learning_rate": 0.00013681136729441537, |
|
"loss": 0.0714, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 78.05, |
|
"learning_rate": 0.00013632660913388448, |
|
"loss": 0.0818, |
|
"step": 4605 |
|
}, |
|
{ |
|
"epoch": 78.14, |
|
"learning_rate": 0.00013106954541784662, |
|
"loss": 0.0696, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 78.22, |
|
"learning_rate": 0.00012141061126329175, |
|
"loss": 0.0852, |
|
"step": 4615 |
|
}, |
|
{ |
|
"epoch": 78.31, |
|
"learning_rate": 0.00010803041634070802, |
|
"loss": 0.0832, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 78.39, |
|
"learning_rate": 9.187178621569779e-05, |
|
"loss": 0.0826, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 78.47, |
|
"learning_rate": 7.407332684094379e-05, |
|
"loss": 0.0803, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 78.56, |
|
"learning_rate": 5.588919352762366e-05, |
|
"loss": 0.0655, |
|
"step": 4635 |
|
}, |
|
{ |
|
"epoch": 78.64, |
|
"learning_rate": 3.8600717816521687e-05, |
|
"loss": 0.0778, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 78.73, |
|
"learning_rate": 2.3426119396138645e-05, |
|
"loss": 0.0721, |
|
"step": 4645 |
|
}, |
|
{ |
|
"epoch": 78.81, |
|
"learning_rate": 1.1434665150940419e-05, |
|
"loss": 0.0758, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 78.9, |
|
"learning_rate": 3.471324059069379e-06, |
|
"loss": 0.0676, |
|
"step": 4655 |
|
}, |
|
{ |
|
"epoch": 78.98, |
|
"learning_rate": 9.722707657404684e-08, |
|
"loss": 0.0666, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 79.07, |
|
"learning_rate": 1.5501274584943842e-06, |
|
"loss": 0.072, |
|
"step": 4665 |
|
}, |
|
{ |
|
"epoch": 79.15, |
|
"learning_rate": 7.727647651118667e-06, |
|
"loss": 0.0587, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 79.24, |
|
"learning_rate": 1.819449325079324e-05, |
|
"loss": 0.0528, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 79.32, |
|
"learning_rate": 3.221312570298412e-05, |
|
"loss": 0.0603, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 79.41, |
|
"learning_rate": 4.8795732412502726e-05, |
|
"loss": 0.0598, |
|
"step": 4685 |
|
}, |
|
{ |
|
"epoch": 79.49, |
|
"learning_rate": 6.67738322293681e-05, |
|
"loss": 0.0569, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 79.58, |
|
"learning_rate": 8.488061161115856e-05, |
|
"loss": 0.0499, |
|
"step": 4695 |
|
}, |
|
{ |
|
"epoch": 79.66, |
|
"learning_rate": 0.00010184018970481871, |
|
"loss": 0.056, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 79.75, |
|
"learning_rate": 0.0001164575223498581, |
|
"loss": 0.0578, |
|
"step": 4705 |
|
}, |
|
{ |
|
"epoch": 79.83, |
|
"learning_rate": 0.00012770260998361512, |
|
"loss": 0.0639, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 79.92, |
|
"learning_rate": 0.00013478307580067596, |
|
"loss": 0.0654, |
|
"step": 4715 |
|
}, |
|
{ |
|
"epoch": 80.0, |
|
"learning_rate": 0.0001372, |
|
"loss": 0.0753, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 80.08, |
|
"learning_rate": 0.0001347830758006763, |
|
"loss": 0.068, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 80.17, |
|
"learning_rate": 0.00012770260998361577, |
|
"loss": 0.0654, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 80.25, |
|
"learning_rate": 0.00011645752234985907, |
|
"loss": 0.0748, |
|
"step": 4735 |
|
}, |
|
{ |
|
"epoch": 80.34, |
|
"learning_rate": 0.00010184018970481989, |
|
"loss": 0.0768, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 80.42, |
|
"learning_rate": 8.488061161115984e-05, |
|
"loss": 0.081, |
|
"step": 4745 |
|
}, |
|
{ |
|
"epoch": 80.51, |
|
"learning_rate": 6.677383222936944e-05, |
|
"loss": 0.0816, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 80.59, |
|
"learning_rate": 4.879573241250401e-05, |
|
"loss": 0.0761, |
|
"step": 4755 |
|
}, |
|
{ |
|
"epoch": 80.68, |
|
"learning_rate": 3.221312570298526e-05, |
|
"loss": 0.0779, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 80.76, |
|
"learning_rate": 1.8194493250794154e-05, |
|
"loss": 0.0677, |
|
"step": 4765 |
|
}, |
|
{ |
|
"epoch": 80.85, |
|
"learning_rate": 7.727647651119284e-06, |
|
"loss": 0.0545, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 80.93, |
|
"learning_rate": 1.5501274584946737e-06, |
|
"loss": 0.0604, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 81.02, |
|
"learning_rate": 9.722707657397828e-08, |
|
"loss": 0.0588, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 81.1, |
|
"learning_rate": 3.47132405906896e-06, |
|
"loss": 0.0608, |
|
"step": 4785 |
|
}, |
|
{ |
|
"epoch": 81.19, |
|
"learning_rate": 1.1434665150939672e-05, |
|
"loss": 0.0524, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 81.27, |
|
"learning_rate": 2.3426119396137632e-05, |
|
"loss": 0.0672, |
|
"step": 4795 |
|
}, |
|
{ |
|
"epoch": 81.36, |
|
"learning_rate": 3.860071781652048e-05, |
|
"loss": 0.0474, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 81.44, |
|
"learning_rate": 5.5889193527622347e-05, |
|
"loss": 0.0541, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 81.53, |
|
"learning_rate": 7.40733268409405e-05, |
|
"loss": 0.0498, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 81.61, |
|
"learning_rate": 9.187178621569835e-05, |
|
"loss": 0.0455, |
|
"step": 4815 |
|
}, |
|
{ |
|
"epoch": 81.69, |
|
"learning_rate": 0.00010803041634070853, |
|
"loss": 0.0539, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 81.78, |
|
"learning_rate": 0.0001214106112632909, |
|
"loss": 0.0573, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 81.86, |
|
"learning_rate": 0.00013106954541784605, |
|
"loss": 0.0609, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 81.95, |
|
"learning_rate": 0.00013632660913388427, |
|
"loss": 0.0603, |
|
"step": 4835 |
|
}, |
|
{ |
|
"epoch": 82.03, |
|
"learning_rate": 0.00013681136729441553, |
|
"loss": 0.0709, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 82.12, |
|
"learning_rate": 0.0001324896617732313, |
|
"loss": 0.0654, |
|
"step": 4845 |
|
}, |
|
{ |
|
"epoch": 82.2, |
|
"learning_rate": 0.00012366601836206402, |
|
"loss": 0.0706, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 82.29, |
|
"learning_rate": 0.0001109621885853097, |
|
"loss": 0.0702, |
|
"step": 4855 |
|
}, |
|
{ |
|
"epoch": 82.37, |
|
"learning_rate": 9.52733384374685e-05, |
|
"loss": 0.0743, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 82.46, |
|
"learning_rate": 7.770497117166324e-05, |
|
"loss": 0.0766, |
|
"step": 4865 |
|
}, |
|
{ |
|
"epoch": 82.54, |
|
"learning_rate": 5.949502882833789e-05, |
|
"loss": 0.0691, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 82.63, |
|
"learning_rate": 4.192666156253255e-05, |
|
"loss": 0.072, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 82.71, |
|
"learning_rate": 2.6237811414691212e-05, |
|
"loss": 0.0551, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 82.8, |
|
"learning_rate": 1.3533981637936661e-05, |
|
"loss": 0.0676, |
|
"step": 4885 |
|
}, |
|
{ |
|
"epoch": 82.88, |
|
"learning_rate": 4.710338226769111e-06, |
|
"loss": 0.0629, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 82.97, |
|
"learning_rate": 3.8863270558459543e-07, |
|
"loss": 0.0614, |
|
"step": 4895 |
|
}, |
|
{ |
|
"epoch": 83.05, |
|
"learning_rate": 8.733908661155503e-07, |
|
"loss": 0.057, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 83.14, |
|
"learning_rate": 6.13045458215347e-06, |
|
"loss": 0.0538, |
|
"step": 4905 |
|
}, |
|
{ |
|
"epoch": 83.22, |
|
"learning_rate": 1.5789388736708385e-05, |
|
"loss": 0.0509, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 83.31, |
|
"learning_rate": 2.9169583659290544e-05, |
|
"loss": 0.0513, |
|
"step": 4915 |
|
}, |
|
{ |
|
"epoch": 83.39, |
|
"learning_rate": 4.532821378430057e-05, |
|
"loss": 0.0465, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 83.47, |
|
"learning_rate": 6.312667315905837e-05, |
|
"loss": 0.0522, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 83.56, |
|
"learning_rate": 8.131080647237844e-05, |
|
"loss": 0.0574, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 83.64, |
|
"learning_rate": 9.85992821834785e-05, |
|
"loss": 0.0581, |
|
"step": 4935 |
|
}, |
|
{ |
|
"epoch": 83.73, |
|
"learning_rate": 0.0001137738806038615, |
|
"loss": 0.0563, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 83.81, |
|
"learning_rate": 0.0001257653348490597, |
|
"loss": 0.0587, |
|
"step": 4945 |
|
}, |
|
{ |
|
"epoch": 83.9, |
|
"learning_rate": 0.00013372867594093008, |
|
"loss": 0.0569, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 83.98, |
|
"learning_rate": 0.00013710277292342587, |
|
"loss": 0.0614, |
|
"step": 4955 |
|
}, |
|
{ |
|
"epoch": 84.07, |
|
"learning_rate": 0.00013564987254150598, |
|
"loss": 0.0651, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 84.15, |
|
"learning_rate": 0.00012947235234888035, |
|
"loss": 0.0607, |
|
"step": 4965 |
|
}, |
|
{ |
|
"epoch": 84.24, |
|
"learning_rate": 0.00011900550674920662, |
|
"loss": 0.0656, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 84.32, |
|
"learning_rate": 0.0001049868742970157, |
|
"loss": 0.0584, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 84.41, |
|
"learning_rate": 8.840426758749708e-05, |
|
"loss": 0.0668, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 84.49, |
|
"learning_rate": 7.04261677706317e-05, |
|
"loss": 0.0659, |
|
"step": 4985 |
|
}, |
|
{ |
|
"epoch": 84.58, |
|
"learning_rate": 5.231938838884316e-05, |
|
"loss": 0.066, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 84.66, |
|
"learning_rate": 3.535981029518282e-05, |
|
"loss": 0.0632, |
|
"step": 4995 |
|
}, |
|
{ |
|
"epoch": 84.75, |
|
"learning_rate": 2.0742477650140346e-05, |
|
"loss": 0.0742, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 84.83, |
|
"learning_rate": 9.4973900163838e-06, |
|
"loss": 0.0586, |
|
"step": 5005 |
|
}, |
|
{ |
|
"epoch": 84.92, |
|
"learning_rate": 2.4169241993239866e-06, |
|
"loss": 0.0588, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 85.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.0531, |
|
"step": 5015 |
|
}, |
|
{ |
|
"epoch": 85.08, |
|
"learning_rate": 2.416924199323735e-06, |
|
"loss": 0.0476, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 85.17, |
|
"learning_rate": 9.497390016383327e-06, |
|
"loss": 0.0496, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 85.25, |
|
"learning_rate": 2.0742477650139675e-05, |
|
"loss": 0.0555, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 85.34, |
|
"learning_rate": 3.535981029517858e-05, |
|
"loss": 0.0468, |
|
"step": 5035 |
|
}, |
|
{ |
|
"epoch": 85.42, |
|
"learning_rate": 5.2319388388842245e-05, |
|
"loss": 0.0464, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 85.51, |
|
"learning_rate": 7.042616777063077e-05, |
|
"loss": 0.0492, |
|
"step": 5045 |
|
}, |
|
{ |
|
"epoch": 85.59, |
|
"learning_rate": 8.840426758749618e-05, |
|
"loss": 0.0536, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 85.68, |
|
"learning_rate": 0.00010498687429701491, |
|
"loss": 0.0514, |
|
"step": 5055 |
|
}, |
|
{ |
|
"epoch": 85.76, |
|
"learning_rate": 0.00011900550674920598, |
|
"loss": 0.057, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 85.85, |
|
"learning_rate": 0.00012947235234887991, |
|
"loss": 0.0542, |
|
"step": 5065 |
|
}, |
|
{ |
|
"epoch": 85.93, |
|
"learning_rate": 0.00013564987254150495, |
|
"loss": 0.0564, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 86.02, |
|
"learning_rate": 0.00013710277292342592, |
|
"loss": 0.064, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 86.1, |
|
"learning_rate": 0.00013372867594093038, |
|
"loss": 0.0597, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 86.19, |
|
"learning_rate": 0.00012576533484906022, |
|
"loss": 0.0623, |
|
"step": 5085 |
|
}, |
|
{ |
|
"epoch": 86.27, |
|
"learning_rate": 0.0001137738806038622, |
|
"loss": 0.0575, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 86.36, |
|
"learning_rate": 9.859928218347934e-05, |
|
"loss": 0.0666, |
|
"step": 5095 |
|
}, |
|
{ |
|
"epoch": 86.44, |
|
"learning_rate": 8.131080647237936e-05, |
|
"loss": 0.0666, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 86.53, |
|
"learning_rate": 6.31266731590593e-05, |
|
"loss": 0.0662, |
|
"step": 5105 |
|
}, |
|
{ |
|
"epoch": 86.61, |
|
"learning_rate": 4.532821378430512e-05, |
|
"loss": 0.0634, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 86.69, |
|
"learning_rate": 2.9169583659291313e-05, |
|
"loss": 0.0579, |
|
"step": 5115 |
|
}, |
|
{ |
|
"epoch": 86.78, |
|
"learning_rate": 1.5789388736708978e-05, |
|
"loss": 0.0519, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 86.86, |
|
"learning_rate": 6.130454582153858e-06, |
|
"loss": 0.0604, |
|
"step": 5125 |
|
}, |
|
{ |
|
"epoch": 86.95, |
|
"learning_rate": 8.73390866115695e-07, |
|
"loss": 0.0576, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 87.03, |
|
"learning_rate": 3.8863270558449643e-07, |
|
"loss": 0.0524, |
|
"step": 5135 |
|
}, |
|
{ |
|
"epoch": 87.12, |
|
"learning_rate": 4.710338226768769e-06, |
|
"loss": 0.0541, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 87.2, |
|
"learning_rate": 1.3533981637933775e-05, |
|
"loss": 0.0473, |
|
"step": 5145 |
|
}, |
|
{ |
|
"epoch": 87.29, |
|
"learning_rate": 2.6237811414690473e-05, |
|
"loss": 0.049, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 87.37, |
|
"learning_rate": 4.192666156253168e-05, |
|
"loss": 0.0497, |
|
"step": 5155 |
|
}, |
|
{ |
|
"epoch": 87.46, |
|
"learning_rate": 5.949502882833696e-05, |
|
"loss": 0.0499, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 87.54, |
|
"learning_rate": 7.770497117166231e-05, |
|
"loss": 0.0495, |
|
"step": 5165 |
|
}, |
|
{ |
|
"epoch": 87.63, |
|
"learning_rate": 9.527333843746763e-05, |
|
"loss": 0.0543, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 87.71, |
|
"learning_rate": 0.00011096218858530895, |
|
"loss": 0.0479, |
|
"step": 5175 |
|
}, |
|
{ |
|
"epoch": 87.8, |
|
"learning_rate": 0.00012366601836206345, |
|
"loss": 0.0551, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 87.88, |
|
"learning_rate": 0.00013248966177322954, |
|
"loss": 0.0545, |
|
"step": 5185 |
|
}, |
|
{ |
|
"epoch": 87.97, |
|
"learning_rate": 0.00013681136729441542, |
|
"loss": 0.0589, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 88.05, |
|
"learning_rate": 0.00013632660913388443, |
|
"loss": 0.0645, |
|
"step": 5195 |
|
}, |
|
{ |
|
"epoch": 88.14, |
|
"learning_rate": 0.00013106954541784645, |
|
"loss": 0.0602, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 88.22, |
|
"learning_rate": 0.0001214106112632915, |
|
"loss": 0.0542, |
|
"step": 5205 |
|
}, |
|
{ |
|
"epoch": 88.31, |
|
"learning_rate": 0.0001080304163407093, |
|
"loss": 0.0617, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 88.39, |
|
"learning_rate": 9.187178621569925e-05, |
|
"loss": 0.063, |
|
"step": 5215 |
|
}, |
|
{ |
|
"epoch": 88.47, |
|
"learning_rate": 7.407332684094144e-05, |
|
"loss": 0.0639, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 88.56, |
|
"learning_rate": 5.5889193527625186e-05, |
|
"loss": 0.0587, |
|
"step": 5225 |
|
}, |
|
{ |
|
"epoch": 88.64, |
|
"learning_rate": 3.860071781652132e-05, |
|
"loss": 0.0594, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 88.73, |
|
"learning_rate": 2.342611939613834e-05, |
|
"loss": 0.0555, |
|
"step": 5235 |
|
}, |
|
{ |
|
"epoch": 88.81, |
|
"learning_rate": 1.1434665150940197e-05, |
|
"loss": 0.0594, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 88.9, |
|
"learning_rate": 3.4713240590698665e-06, |
|
"loss": 0.0553, |
|
"step": 5245 |
|
}, |
|
{ |
|
"epoch": 88.98, |
|
"learning_rate": 9.722707657413062e-08, |
|
"loss": 0.0614, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 89.07, |
|
"learning_rate": 1.5501274584940568e-06, |
|
"loss": 0.0495, |
|
"step": 5255 |
|
}, |
|
{ |
|
"epoch": 89.15, |
|
"learning_rate": 7.727647651119748e-06, |
|
"loss": 0.0453, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 89.24, |
|
"learning_rate": 1.8194493250793514e-05, |
|
"loss": 0.049, |
|
"step": 5265 |
|
}, |
|
{ |
|
"epoch": 89.32, |
|
"learning_rate": 3.221312570298446e-05, |
|
"loss": 0.0523, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 89.41, |
|
"learning_rate": 4.879573241250311e-05, |
|
"loss": 0.0466, |
|
"step": 5275 |
|
}, |
|
{ |
|
"epoch": 89.49, |
|
"learning_rate": 6.67738322293685e-05, |
|
"loss": 0.0471, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 89.58, |
|
"learning_rate": 8.488061161115704e-05, |
|
"loss": 0.0472, |
|
"step": 5285 |
|
}, |
|
{ |
|
"epoch": 89.66, |
|
"learning_rate": 0.00010184018970481735, |
|
"loss": 0.0432, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 89.75, |
|
"learning_rate": 0.0001164575223498598, |
|
"loss": 0.0476, |
|
"step": 5295 |
|
}, |
|
{ |
|
"epoch": 89.83, |
|
"learning_rate": 0.0001277026099836163, |
|
"loss": 0.0497, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 89.92, |
|
"learning_rate": 0.00013478307580067607, |
|
"loss": 0.0568, |
|
"step": 5305 |
|
}, |
|
{ |
|
"epoch": 90.0, |
|
"learning_rate": 0.0001372, |
|
"loss": 0.0585, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 90.08, |
|
"learning_rate": 0.0001347830758006762, |
|
"loss": 0.058, |
|
"step": 5315 |
|
}, |
|
{ |
|
"epoch": 90.17, |
|
"learning_rate": 0.0001277026099836166, |
|
"loss": 0.0557, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 90.25, |
|
"learning_rate": 0.00011645752234986017, |
|
"loss": 0.0601, |
|
"step": 5325 |
|
}, |
|
{ |
|
"epoch": 90.34, |
|
"learning_rate": 0.00010184018970482124, |
|
"loss": 0.0574, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 90.42, |
|
"learning_rate": 8.488061161115755e-05, |
|
"loss": 0.0634, |
|
"step": 5335 |
|
}, |
|
{ |
|
"epoch": 90.51, |
|
"learning_rate": 6.677383222936903e-05, |
|
"loss": 0.059, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 90.59, |
|
"learning_rate": 4.879573241250363e-05, |
|
"loss": 0.0638, |
|
"step": 5345 |
|
}, |
|
{ |
|
"epoch": 90.68, |
|
"learning_rate": 3.221312570298492e-05, |
|
"loss": 0.0514, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 90.76, |
|
"learning_rate": 1.819449325079388e-05, |
|
"loss": 0.0537, |
|
"step": 5355 |
|
}, |
|
{ |
|
"epoch": 90.85, |
|
"learning_rate": 7.727647651119999e-06, |
|
"loss": 0.048, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 90.93, |
|
"learning_rate": 1.550127458495001e-06, |
|
"loss": 0.0544, |
|
"step": 5365 |
|
}, |
|
{ |
|
"epoch": 91.02, |
|
"learning_rate": 9.722707657410776e-08, |
|
"loss": 0.0456, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 91.1, |
|
"learning_rate": 3.471324059069699e-06, |
|
"loss": 0.0485, |
|
"step": 5375 |
|
}, |
|
{ |
|
"epoch": 91.19, |
|
"learning_rate": 1.14346651509399e-05, |
|
"loss": 0.047, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 91.27, |
|
"learning_rate": 2.3426119396137937e-05, |
|
"loss": 0.0489, |
|
"step": 5385 |
|
}, |
|
{ |
|
"epoch": 91.36, |
|
"learning_rate": 3.860071781652084e-05, |
|
"loss": 0.0459, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 91.44, |
|
"learning_rate": 5.588919352762083e-05, |
|
"loss": 0.0471, |
|
"step": 5395 |
|
}, |
|
{ |
|
"epoch": 91.53, |
|
"learning_rate": 7.407332684094089e-05, |
|
"loss": 0.0428, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 91.61, |
|
"learning_rate": 9.187178621569508e-05, |
|
"loss": 0.0452, |
|
"step": 5405 |
|
}, |
|
{ |
|
"epoch": 91.69, |
|
"learning_rate": 0.00010803041634070884, |
|
"loss": 0.0513, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 91.78, |
|
"learning_rate": 0.00012141061126329114, |
|
"loss": 0.0464, |
|
"step": 5415 |
|
}, |
|
{ |
|
"epoch": 91.86, |
|
"learning_rate": 0.00013106954541784624, |
|
"loss": 0.0518, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 91.95, |
|
"learning_rate": 0.00013632660913388432, |
|
"loss": 0.0564, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 92.03, |
|
"learning_rate": 0.00013681136729441548, |
|
"loss": 0.051, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 92.12, |
|
"learning_rate": 0.00013248966177323117, |
|
"loss": 0.0527, |
|
"step": 5435 |
|
}, |
|
{ |
|
"epoch": 92.2, |
|
"learning_rate": 0.0001236660183620661, |
|
"loss": 0.0553, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 92.29, |
|
"learning_rate": 0.00011096218858530938, |
|
"loss": 0.0554, |
|
"step": 5445 |
|
}, |
|
{ |
|
"epoch": 92.37, |
|
"learning_rate": 9.527333843746813e-05, |
|
"loss": 0.0556, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 92.46, |
|
"learning_rate": 7.770497117166284e-05, |
|
"loss": 0.0588, |
|
"step": 5455 |
|
}, |
|
{ |
|
"epoch": 92.54, |
|
"learning_rate": 5.9495028828337496e-05, |
|
"loss": 0.0569, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 92.63, |
|
"learning_rate": 4.192666156253218e-05, |
|
"loss": 0.0547, |
|
"step": 5465 |
|
}, |
|
{ |
|
"epoch": 92.71, |
|
"learning_rate": 2.623781141469089e-05, |
|
"loss": 0.0554, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 92.8, |
|
"learning_rate": 1.3533981637936417e-05, |
|
"loss": 0.0541, |
|
"step": 5475 |
|
}, |
|
{ |
|
"epoch": 92.88, |
|
"learning_rate": 4.710338226770383e-06, |
|
"loss": 0.0486, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 92.97, |
|
"learning_rate": 3.8863270558454974e-07, |
|
"loss": 0.0512, |
|
"step": 5485 |
|
}, |
|
{ |
|
"epoch": 93.05, |
|
"learning_rate": 8.733908661156113e-07, |
|
"loss": 0.047, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 93.14, |
|
"learning_rate": 6.130454582153638e-06, |
|
"loss": 0.0441, |
|
"step": 5495 |
|
}, |
|
{ |
|
"epoch": 93.22, |
|
"learning_rate": 1.5789388736708636e-05, |
|
"loss": 0.0469, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 93.31, |
|
"learning_rate": 2.9169583659290876e-05, |
|
"loss": 0.0444, |
|
"step": 5505 |
|
}, |
|
{ |
|
"epoch": 93.39, |
|
"learning_rate": 4.532821378430094e-05, |
|
"loss": 0.0462, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 93.47, |
|
"learning_rate": 6.312667315905488e-05, |
|
"loss": 0.0411, |
|
"step": 5515 |
|
}, |
|
{ |
|
"epoch": 93.56, |
|
"learning_rate": 8.131080647237501e-05, |
|
"loss": 0.0466, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 93.64, |
|
"learning_rate": 9.859928218347886e-05, |
|
"loss": 0.0455, |
|
"step": 5525 |
|
}, |
|
{ |
|
"epoch": 93.73, |
|
"learning_rate": 0.00011377388060386181, |
|
"loss": 0.0421, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 93.81, |
|
"learning_rate": 0.00012576533484905992, |
|
"loss": 0.049, |
|
"step": 5535 |
|
}, |
|
{ |
|
"epoch": 93.9, |
|
"learning_rate": 0.00013372867594093019, |
|
"loss": 0.0533, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 93.98, |
|
"learning_rate": 0.00013710277292342587, |
|
"loss": 0.06, |
|
"step": 5545 |
|
}, |
|
{ |
|
"epoch": 94.07, |
|
"learning_rate": 0.0001356498725415059, |
|
"loss": 0.0563, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 94.15, |
|
"learning_rate": 0.00012947235234888197, |
|
"loss": 0.0559, |
|
"step": 5555 |
|
}, |
|
{ |
|
"epoch": 94.24, |
|
"learning_rate": 0.00011900550674920635, |
|
"loss": 0.065, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 94.32, |
|
"learning_rate": 0.00010498687429701536, |
|
"loss": 0.0568, |
|
"step": 5565 |
|
}, |
|
{ |
|
"epoch": 94.41, |
|
"learning_rate": 8.840426758749669e-05, |
|
"loss": 0.0619, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 94.49, |
|
"learning_rate": 7.042616777063131e-05, |
|
"loss": 0.0625, |
|
"step": 5575 |
|
}, |
|
{ |
|
"epoch": 94.58, |
|
"learning_rate": 5.231938838884276e-05, |
|
"loss": 0.0563, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 94.66, |
|
"learning_rate": 3.5359810295182465e-05, |
|
"loss": 0.0581, |
|
"step": 5585 |
|
}, |
|
{ |
|
"epoch": 94.75, |
|
"learning_rate": 2.074247765014285e-05, |
|
"loss": 0.0551, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 94.83, |
|
"learning_rate": 9.497390016385574e-06, |
|
"loss": 0.0524, |
|
"step": 5595 |
|
}, |
|
{ |
|
"epoch": 94.92, |
|
"learning_rate": 2.41692419932388e-06, |
|
"loss": 0.0431, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 95.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.0466, |
|
"step": 5605 |
|
}, |
|
{ |
|
"epoch": 95.08, |
|
"learning_rate": 2.4169241993238418e-06, |
|
"loss": 0.0458, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 95.17, |
|
"learning_rate": 9.497390016383526e-06, |
|
"loss": 0.0504, |
|
"step": 5615 |
|
}, |
|
{ |
|
"epoch": 95.25, |
|
"learning_rate": 2.0742477650139963e-05, |
|
"loss": 0.0462, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 95.34, |
|
"learning_rate": 3.535981029517894e-05, |
|
"loss": 0.0432, |
|
"step": 5625 |
|
}, |
|
{ |
|
"epoch": 95.42, |
|
"learning_rate": 5.231938838883885e-05, |
|
"loss": 0.0389, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 95.51, |
|
"learning_rate": 7.042616777063117e-05, |
|
"loss": 0.0444, |
|
"step": 5635 |
|
}, |
|
{ |
|
"epoch": 95.59, |
|
"learning_rate": 8.840426758749657e-05, |
|
"loss": 0.0406, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 95.68, |
|
"learning_rate": 0.00010498687429701524, |
|
"loss": 0.0471, |
|
"step": 5645 |
|
}, |
|
{ |
|
"epoch": 95.76, |
|
"learning_rate": 0.00011900550674920625, |
|
"loss": 0.0442, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 95.85, |
|
"learning_rate": 0.0001294723523488801, |
|
"loss": 0.0465, |
|
"step": 5655 |
|
}, |
|
{ |
|
"epoch": 95.93, |
|
"learning_rate": 0.00013564987254150503, |
|
"loss": 0.0506, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 96.02, |
|
"learning_rate": 0.0001371027729234259, |
|
"loss": 0.0551, |
|
"step": 5665 |
|
}, |
|
{ |
|
"epoch": 96.1, |
|
"learning_rate": 0.00013372867594093146, |
|
"loss": 0.0517, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 96.19, |
|
"learning_rate": 0.00012576533484906, |
|
"loss": 0.0512, |
|
"step": 5675 |
|
}, |
|
{ |
|
"epoch": 96.27, |
|
"learning_rate": 0.00011377388060386191, |
|
"loss": 0.057, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 96.36, |
|
"learning_rate": 9.859928218347899e-05, |
|
"loss": 0.0536, |
|
"step": 5685 |
|
}, |
|
{ |
|
"epoch": 96.44, |
|
"learning_rate": 8.131080647237897e-05, |
|
"loss": 0.0631, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 96.53, |
|
"learning_rate": 6.31266731590589e-05, |
|
"loss": 0.0536, |
|
"step": 5695 |
|
}, |
|
{ |
|
"epoch": 96.61, |
|
"learning_rate": 4.532821378430474e-05, |
|
"loss": 0.0553, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 96.69, |
|
"learning_rate": 2.9169583659290984e-05, |
|
"loss": 0.0547, |
|
"step": 5705 |
|
}, |
|
{ |
|
"epoch": 96.78, |
|
"learning_rate": 1.5789388736708727e-05, |
|
"loss": 0.0504, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 96.86, |
|
"learning_rate": 6.130454582153691e-06, |
|
"loss": 0.051, |
|
"step": 5715 |
|
}, |
|
{ |
|
"epoch": 96.95, |
|
"learning_rate": 8.733908661156341e-07, |
|
"loss": 0.0544, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 97.03, |
|
"learning_rate": 3.886327055845345e-07, |
|
"loss": 0.0491, |
|
"step": 5725 |
|
}, |
|
{ |
|
"epoch": 97.12, |
|
"learning_rate": 4.710338226768914e-06, |
|
"loss": 0.0393, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 97.2, |
|
"learning_rate": 1.353398163793401e-05, |
|
"loss": 0.0491, |
|
"step": 5735 |
|
}, |
|
{ |
|
"epoch": 97.29, |
|
"learning_rate": 2.6237811414690785e-05, |
|
"loss": 0.0451, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 97.37, |
|
"learning_rate": 4.1926661562532056e-05, |
|
"loss": 0.0457, |
|
"step": 5745 |
|
}, |
|
{ |
|
"epoch": 97.46, |
|
"learning_rate": 5.949502882833735e-05, |
|
"loss": 0.0365, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 97.54, |
|
"learning_rate": 7.77049711716627e-05, |
|
"loss": 0.0395, |
|
"step": 5755 |
|
}, |
|
{ |
|
"epoch": 97.63, |
|
"learning_rate": 9.527333843746801e-05, |
|
"loss": 0.0429, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 97.71, |
|
"learning_rate": 0.00011096218858530927, |
|
"loss": 0.0457, |
|
"step": 5765 |
|
}, |
|
{ |
|
"epoch": 97.8, |
|
"learning_rate": 0.0001236660183620637, |
|
"loss": 0.0419, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 97.88, |
|
"learning_rate": 0.0001324896617732297, |
|
"loss": 0.0472, |
|
"step": 5775 |
|
}, |
|
{ |
|
"epoch": 97.97, |
|
"learning_rate": 0.00013681136729441548, |
|
"loss": 0.0512, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 98.05, |
|
"learning_rate": 0.00013632660913388435, |
|
"loss": 0.0487, |
|
"step": 5785 |
|
}, |
|
{ |
|
"epoch": 98.14, |
|
"learning_rate": 0.0001310695454178463, |
|
"loss": 0.0505, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 98.22, |
|
"learning_rate": 0.00012141061126329122, |
|
"loss": 0.0493, |
|
"step": 5795 |
|
}, |
|
{ |
|
"epoch": 98.31, |
|
"learning_rate": 0.00010803041634070896, |
|
"loss": 0.0533, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 98.39, |
|
"learning_rate": 9.187178621569886e-05, |
|
"loss": 0.0511, |
|
"step": 5805 |
|
}, |
|
{ |
|
"epoch": 98.47, |
|
"learning_rate": 7.407332684094491e-05, |
|
"loss": 0.0488, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 98.56, |
|
"learning_rate": 5.588919352762479e-05, |
|
"loss": 0.057, |
|
"step": 5815 |
|
}, |
|
{ |
|
"epoch": 98.64, |
|
"learning_rate": 3.860071781652096e-05, |
|
"loss": 0.0504, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 98.73, |
|
"learning_rate": 2.3426119396138036e-05, |
|
"loss": 0.051, |
|
"step": 5825 |
|
}, |
|
{ |
|
"epoch": 98.81, |
|
"learning_rate": 1.1434665150939969e-05, |
|
"loss": 0.0492, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 98.9, |
|
"learning_rate": 3.471324059069745e-06, |
|
"loss": 0.045, |
|
"step": 5835 |
|
}, |
|
{ |
|
"epoch": 98.98, |
|
"learning_rate": 9.722707657410776e-08, |
|
"loss": 0.0443, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 99.07, |
|
"learning_rate": 1.550127458494148e-06, |
|
"loss": 0.0471, |
|
"step": 5845 |
|
}, |
|
{ |
|
"epoch": 99.15, |
|
"learning_rate": 7.72764765111814e-06, |
|
"loss": 0.0412, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 99.24, |
|
"learning_rate": 1.819449325079379e-05, |
|
"loss": 0.036, |
|
"step": 5855 |
|
}, |
|
{ |
|
"epoch": 99.32, |
|
"learning_rate": 3.22131257029848e-05, |
|
"loss": 0.0413, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 99.41, |
|
"learning_rate": 4.879573241250349e-05, |
|
"loss": 0.0414, |
|
"step": 5865 |
|
}, |
|
{ |
|
"epoch": 99.49, |
|
"learning_rate": 6.67738322293689e-05, |
|
"loss": 0.0373, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 99.58, |
|
"learning_rate": 8.488061161115743e-05, |
|
"loss": 0.0373, |
|
"step": 5875 |
|
}, |
|
{ |
|
"epoch": 99.66, |
|
"learning_rate": 0.00010184018970481772, |
|
"loss": 0.0382, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 99.75, |
|
"learning_rate": 0.0001164575223498573, |
|
"loss": 0.0412, |
|
"step": 5885 |
|
}, |
|
{ |
|
"epoch": 99.83, |
|
"learning_rate": 0.00012770260998361453, |
|
"loss": 0.0452, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 99.92, |
|
"learning_rate": 0.00013478307580067617, |
|
"loss": 0.0459, |
|
"step": 5895 |
|
}, |
|
{ |
|
"epoch": 100.0, |
|
"learning_rate": 0.0001372, |
|
"loss": 0.0481, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 100.08, |
|
"learning_rate": 0.0001347830758006761, |
|
"loss": 0.0498, |
|
"step": 5905 |
|
}, |
|
{ |
|
"epoch": 100.17, |
|
"learning_rate": 0.00012770260998361637, |
|
"loss": 0.0453, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 100.25, |
|
"learning_rate": 0.0001164575223498599, |
|
"loss": 0.0556, |
|
"step": 5915 |
|
}, |
|
{ |
|
"epoch": 100.34, |
|
"learning_rate": 0.00010184018970482089, |
|
"loss": 0.0514, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 100.42, |
|
"learning_rate": 8.488061161116095e-05, |
|
"loss": 0.0529, |
|
"step": 5925 |
|
}, |
|
{ |
|
"epoch": 100.51, |
|
"learning_rate": 6.677383222936864e-05, |
|
"loss": 0.0467, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 100.59, |
|
"learning_rate": 4.879573241250324e-05, |
|
"loss": 0.0495, |
|
"step": 5935 |
|
}, |
|
{ |
|
"epoch": 100.68, |
|
"learning_rate": 3.221312570298457e-05, |
|
"loss": 0.0482, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 100.76, |
|
"learning_rate": 1.8194493250793605e-05, |
|
"loss": 0.0465, |
|
"step": 5945 |
|
}, |
|
{ |
|
"epoch": 100.85, |
|
"learning_rate": 7.727647651119809e-06, |
|
"loss": 0.0495, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 100.93, |
|
"learning_rate": 1.5501274584949098e-06, |
|
"loss": 0.0458, |
|
"step": 5955 |
|
} |
|
], |
|
"max_steps": 5959, |
|
"num_train_epochs": 101, |
|
"total_flos": 6201766379520000.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |