{
  "best_metric": 0.21771365404129028,
  "best_model_checkpoint": "output/eminem/checkpoint-1812",
  "epoch": 4.0,
  "global_step": 1812,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.01, "learning_rate": 9.438570471456781e-07, "loss": 0.5631, "step": 5},
    {"epoch": 0.02, "learning_rate": 1.3766395364652078e-06, "loss": 0.4984, "step": 10},
    {"epoch": 0.03, "learning_rate": 1.8898866416840713e-06, "loss": 0.5528, "step": 15},
    {"epoch": 0.04, "learning_rate": 2.4829840193536984e-06, "loss": 0.6369, "step": 20},
    {"epoch": 0.06, "learning_rate": 3.15522174732073e-06, "loss": 0.5848, "step": 25},
    {"epoch": 0.07, "learning_rate": 3.9057951744922e-06, "loss": 0.5654, "step": 30},
    {"epoch": 0.08, "learning_rate": 4.733805883976431e-06, "loss": 0.5758, "step": 35},
    {"epoch": 0.09, "learning_rate": 5.638262768466482e-06, "loss": 0.5143, "step": 40},
    {"epoch": 0.1, "learning_rate": 6.618083216572411e-06, "loss": 0.5341, "step": 45},
    {"epoch": 0.11, "learning_rate": 7.67209440867359e-06, "loss": 0.4776, "step": 50},
    {"epoch": 0.12, "learning_rate": 8.799034720759786e-06, "loss": 0.5263, "step": 55},
    {"epoch": 0.13, "learning_rate": 9.99755523455637e-06, "loss": 0.5323, "step": 60},
    {"epoch": 0.14, "learning_rate": 1.1266221352146829e-05, "loss": 0.5985, "step": 65},
    {"epoch": 0.15, "learning_rate": 1.2603514513152563e-05, "loss": 0.5842, "step": 70},
    {"epoch": 0.17, "learning_rate": 1.4007834012403921e-05, "loss": 0.652, "step": 75},
    {"epoch": 0.18, "learning_rate": 1.5477498915945647e-05, "loss": 0.5753, "step": 80},
    {"epoch": 0.19, "learning_rate": 1.7010750073074952e-05, "loss": 0.5635, "step": 85},
    {"epoch": 0.2, "learning_rate": 1.8605752221990662e-05, "loss": 0.4981, "step": 90},
    {"epoch": 0.21, "learning_rate": 2.0260596186565352e-05, "loss": 0.5425, "step": 95},
    {"epoch": 0.22, "learning_rate": 2.197330116156359e-05, "loss": 0.4698, "step": 100},
    {"epoch": 0.23, "learning_rate": 2.374181708363339e-05, "loss": 0.5, "step": 105},
    {"epoch": 0.24, "learning_rate": 2.5564027085163642e-05, "loss": 0.5386, "step": 110},
    {"epoch": 0.25, "learning_rate": 2.7437750028125014e-05, "loss": 0.5554, "step": 115},
    {"epoch": 0.26, "learning_rate": 2.9360743114840313e-05, "loss": 0.5243, "step": 120},
    {"epoch": 0.28, "learning_rate": 3.133070457254414e-05, "loss": 0.5746, "step": 125},
    {"epoch": 0.29, "learning_rate": 3.334527640855775e-05, "loss": 0.5865, "step": 130},
    {"epoch": 0.3, "learning_rate": 3.5402047232733794e-05, "loss": 0.5271, "step": 135},
    {"epoch": 0.31, "learning_rate": 3.74985551438323e-05, "loss": 0.5387, "step": 140},
    {"epoch": 0.32, "learning_rate": 3.963229067635792e-05, "loss": 0.5834, "step": 145},
    {"epoch": 0.33, "learning_rate": 4.180069980431271e-05, "loss": 0.5346, "step": 150},
    {"epoch": 0.34, "learning_rate": 4.4001186998313e-05, "loss": 0.5753, "step": 155},
    {"epoch": 0.35, "learning_rate": 4.623111833235809e-05, "loss": 0.5956, "step": 160},
    {"epoch": 0.36, "learning_rate": 4.848782463657661e-05, "loss": 0.556, "step": 165},
    {"epoch": 0.37, "learning_rate": 5.076860469215913e-05, "loss": 0.6621, "step": 170},
    {"epoch": 0.39, "learning_rate": 5.307072846463573e-05, "loss": 0.5567, "step": 175},
    {"epoch": 0.4, "learning_rate": 5.5391440371663775e-05, "loss": 0.5614, "step": 180},
    {"epoch": 0.41, "learning_rate": 5.7727962581384594e-05, "loss": 0.541, "step": 185},
    {"epoch": 0.42, "learning_rate": 6.007749833742374e-05, "loss": 0.6, "step": 190},
    {"epoch": 0.43, "learning_rate": 6.243723530652418e-05, "loss": 0.5782, "step": 195},
    {"epoch": 0.44, "learning_rate": 6.480434894484185e-05, "loss": 0.6341, "step": 200},
    {"epoch": 0.45, "learning_rate": 6.717600587885714e-05, "loss": 0.6118, "step": 205},
    {"epoch": 0.46, "learning_rate": 6.95493672968377e-05, "loss": 0.5678, "step": 210},
    {"epoch": 0.47, "learning_rate": 7.192159234682814e-05, "loss": 0.576, "step": 215},
    {"epoch": 0.48, "learning_rate": 7.428984153708189e-05, "loss": 0.4995, "step": 220},
    {"epoch": 0.5, "learning_rate": 7.66512801348454e-05, "loss": 0.5505, "step": 225},
    {"epoch": 0.51, "learning_rate": 7.90030815594751e-05, "loss": 0.5138, "step": 230},
    {"epoch": 0.52, "learning_rate": 8.134243076576756e-05, "loss": 0.642, "step": 235},
    {"epoch": 0.53, "learning_rate": 8.36665276135011e-05, "loss": 0.5566, "step": 240},
    {"epoch": 0.54, "learning_rate": 8.597259021913672e-05, "loss": 0.6086, "step": 245},
    {"epoch": 0.55, "learning_rate": 8.82578582856489e-05, "loss": 0.5626, "step": 250},
    {"epoch": 0.56, "learning_rate": 9.051959640653713e-05, "loss": 0.5904, "step": 255},
    {"epoch": 0.57, "learning_rate": 9.275509734003259e-05, "loss": 0.5694, "step": 260},
    {"epoch": 0.58, "learning_rate": 9.49616852496036e-05, "loss": 0.5298, "step": 265},
    {"epoch": 0.59, "learning_rate": 9.713671890685142e-05, "loss": 0.5779, "step": 270},
    {"epoch": 0.61, "learning_rate": 9.927759485299755e-05, "loss": 0.5908, "step": 275},
    {"epoch": 0.62, "learning_rate": 0.00010138175051516152, "loss": 0.5923, "step": 280},
    {"epoch": 0.63, "learning_rate": 0.00010344666727368174, "loss": 0.5174, "step": 285},
    {"epoch": 0.64, "learning_rate": 0.00010546987347685163, "loss": 0.5551, "step": 290},
    {"epoch": 0.65, "learning_rate": 0.00010744894739941056, "loss": 0.5551, "step": 295},
    {"epoch": 0.66, "learning_rate": 0.00010938152014129127, "loss": 0.5935, "step": 300},
    {"epoch": 0.67, "learning_rate": 0.00011126527846313643, "loss": 0.6027, "step": 305},
    {"epoch": 0.68, "learning_rate": 0.00011309796755517486, "loss": 0.6107, "step": 310},
    {"epoch": 0.69, "learning_rate": 0.00011487739373618269, "loss": 0.6027, "step": 315},
    {"epoch": 0.7, "learning_rate": 0.00011660142707925222, "loss": 0.6548, "step": 320},
    {"epoch": 0.72, "learning_rate": 0.00011826800396126505, "loss": 0.576, "step": 325},
    {"epoch": 0.73, "learning_rate": 0.00011987512953299384, "loss": 0.6334, "step": 330},
    {"epoch": 0.74, "learning_rate": 0.00012142088010688381, "loss": 0.5323, "step": 335},
    {"epoch": 0.75, "learning_rate": 0.00012290340545966178, "loss": 0.6278, "step": 340},
    {"epoch": 0.76, "learning_rate": 0.00012432093104699756, "loss": 0.7043, "step": 345},
    {"epoch": 0.77, "learning_rate": 0.00012567176012759175, "loss": 0.5754, "step": 350},
    {"epoch": 0.78, "learning_rate": 0.00012695427579413395, "loss": 0.5657, "step": 355},
    {"epoch": 0.79, "learning_rate": 0.00012816694290869102, "loss": 0.654, "step": 360},
    {"epoch": 0.8, "learning_rate": 0.00012930830994023598, "loss": 0.687, "step": 365},
    {"epoch": 0.81, "learning_rate": 0.00013037701070208566, "loss": 0.7228, "step": 370},
    {"epoch": 0.83, "learning_rate": 0.00013137176598719397, "loss": 0.583, "step": 375},
    {"epoch": 0.84, "learning_rate": 0.00013229138509933167, "loss": 0.6198, "step": 380},
    {"epoch": 0.85, "learning_rate": 0.00013313476727831326, "loss": 0.6863, "step": 385},
    {"epoch": 0.86, "learning_rate": 0.00013390090301758438, "loss": 0.6002, "step": 390},
    {"epoch": 0.87, "learning_rate": 0.00013458887527256983, "loss": 0.5305, "step": 395},
    {"epoch": 0.88, "learning_rate": 0.0001351978605583551, "loss": 0.647, "step": 400},
    {"epoch": 0.89, "learning_rate": 0.00013572712993537557, "loss": 0.6578, "step": 405},
    {"epoch": 0.9, "learning_rate": 0.0001361760498819379, "loss": 0.694, "step": 410},
    {"epoch": 0.91, "learning_rate": 0.00013654408305253043, "loss": 0.6405, "step": 415},
    {"epoch": 0.93, "learning_rate": 0.00013683078892100958, "loss": 0.598, "step": 420},
    {"epoch": 0.94, "learning_rate": 0.00013703582430790024, "loss": 0.6474, "step": 425},
    {"epoch": 0.95, "learning_rate": 0.00013715894379117126, "loss": 0.6811, "step": 430},
    {"epoch": 0.96, "learning_rate": 0.0001372, "loss": 0.6149, "step": 435},
    {"epoch": 0.97, "learning_rate": 0.00013715894379117126, "loss": 0.6465, "step": 440},
    {"epoch": 0.98, "learning_rate": 0.00013703582430790037, "loss": 0.6784, "step": 445},
    {"epoch": 0.99, "learning_rate": 0.00013683078892100977, "loss": 0.6619, "step": 450},
    {"epoch": 1.0, "eval_loss": 0.36211052536964417, "eval_runtime": 7.9758, "eval_samples_per_second": 81.371, "eval_steps_per_second": 10.281, "step": 454},
    {"epoch": 1.0, "learning_rate": 0.00013654408305253043, "loss": 0.7554, "step": 455},
    {"epoch": 1.01, "learning_rate": 0.0001361760498819382, "loss": 0.5685, "step": 460},
    {"epoch": 1.02, "learning_rate": 0.00013572712993537557, "loss": 0.4916, "step": 465},
    {"epoch": 1.04, "learning_rate": 0.0001351978605583551, "loss": 0.6418, "step": 470},
    {"epoch": 1.05, "learning_rate": 0.0001345888752725698, "loss": 0.5455, "step": 475},
    {"epoch": 1.06, "learning_rate": 0.00013390090301758435, "loss": 0.5476, "step": 480},
    {"epoch": 1.07, "learning_rate": 0.0001331347672783139, "loss": 0.5503, "step": 485},
    {"epoch": 1.08, "learning_rate": 0.00013229138509933164, "loss": 0.5999, "step": 490},
    {"epoch": 1.09, "learning_rate": 0.00013137176598719473, "loss": 0.5831, "step": 495},
    {"epoch": 1.1, "learning_rate": 0.00013037701070208564, "loss": 0.5574, "step": 500},
    {"epoch": 1.11, "learning_rate": 0.00012930830994023595, "loss": 0.6027, "step": 505},
    {"epoch": 1.12, "learning_rate": 0.00012816694290869194, "loss": 0.6368, "step": 510},
    {"epoch": 1.13, "learning_rate": 0.00012695427579413393, "loss": 0.6659, "step": 515},
    {"epoch": 1.15, "learning_rate": 0.0001256717601275928, "loss": 0.5791, "step": 520},
    {"epoch": 1.16, "learning_rate": 0.00012432093104699753, "loss": 0.6579, "step": 525},
    {"epoch": 1.17, "learning_rate": 0.00012290340545966172, "loss": 0.5917, "step": 530},
    {"epoch": 1.18, "learning_rate": 0.00012142088010688503, "loss": 0.5659, "step": 535},
    {"epoch": 1.19, "learning_rate": 0.0001198751295329938, "loss": 0.5619, "step": 540},
    {"epoch": 1.2, "learning_rate": 0.00011826800396126635, "loss": 0.5993, "step": 545},
    {"epoch": 1.21, "learning_rate": 0.00011660142707925218, "loss": 0.5983, "step": 550},
    {"epoch": 1.22, "learning_rate": 0.00011487739373618408, "loss": 0.5408, "step": 555},
    {"epoch": 1.23, "learning_rate": 0.00011309796755517628, "loss": 0.5652, "step": 560},
    {"epoch": 1.24, "learning_rate": 0.00011126527846313638, "loss": 0.6357, "step": 565},
    {"epoch": 1.26, "learning_rate": 0.0001093815201412928, "loss": 0.6156, "step": 570},
    {"epoch": 1.27, "learning_rate": 0.00010744894739941049, "loss": 0.5459, "step": 575},
    {"epoch": 1.28, "learning_rate": 0.00010546987347685321, "loss": 0.5747, "step": 580},
    {"epoch": 1.29, "learning_rate": 0.00010344666727368336, "loss": 0.6251, "step": 585},
    {"epoch": 1.3, "learning_rate": 0.00010138175051516146, "loss": 0.6133, "step": 590},
    {"epoch": 1.31, "learning_rate": 9.927759485299923e-05, "loss": 0.6489, "step": 595},
    {"epoch": 1.32, "learning_rate": 9.713671890685137e-05, "loss": 0.6098, "step": 600},
    {"epoch": 1.33, "learning_rate": 9.496168524960355e-05, "loss": 0.6389, "step": 605},
    {"epoch": 1.34, "learning_rate": 9.275509734003435e-05, "loss": 0.5117, "step": 610},
    {"epoch": 1.35, "learning_rate": 9.051959640653706e-05, "loss": 0.5879, "step": 615},
    {"epoch": 1.37, "learning_rate": 8.825785828565071e-05, "loss": 0.5736, "step": 620},
    {"epoch": 1.38, "learning_rate": 8.597259021913666e-05, "loss": 0.5553, "step": 625},
    {"epoch": 1.39, "learning_rate": 8.366652761350294e-05, "loss": 0.6493, "step": 630},
    {"epoch": 1.4, "learning_rate": 8.13424307657675e-05, "loss": 0.5273, "step": 635},
    {"epoch": 1.41, "learning_rate": 7.900308155947505e-05, "loss": 0.5613, "step": 640},
    {"epoch": 1.42, "learning_rate": 7.665128013484727e-05, "loss": 0.55, "step": 645},
    {"epoch": 1.43, "learning_rate": 7.428984153708183e-05, "loss": 0.5884, "step": 650},
    {"epoch": 1.44, "learning_rate": 7.192159234683002e-05, "loss": 0.5982, "step": 655},
    {"epoch": 1.45, "learning_rate": 6.954936729683765e-05, "loss": 0.5926, "step": 660},
    {"epoch": 1.46, "learning_rate": 6.717600587885708e-05, "loss": 0.5806, "step": 665},
    {"epoch": 1.48, "learning_rate": 6.480434894484373e-05, "loss": 0.5951, "step": 670},
    {"epoch": 1.49, "learning_rate": 6.243723530652412e-05, "loss": 0.5371, "step": 675},
    {"epoch": 1.5, "learning_rate": 6.007749833742561e-05, "loss": 0.607, "step": 680},
    {"epoch": 1.51, "learning_rate": 5.772796258138646e-05, "loss": 0.5796, "step": 685},
    {"epoch": 1.52, "learning_rate": 5.5391440371663714e-05, "loss": 0.6185, "step": 690},
    {"epoch": 1.53, "learning_rate": 5.307072846463756e-05, "loss": 0.5427, "step": 695},
    {"epoch": 1.54, "learning_rate": 5.076860469215906e-05, "loss": 0.5822, "step": 700},
    {"epoch": 1.55, "learning_rate": 4.8487824636578415e-05, "loss": 0.6119, "step": 705},
    {"epoch": 1.56, "learning_rate": 4.623111833235802e-05, "loss": 0.4964, "step": 710},
    {"epoch": 1.57, "learning_rate": 4.400118699831475e-05, "loss": 0.5858, "step": 715},
    {"epoch": 1.59, "learning_rate": 4.180069980431445e-05, "loss": 0.5173, "step": 720},
    {"epoch": 1.6, "learning_rate": 3.963229067635786e-05, "loss": 0.4972, "step": 725},
    {"epoch": 1.61, "learning_rate": 3.749855514383397e-05, "loss": 0.5399, "step": 730},
    {"epoch": 1.62, "learning_rate": 3.540204723273373e-05, "loss": 0.5664, "step": 735},
    {"epoch": 1.63, "learning_rate": 3.33452764085577e-05, "loss": 0.5974, "step": 740},
    {"epoch": 1.64, "learning_rate": 3.133070457254572e-05, "loss": 0.5232, "step": 745},
    {"epoch": 1.65, "learning_rate": 2.9360743114840252e-05, "loss": 0.6258, "step": 750},
    {"epoch": 1.66, "learning_rate": 2.7437750028126522e-05, "loss": 0.5125, "step": 755},
    {"epoch": 1.67, "learning_rate": 2.5564027085163588e-05, "loss": 0.6473, "step": 760},
    {"epoch": 1.69, "learning_rate": 2.374181708363334e-05, "loss": 0.5284, "step": 765},
    {"epoch": 1.7, "learning_rate": 2.197330116156497e-05, "loss": 0.5848, "step": 770},
    {"epoch": 1.71, "learning_rate": 2.0260596186565298e-05, "loss": 0.535, "step": 775},
    {"epoch": 1.72, "learning_rate": 1.860575222199195e-05, "loss": 0.5386, "step": 780},
    {"epoch": 1.73, "learning_rate": 1.7010750073074908e-05, "loss": 0.5661, "step": 785},
    {"epoch": 1.74, "learning_rate": 1.5477498915946836e-05, "loss": 0.5236, "step": 790},
    {"epoch": 1.75, "learning_rate": 1.4007834012403882e-05, "loss": 0.5898, "step": 795},
    {"epoch": 1.76, "learning_rate": 1.2603514513152518e-05, "loss": 0.6072, "step": 800},
    {"epoch": 1.77, "learning_rate": 1.1266221352147866e-05, "loss": 0.4956, "step": 805},
    {"epoch": 1.78, "learning_rate": 9.99755523455634e-06, "loss": 0.5137, "step": 810},
    {"epoch": 1.8, "learning_rate": 8.799034720760707e-06, "loss": 0.4718, "step": 815},
    {"epoch": 1.81, "learning_rate": 7.672094408674458e-06, "loss": 0.5598, "step": 820},
    {"epoch": 1.82, "learning_rate": 6.61808321657238e-06, "loss": 0.5329, "step": 825},
    {"epoch": 1.83, "learning_rate": 5.638262768467229e-06, "loss": 0.5635, "step": 830},
    {"epoch": 1.84, "learning_rate": 4.733805883976409e-06, "loss": 0.6177, "step": 835},
    {"epoch": 1.85, "learning_rate": 3.905795174492832e-06, "loss": 0.5325, "step": 840},
    {"epoch": 1.86, "learning_rate": 3.1552217473212937e-06, "loss": 0.6092, "step": 845},
    {"epoch": 1.87, "learning_rate": 2.4829840193536756e-06, "loss": 0.5838, "step": 850},
    {"epoch": 1.88, "learning_rate": 1.8898866416845054e-06, "loss": 0.5179, "step": 855},
    {"epoch": 1.89, "learning_rate": 1.3766395364651925e-06, "loss": 0.587, "step": 860},
    {"epoch": 1.91, "learning_rate": 9.438570471459905e-07, "loss": 0.5222, "step": 865},
    {"epoch": 1.92, "learning_rate": 5.920572031214825e-07, "loss": 0.6116, "step": 870},
    {"epoch": 1.93, "learning_rate": 3.2166109966502293e-07, "loss": 0.4959, "step": 875},
    {"epoch": 1.94, "learning_rate": 1.329923938872171e-07, "loss": 0.5544, "step": 880},
    {"epoch": 1.95, "learning_rate": 2.627691732664299e-08, "loss": 0.571, "step": 885},
    {"epoch": 1.96, "learning_rate": 1.6424056362860595e-09, "loss": 0.5854, "step": 890},
    {"epoch": 1.97, "learning_rate": 5.911834568653918e-08, "loss": 0.5492, "step": 895},
    {"epoch": 1.98, "learning_rate": 1.986359402705409e-07, "loss": 0.5403, "step": 900},
    {"epoch": 1.99, "learning_rate": 4.2002819045225533e-07, "loss": 0.5372, "step": 905},
    {"epoch": 2.0, "eval_loss": 0.35109472274780273, "eval_runtime": 7.9799, "eval_samples_per_second": 81.33, "eval_steps_per_second": 10.276, "step": 908},
    {"epoch": 2.01, "learning_rate": 9.30995090154817e-06, "loss": 0.4526, "step": 910},
    {"epoch": 2.02, "learning_rate": 1.0541855162378925e-05, "loss": 0.5261, "step": 915},
    {"epoch": 2.03, "learning_rate": 1.1843560639183314e-05, "loss": 0.5046, "step": 920},
    {"epoch": 2.04, "learning_rate": 1.321350233836306e-05, "loss": 0.6349, "step": 925},
    {"epoch": 2.05, "learning_rate": 1.4650033228364816e-05, "loss": 0.5519, "step": 930},
    {"epoch": 2.06, "learning_rate": 1.615142621984976e-05, "loss": 0.6092, "step": 935},
    {"epoch": 2.08, "learning_rate": 1.7715876242113545e-05, "loss": 0.5618, "step": 940},
    {"epoch": 2.09, "learning_rate": 1.9341502413255598e-05, "loss": 0.5784, "step": 945},
    {"epoch": 2.1, "learning_rate": 2.10263503014928e-05, "loss": 0.6022, "step": 950},
    {"epoch": 2.11, "learning_rate": 2.2768394274904872e-05, "loss": 0.5783, "step": 955},
    {"epoch": 2.12, "learning_rate": 2.4565539936783126e-05, "loss": 0.551, "step": 960},
    {"epoch": 2.13, "learning_rate": 2.6415626643638117e-05, "loss": 0.524, "step": 965},
    {"epoch": 2.14, "learning_rate": 2.8316430102875185e-05, "loss": 0.5542, "step": 970},
    {"epoch": 2.15, "learning_rate": 3.026566504696925e-05, "loss": 0.5175, "step": 975},
    {"epoch": 2.16, "learning_rate": 3.226098798097204e-05, "loss": 0.5524, "step": 980},
    {"epoch": 2.17, "learning_rate": 3.4300000000000576e-05, "loss": 0.6188, "step": 985},
    {"epoch": 2.19, "learning_rate": 3.638024967335875e-05, "loss": 0.5017, "step": 990},
    {"epoch": 2.2, "learning_rate": 3.8499235991807596e-05, "loss": 0.5877, "step": 995},
    {"epoch": 2.21, "learning_rate": 4.065441137443541e-05, "loss": 0.4836, "step": 1000},
    {"epoch": 2.22, "learning_rate": 4.2843184731518815e-05, "loss": 0.4962, "step": 1005},
    {"epoch": 2.23, "learning_rate": 4.506292457970005e-05, "loss": 0.5619, "step": 1010},
    {"epoch": 2.24, "learning_rate": 4.731096220573011e-05, "loss": 0.5061, "step": 1015},
    {"epoch": 2.25, "learning_rate": 4.9584594874953847e-05, "loss": 0.5603, "step": 1020},
    {"epoch": 2.26, "learning_rate": 5.188108908072324e-05, "loss": 0.6256, "step": 1025},
    {"epoch": 2.27, "learning_rate": 5.419768383078854e-05, "loss": 0.5088, "step": 1030},
    {"epoch": 2.28, "learning_rate": 5.653159396673597e-05, "loss": 0.5927, "step": 1035},
    {"epoch": 2.3, "learning_rate": 5.888001351248702e-05, "loss": 0.5919, "step": 1040},
    {"epoch": 2.31, "learning_rate": 6.124011904782824e-05, "loss": 0.5993, "step": 1045},
    {"epoch": 2.32, "learning_rate": 6.36090731028942e-05, "loss": 0.5514, "step": 1050},
    {"epoch": 2.33, "learning_rate": 6.598402756957074e-05, "loss": 0.6111, "step": 1055},
    {"epoch": 2.34, "learning_rate": 6.836212712565718e-05, "loss": 0.5716, "step": 1060},
    {"epoch": 2.35, "learning_rate": 7.074051266773294e-05, "loss": 0.5673, "step": 1065},
    {"epoch": 2.36, "learning_rate": 7.311632474854093e-05, "loss": 0.6117, "step": 1070},
    {"epoch": 2.37, "learning_rate": 7.54867070148039e-05, "loss": 0.5824, "step": 1075},
    {"epoch": 2.38, "learning_rate": 7.784880964131884e-05, "loss": 0.5873, "step": 1080},
    {"epoch": 2.4, "learning_rate": 8.019979275719533e-05, "loss": 0.6133, "step": 1085},
    {"epoch": 2.41, "learning_rate": 8.253682986012616e-05, "loss": 0.6222, "step": 1090},
    {"epoch": 2.42, "learning_rate": 8.485711121459308e-05, "loss": 0.5468, "step": 1095},
    {"epoch": 2.43, "learning_rate": 8.715784722991633e-05, "loss": 0.5227, "step": 1100},
    {"epoch": 2.44, "learning_rate": 8.943627181406567e-05, "loss": 0.5709, "step": 1105},
    {"epoch": 2.45, "learning_rate": 9.168964569924647e-05, "loss": 0.6087, "step": 1110},
    {"epoch": 2.46, "learning_rate": 9.391525973521692e-05, "loss": 0.5605, "step": 1115},
    {"epoch": 2.47, "learning_rate": 9.611043814639816e-05, "loss": 0.6505, "step": 1120},
    {"epoch": 2.48, "learning_rate": 9.827254174886847e-05, "loss": 0.5743, "step": 1125},
    {"epoch": 2.49, "learning_rate": 0.00010039897112335537, "loss": 0.5898, "step": 1130},
    {"epoch": 2.51, "learning_rate": 0.000102487169740429, "loss": 0.5651, "step": 1135},
    {"epoch": 2.52, "learning_rate": 0.00010453462703412173, "loss": 0.5988, "step": 1140},
    {"epoch": 2.53, "learning_rate": 0.00010653888142028633, "loss": 0.6178, "step": 1145},
    {"epoch": 2.54, "learning_rate": 0.00010849752325608153, "loss": 0.5959, "step": 1150},
    {"epoch": 2.55, "learning_rate": 0.00011040819773698719, "loss": 0.6657, "step": 1155},
    {"epoch": 2.56, "learning_rate": 0.00011226860772790607, "loss": 0.6, "step": 1160},
    {"epoch": 2.57, "learning_rate": 0.0001140765165249296, "loss": 0.554, "step": 1165},
    {"epoch": 2.58, "learning_rate": 0.00011582975054444297, "loss": 0.5723, "step": 1170},
    {"epoch": 2.59, "learning_rate": 0.00011752620193634387, "loss": 0.6071, "step": 1175},
    {"epoch": 2.6, "learning_rate": 0.00011916383111823773, "loss": 0.5496, "step": 1180},
    {"epoch": 2.62, "learning_rate": 0.00012074066922755797, "loss": 0.6531, "step": 1185},
    {"epoch": 2.63, "learning_rate": 0.00012225482048864924, "loss": 0.6045, "step": 1190},
    {"epoch": 2.64, "learning_rate": 0.00012370446449200064, "loss": 0.6114, "step": 1195},
    {"epoch": 2.65, "learning_rate": 0.00012508785838284846, "loss": 0.6003, "step": 1200},
    {"epoch": 2.66, "learning_rate": 0.0001264033389565572, "loss": 0.6637, "step": 1205},
    {"epoch": 2.67, "learning_rate": 0.00012764932465822253, "loss": 0.6116, "step": 1210},
    {"epoch": 2.68, "learning_rate": 0.00012882431748412055, "loss": 0.6878, "step": 1215},
    {"epoch": 2.69, "learning_rate": 0.00012992690478271, "loss": 0.5817, "step": 1220},
    {"epoch": 2.7, "learning_rate": 0.0001309557609530009, "loss": 0.6061, "step": 1225},
    {"epoch": 2.72, "learning_rate": 0.0001319096490382838, "loss": 0.6419, "step": 1230},
    {"epoch": 2.73, "learning_rate": 0.00013278742221327952, "loss": 0.5573, "step": 1235},
    {"epoch": 2.74, "learning_rate": 0.0001335880251629203, "loss": 0.6822, "step": 1240},
    {"epoch": 2.75, "learning_rate": 0.0001343104953511231, "loss": 0.6704, "step": 1245},
    {"epoch": 2.76, "learning_rate": 0.00013495396417800677, "loss": 0.6309, "step": 1250},
    {"epoch": 2.77, "learning_rate": 0.00013551765802418287, "loss": 0.5814, "step": 1255},
    {"epoch": 2.78, "learning_rate": 0.0001360008991808469, "loss": 0.6252, "step": 1260},
    {"epoch": 2.79, "learning_rate": 0.00013640310666456482, "loss": 0.6373, "step": 1265},
    {"epoch": 2.8, "learning_rate": 0.00013672379691576902, "loss": 0.6937, "step": 1270},
    {"epoch": 2.81, "learning_rate": 0.0001369625843801236, "loss": 0.6196, "step": 1275},
    {"epoch": 2.83, "learning_rate": 0.0001371191819720623, "loss": 0.6718, "step": 1280},
    {"epoch": 2.84, "learning_rate": 0.0001371934014199418, "loss": 0.6286, "step": 1285},
    {"epoch": 2.85, "learning_rate": 0.00013718515349239374, "loss": 0.6942, "step": 1290},
    {"epoch": 2.86, "learning_rate": 0.00013709444810560437, "loss": 0.7029, "step": 1295},
    {"epoch": 2.87, "learning_rate": 0.00013692139431139254, "loss": 0.6829, "step": 1300},
    {"epoch": 2.88, "learning_rate": 0.00013666620016610043, "loss": 0.6676, "step": 1305},
    {"epoch": 2.89, "learning_rate": 0.0001363291724804556, "loss": 0.6491, "step": 1310},
    {"epoch": 2.9, "learning_rate": 0.00013591071645070298, "loss": 0.6058, "step": 1315},
    {"epoch": 2.91, "learning_rate": 0.00013541133517145257, "loss": 0.6635, "step": 1320},
    {"epoch": 2.92, "learning_rate": 0.00013483162903082574, "loss": 0.6435, "step": 1325},
    {"epoch": 2.94, "learning_rate": 0.00013417229498863067, "loss": 0.6009, "step": 1330},
    {"epoch": 2.95, "learning_rate": 0.00013343412573843465, "loss": 0.6296, "step": 1335},
    {"epoch": 2.96, "learning_rate": 0.0001326180087545332, "loss": 0.585, "step": 1340},
    {"epoch": 2.97, "learning_rate": 0.00013172492522497574, "loss": 0.6359, "step": 1345},
    {"epoch": 2.98, "learning_rate": 0.00013075594887191708, "loss": 0.5417, "step": 1350},
    {"epoch": 2.99, "learning_rate": 0.0001297122446607181, "loss": 0.7232, "step": 1355},
    {"epoch": 3.0, "eval_loss": 0.3138461709022522, "eval_runtime": 8.6365, "eval_samples_per_second": 76.072, "eval_steps_per_second": 9.61, "step": 1359},
    {"epoch": 3.04, "learning_rate": 1.4667747726642993e-05, "loss": 0.6323, "step": 1360},
    {"epoch": 3.05, "learning_rate": 1.3214755505568174e-05, "loss": 0.5288, "step": 1365},
    {"epoch": 3.06, "learning_rate": 1.1829845442054389e-05, "loss": 0.4822, "step": 1370},
    {"epoch": 3.07, "learning_rate": 1.0514719932940577e-05, "loss": 0.5699, "step": 1375},
    {"epoch": 3.08, "learning_rate": 9.270995592595549e-06, "loss": 0.5351, "step": 1380},
    {"epoch": 3.09, "learning_rate": 8.100201265703248e-06, "loss": 0.6382, "step": 1385},
    {"epoch": 3.1, "learning_rate": 7.0037761479277354e-06, "loss": 0.5365, "step": 1390},
    {"epoch": 3.11, "learning_rate": 5.983068016793553e-06, "loss": 0.5665, "step": 1395},
    {"epoch": 3.12, "learning_rate": 5.039331574927566e-06, "loss": 0.5169, "step": 1400},
    {"epoch": 3.14, "learning_rate": 4.173726907722877e-06, "loss": 0.5302, "step": 1405},
    {"epoch": 3.15, "learning_rate": 3.387318057306161e-06, "loss": 0.5061, "step": 1410},
    {"epoch": 3.16, "learning_rate": 2.6810717145662967e-06, "loss": 0.5636, "step": 1415},
    {"epoch": 3.17, "learning_rate": 2.0558560308545347e-06, "loss": 0.5727, "step": 1420},
    {"epoch": 3.18, "learning_rate": 1.5124395508079945e-06, "loss": 0.5178, "step": 1425},
    {"epoch": 3.19, "learning_rate": 1.0514902676223786e-06, "loss": 0.5668, "step": 1430},
    {"epoch": 3.2, "learning_rate": 6.735748019196765e-07, "loss": 0.5409, "step": 1435},
    {"epoch": 3.21, "learning_rate": 3.791577052345188e-07, "loss": 0.5488, "step": 1440},
    {"epoch": 3.23, "learning_rate": 1.6860088896217012e-07, "loss": 0.5891, "step": 1445},
    {"epoch": 3.24, "learning_rate": 4.216317948330257e-08, "loss": 0.6293, "step": 1450},
    {"epoch": 3.25, "learning_rate": 0.0, "loss": 0.5776, "step": 1455},
    {"epoch": 3.26, "learning_rate": 4.216317948328734e-08, "loss": 0.6373, "step": 1460},
    {"epoch": 3.27, "learning_rate": 1.6860088896213203e-07, "loss": 0.6081, "step": 1465},
    {"epoch": 3.28, "learning_rate": 3.791577052344655e-07, "loss": 0.5801, "step": 1470},
    {"epoch": 3.29, "learning_rate": 6.73574801919608e-07, "loss": 0.5491, "step": 1475},
    {"epoch": 3.3, "learning_rate": 1.051490267622295e-06, "loss": 0.5859, "step": 1480},
    {"epoch": 3.31, "learning_rate": 1.5124395508078954e-06, "loss": 0.5745, "step": 1485},
    {"epoch": 3.33, "learning_rate": 2.055856030853948e-06, "loss": 0.4955, "step": 1490},
    {"epoch": 3.34, "learning_rate": 2.6810717145661676e-06, "loss": 0.5646, "step": 1495},
    {"epoch": 3.35, "learning_rate": 3.387318057305407e-06, "loss": 0.6039, "step": 1500},
    {"epoch": 3.36, "learning_rate": 4.173726907722717e-06, "loss": 0.5249, "step": 1505},
    {"epoch": 3.37, "learning_rate": 5.039331574926652e-06, "loss": 0.5415, "step": 1510},
    {"epoch": 3.38, "learning_rate": 5.983068016793356e-06, "loss": 0.5852, "step": 1515},
    {"epoch": 3.39, "learning_rate": 7.003776147927523e-06, "loss": 0.5236, "step": 1520},
    {"epoch": 3.4, "learning_rate": 8.100201265703026e-06, "loss": 0.5849, "step": 1525},
    {"epoch": 3.42, "learning_rate": 9.270995592595313e-06, "loss": 0.5388, "step": 1530},
    {"epoch": 3.43, "learning_rate": 1.051471993293929e-05, "loss": 0.5372, "step": 1535},
    {"epoch": 3.44, "learning_rate": 1.1829845442054121e-05, "loss": 0.5343, "step": 1540},
    {"epoch": 3.45, "learning_rate": 1.3214755505566749e-05, "loss": 0.5689, "step": 1545},
    {"epoch": 3.46, "learning_rate": 1.4667747726642696e-05, "loss": 0.5281, "step": 1550},
    {"epoch": 3.47, "learning_rate": 1.6187036018636972e-05, "loss": 0.5893, "step": 1555},
    {"epoch": 3.48, "learning_rate": 1.7770752800649943e-05, "loss": 0.538, "step": 1560},
    {"epoch": 3.49, "learning_rate": 1.941695129323498e-05, "loss": 0.5953, "step": 1565},
    {"epoch": 3.5, "learning_rate": 2.1123607911480585e-05, "loss": 0.5626, "step": 1570},
    {"epoch": 3.52, "learning_rate": 2.2888624752492827e-05, "loss": 0.5227, "step": 1575},
    {"epoch": 3.53, "learning_rate": 2.4709832174232166e-05, "loss": 0.4595, "step": 1580},
    {"epoch": 3.54, "learning_rate": 2.658499146254842e-05, "loss": 0.4945, "step": 1585},
    {"epoch": 3.55, "learning_rate": 2.8511797583108267e-05, "loss": 0.4842, "step": 1590},
    {"epoch": 3.56, "learning_rate": 3.0487882014855305e-05, "loss": 0.5576, "step": 1595},
    {"epoch": 3.57, "learning_rate": 3.251081566150148e-05, "loss": 0.5323, "step": 1600},
    {"epoch": 3.58, "learning_rate": 3.45781118374983e-05, "loss": 0.551, "step": 1605},
    {"epoch": 3.59, "learning_rate": 3.668722932477494e-05, "loss": 0.6077, "step": 1610},
    {"epoch": 3.6, "learning_rate": 3.883557549653519e-05, "loss": 0.5575, "step": 1615},
    {"epoch": 3.62, "learning_rate": 4.102050950423e-05, "loss": 0.5375, "step": 1620},
    {"epoch": 3.63, "learning_rate": 4.3239345523814906e-05, "loss": 0.5252, "step": 1625},
    {"epoch": 3.64, "learning_rate": 4.548935605729356e-05, "loss": 0.5687, "step": 1630},
    {"epoch": 3.65, "learning_rate": 4.776777528547973e-05, "loss": 0.5572, "step": 1635},
    {"epoch": 3.66, "learning_rate": 5.007180246788566e-05, "loss": 0.4855, "step": 1640},
    {"epoch": 3.67, "learning_rate": 5.239860538550981e-05, "loss": 0.5661, "step": 1645},
    {"epoch": 3.68, "learning_rate": 5.4745323822347075e-05, "loss": 0.4792, "step": 1650},
    {"epoch": 3.69, "learning_rate": 5.710907308129398e-05, "loss": 0.568, "step": 1655},
    {"epoch": 3.71, "learning_rate": 5.948694753015834e-05, "loss": 0.5744, "step": 1660},
    {"epoch": 3.72, "learning_rate": 6.18760241733918e-05, "loss": 0.5533, "step": 1665},
    {"epoch": 3.73, "learning_rate": 6.427336624518557e-05, "loss": 0.5154, "step": 1670},
    {"epoch": 3.74, "learning_rate": 6.667602681947584e-05, "loss": 0.5253, "step": 1675},
    {"epoch": 3.75, "learning_rate": 6.908105243243853e-05, "loss": 0.511, "step": 1680},
    {"epoch": 3.76, "learning_rate": 7.148548671303826e-05, "loss": 0.5867, "step": 1685},
    {"epoch": 3.77, "learning_rate": 7.388637401713134e-05, "loss": 0.5813, "step": 1690},
    {"epoch": 3.78, "learning_rate": 7.628076306068698e-05, "loss": 0.5911, "step": 1695},
    {"epoch": 3.79, "learning_rate": 7.866571054763731e-05, "loss": 0.5641, "step": 1700},
    {"epoch": 3.81, "learning_rate": 8.103828478792738e-05, "loss": 0.5811, "step": 1705},
    {"epoch": 3.82, "learning_rate": 8.339556930128123e-05, "loss": 0.4312, "step": 1710},
    {"epoch": 3.83, "learning_rate": 8.573466640227146e-05, "loss": 0.5149, "step": 1715},
    {"epoch": 3.84, "learning_rate": 8.805270076230236e-05, "loss": 0.5371, "step": 1720},
    {"epoch": 3.85, "learning_rate": 9.0346822944092e-05, "loss": 0.5879, "step": 1725},
    {"epoch": 3.86, "learning_rate": 9.261421290433879e-05, "loss": 0.6111, "step": 1730},
    {"epoch": 3.87, "learning_rate": 9.485208346024451e-05, "loss": 0.5744, "step": 1735},
    {"epoch": 3.88, "learning_rate": 9.705768371566385e-05, "loss": 0.5572, "step": 1740},
    {"epoch": 3.9, "learning_rate": 9.922830244262129e-05, "loss": 0.5072, "step": 1745},
    {"epoch": 3.91, "learning_rate": 0.00010136127141409314, "loss": 0.6262, "step": 1750},
    {"epoch": 3.92, "learning_rate": 0.00010345396868391098, "loss": 0.5653, "step": 1755},
    {"epoch": 3.93, "learning_rate": 0.00010550382180978354, "loss": 0.629, "step": 1760},
    {"epoch": 3.94, "learning_rate": 0.00010750831101546573, "loss": 0.501, "step": 1765},
    {"epoch": 3.95, "learning_rate": 0.00010946497228818022, "loss": 0.6152, "step": 1770},
    {"epoch": 3.96, "learning_rate": 0.0001113714004075095, "loss": 0.5848, "step": 1775},
    {"epoch": 3.97, "learning_rate": 0.00011322525190199372, "loss": 0.6411, "step": 1780},
    {"epoch": 3.98, "learning_rate": 0.00011502424792984731, "loss": 0.5627, "step": 1785},
    {"epoch": 4.0, "learning_rate": 0.00011676617708021282, "loss": 0.62, "step": 1790},
    {"epoch": 4.0, "eval_loss": 0.289399653673172, "eval_runtime": 9.1611, "eval_samples_per_second": 75.973, "eval_steps_per_second": 9.497, "step": 1792},
    {"epoch": 3.96, "learning_rate": 4.9300467408522294e-06, "loss": 0.5907, "step": 1795},
    {"epoch": 3.97, "learning_rate": 5.853621386191686e-06, "loss": 0.551, "step": 1800},
    {"epoch": 3.98, "learning_rate": 6.85263374207898e-06, "loss": 0.6016, "step": 1805},
    {"epoch": 4.0, "learning_rate": 7.92588273195002e-06, "loss": 0.5671, "step": 1810},
    {"epoch": 4.0, "eval_loss": 0.21771365404129028, "eval_runtime": 29.2775, "eval_samples_per_second": 22.44, "eval_steps_per_second": 2.835, "step": 1812}
  ],
  "max_steps": 2265,
  "num_train_epochs": 5,
  "total_flos": 1891623665664000.0,
  "trial_name": null,
  "trial_params": null
}