{
  "best_metric": 1.9903991222381592,
  "best_model_checkpoint": "output/eminem/checkpoint-1824",
  "epoch": 4.0,
  "global_step": 1824,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.7203068149785564e-07,
      "loss": 2.6483,
      "step": 5
    },
    {
      "epoch": 0.02,
      "learning_rate": 7.899165231781454e-07,
      "loss": 2.495,
      "step": 10
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.1886129228032032e-06,
      "loss": 2.7138,
      "step": 15
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.6676447463390023e-06,
      "loss": 2.5829,
      "step": 20
    },
    {
      "epoch": 0.05,
      "learning_rate": 2.2264411225088625e-06,
      "loss": 2.7096,
      "step": 25
    },
    {
      "epoch": 0.07,
      "learning_rate": 2.8643361231111207e-06,
      "loss": 2.4466,
      "step": 30
    },
    {
      "epoch": 0.08,
      "learning_rate": 3.580569556618851e-06,
      "loss": 2.7,
      "step": 35
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.3742878741139976e-06,
      "loss": 2.718,
      "step": 40
    },
    {
      "epoch": 0.1,
      "learning_rate": 5.244545186477934e-06,
      "loss": 2.6071,
      "step": 45
    },
    {
      "epoch": 0.11,
      "learning_rate": 6.190304391625697e-06,
      "loss": 2.5909,
      "step": 50
    },
    {
      "epoch": 0.12,
      "learning_rate": 7.210438410439488e-06,
      "loss": 2.5627,
      "step": 55
    },
    {
      "epoch": 0.13,
      "learning_rate": 8.303731529930944e-06,
      "loss": 2.4577,
      "step": 60
    },
    {
      "epoch": 0.14,
      "learning_rate": 9.468880852028573e-06,
      "loss": 2.5309,
      "step": 65
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.0704497846266275e-05,
      "loss": 2.7378,
      "step": 70
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.200911000452175e-05,
      "loss": 2.6599,
      "step": 75
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.338116259583136e-05,
      "loss": 2.3352,
      "step": 80
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.481902051919347e-05,
      "loss": 2.7494,
      "step": 85
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.6320970252148078e-05,
      "loss": 2.395,
      "step": 90
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.7885221892815092e-05,
      "loss": 2.5425,
      "step": 95
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.950991129295317e-05,
      "loss": 2.6284,
      "step": 100
    },
    {
      "epoch": 0.23,
      "learning_rate": 2.11931022795012e-05,
      "loss": 2.4998,
      "step": 105
    },
    {
      "epoch": 0.24,
      "learning_rate": 2.2932788961951275e-05,
      "loss": 2.5979,
      "step": 110
    },
    {
      "epoch": 0.25,
      "learning_rate": 2.4726898122808088e-05,
      "loss": 2.7143,
      "step": 115
    },
    {
      "epoch": 0.26,
      "learning_rate": 2.6573291688279658e-05,
      "loss": 2.4124,
      "step": 120
    },
    {
      "epoch": 0.27,
      "learning_rate": 2.8469769276260912e-05,
      "loss": 2.6573,
      "step": 125
    },
    {
      "epoch": 0.29,
      "learning_rate": 3.0414070818571297e-05,
      "loss": 2.6071,
      "step": 130
    },
    {
      "epoch": 0.3,
      "learning_rate": 3.24038792543193e-05,
      "loss": 2.8709,
      "step": 135
    },
    {
      "epoch": 0.31,
      "learning_rate": 3.4436823291189305e-05,
      "loss": 2.6786,
      "step": 140
    },
    {
      "epoch": 0.32,
      "learning_rate": 3.651048023135391e-05,
      "loss": 2.6154,
      "step": 145
    },
    {
      "epoch": 0.33,
      "learning_rate": 3.8622378858650226e-05,
      "loss": 2.6546,
      "step": 150
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.077000238357357e-05,
      "loss": 2.6476,
      "step": 155
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.295079144258355e-05,
      "loss": 2.5445,
      "step": 160
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.516214714814676e-05,
      "loss": 2.4619,
      "step": 165
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.740143418587853e-05,
      "loss": 2.5966,
      "step": 170
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.9665983955098734e-05,
      "loss": 2.4876,
      "step": 175
    },
    {
      "epoch": 0.4,
      "learning_rate": 5.1953097749051775e-05,
      "loss": 2.6759,
      "step": 180
    },
    {
      "epoch": 0.41,
      "learning_rate": 5.426004997100832e-05,
      "loss": 2.4991,
      "step": 185
    },
    {
      "epoch": 0.42,
      "learning_rate": 5.658409138240877e-05,
      "loss": 2.5614,
      "step": 190
    },
    {
      "epoch": 0.43,
      "learning_rate": 5.892245237918353e-05,
      "loss": 2.6733,
      "step": 195
    },
    {
      "epoch": 0.44,
      "learning_rate": 6.127234629234317e-05,
      "loss": 2.7397,
      "step": 200
    },
    {
      "epoch": 0.45,
      "learning_rate": 6.363097270890295e-05,
      "loss": 2.7079,
      "step": 205
    },
    {
      "epoch": 0.46,
      "learning_rate": 6.599552080918974e-05,
      "loss": 2.5227,
      "step": 210
    },
    {
      "epoch": 0.47,
      "learning_rate": 6.836317271654733e-05,
      "loss": 2.6247,
      "step": 215
    },
    {
      "epoch": 0.48,
      "learning_rate": 7.073110685545387e-05,
      "loss": 2.5805,
      "step": 220
    },
    {
      "epoch": 0.49,
      "learning_rate": 7.309650131404767e-05,
      "loss": 2.7595,
      "step": 225
    },
    {
      "epoch": 0.51,
      "learning_rate": 7.545653720705101e-05,
      "loss": 2.5378,
      "step": 230
    },
    {
      "epoch": 0.52,
      "learning_rate": 7.780840203509109e-05,
      "loss": 2.6695,
      "step": 235
    },
    {
      "epoch": 0.53,
      "learning_rate": 8.014929303640662e-05,
      "loss": 2.6016,
      "step": 240
    },
    {
      "epoch": 0.54,
      "learning_rate": 8.2476420526954e-05,
      "loss": 2.8115,
      "step": 245
    },
    {
      "epoch": 0.55,
      "learning_rate": 8.478701122492476e-05,
      "loss": 2.8291,
      "step": 250
    },
    {
      "epoch": 0.56,
      "learning_rate": 8.707831155571868e-05,
      "loss": 2.4987,
      "step": 255
    },
    {
      "epoch": 0.57,
      "learning_rate": 8.934759093343136e-05,
      "loss": 2.5696,
      "step": 260
    },
    {
      "epoch": 0.58,
      "learning_rate": 9.15921450149434e-05,
      "loss": 2.6025,
      "step": 265
    },
    {
      "epoch": 0.59,
      "learning_rate": 9.380929892273904e-05,
      "loss": 2.6397,
      "step": 270
    },
    {
      "epoch": 0.6,
      "learning_rate": 9.599641043260666e-05,
      "loss": 2.5424,
      "step": 275
    },
    {
      "epoch": 0.62,
      "learning_rate": 9.815087312242802e-05,
      "loss": 2.7226,
      "step": 280
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.00010027011947830137,
      "loss": 2.6259,
      "step": 285
    },
    {
      "epoch": 0.64,
      "learning_rate": 0.00010235162395429476,
      "loss": 2.6201,
      "step": 290
    },
    {
      "epoch": 0.65,
      "learning_rate": 0.0001043929059821886,
      "loss": 2.7705,
      "step": 295
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.00010639153292761396,
      "loss": 2.7333,
      "step": 300
    },
    {
      "epoch": 0.67,
      "learning_rate": 0.00010834512298907064,
      "loss": 2.6623,
      "step": 305
    },
    {
      "epoch": 0.68,
      "learning_rate": 0.00011025134803636347,
      "loss": 2.8,
      "step": 310
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.00011210793638507988,
      "loss": 2.6572,
      "step": 315
    },
    {
      "epoch": 0.7,
      "learning_rate": 0.00011391267550379957,
      "loss": 2.8413,
      "step": 320
    },
    {
      "epoch": 0.71,
      "learning_rate": 0.00011566341465080861,
      "loss": 2.6584,
      "step": 325
    },
    {
      "epoch": 0.73,
      "learning_rate": 0.00011735806743718085,
      "loss": 2.743,
      "step": 330
    },
    {
      "epoch": 0.74,
      "learning_rate": 0.00011899461431316495,
      "loss": 2.6499,
      "step": 335
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.00012057110497492171,
      "loss": 2.6371,
      "step": 340
    },
    {
      "epoch": 0.76,
      "learning_rate": 0.00012208566068873709,
      "loss": 2.7359,
      "step": 345
    },
    {
      "epoch": 0.77,
      "learning_rate": 0.00012353647652994535,
      "loss": 2.5658,
      "step": 350
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.00012492182353389096,
      "loss": 2.7984,
      "step": 355
    },
    {
      "epoch": 0.79,
      "learning_rate": 0.0001262400507563695,
      "loss": 2.6983,
      "step": 360
    },
    {
      "epoch": 0.8,
      "learning_rate": 0.0001274895872410881,
      "loss": 2.7025,
      "step": 365
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.00012866894389180423,
      "loss": 2.6162,
      "step": 370
    },
    {
      "epoch": 0.82,
      "learning_rate": 0.00012977671524691021,
      "loss": 2.7679,
      "step": 375
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.0001308115811543474,
      "loss": 2.408,
      "step": 380
    },
    {
      "epoch": 0.85,
      "learning_rate": 0.00013177230834485735,
      "loss": 2.7832,
      "step": 385
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.00013265775190169125,
      "loss": 2.7911,
      "step": 390
    },
    {
      "epoch": 0.87,
      "learning_rate": 0.00013346685662502964,
      "loss": 2.7389,
      "step": 395
    },
    {
      "epoch": 0.88,
      "learning_rate": 0.00013419865828948326,
      "loss": 2.6844,
      "step": 400
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.00013485228479317903,
      "loss": 2.8377,
      "step": 405
    },
    {
      "epoch": 0.9,
      "learning_rate": 0.00013542695719706048,
      "loss": 2.4116,
      "step": 410
    },
    {
      "epoch": 0.91,
      "learning_rate": 0.00013592199065316375,
      "loss": 2.5933,
      "step": 415
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.00013633679522076416,
      "loss": 2.7723,
      "step": 420
    },
    {
      "epoch": 0.93,
      "learning_rate": 0.0001366708765694196,
      "loss": 2.7647,
      "step": 425
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.0001369238365680734,
      "loss": 2.6368,
      "step": 430
    },
    {
      "epoch": 0.96,
      "learning_rate": 0.0001370953737595148,
      "loss": 2.3958,
      "step": 435
    },
    {
      "epoch": 0.97,
      "learning_rate": 0.00013718528371963105,
      "loss": 2.7726,
      "step": 440
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.00013719345930102362,
      "loss": 2.6424,
      "step": 445
    },
    {
      "epoch": 0.99,
      "learning_rate": 0.00013711989076069762,
      "loss": 2.807,
      "step": 450
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.00013696466577167284,
      "loss": 2.5944,
      "step": 455
    },
    {
      "epoch": 1.0,
      "eval_loss": 2.5120232105255127,
      "eval_runtime": 30.8025,
      "eval_samples_per_second": 20.81,
      "eval_steps_per_second": 2.63,
      "step": 455
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.00013672796931850215,
      "loss": 2.5617,
      "step": 460
    },
    {
      "epoch": 1.02,
      "learning_rate": 0.00013641008347682184,
      "loss": 2.3941,
      "step": 465
    },
    {
      "epoch": 1.03,
      "learning_rate": 0.0001360113870771968,
      "loss": 2.609,
      "step": 470
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.000135532355253661,
      "loss": 2.6935,
      "step": 475
    },
    {
      "epoch": 1.05,
      "learning_rate": 0.00013497355887749114,
      "loss": 2.4847,
      "step": 480
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.00013433566387688887,
      "loss": 2.5804,
      "step": 485
    },
    {
      "epoch": 1.08,
      "learning_rate": 0.00013361943044338116,
      "loss": 2.493,
      "step": 490
    },
    {
      "epoch": 1.09,
      "learning_rate": 0.000132825712125886,
      "loss": 2.432,
      "step": 495
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.00013195545481352208,
      "loss": 2.3443,
      "step": 500
    },
    {
      "epoch": 1.11,
      "learning_rate": 0.0001310096956083743,
      "loss": 2.5815,
      "step": 505
    },
    {
      "epoch": 1.12,
      "learning_rate": 0.0001299895615895605,
      "loss": 2.5337,
      "step": 510
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.00012889626847006907,
      "loss": 2.3796,
      "step": 515
    },
    {
      "epoch": 1.14,
      "learning_rate": 0.00012773111914797142,
      "loss": 2.5178,
      "step": 520
    },
    {
      "epoch": 1.15,
      "learning_rate": 0.00012649550215373373,
      "loss": 2.4299,
      "step": 525
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.0001251908899954784,
      "loss": 2.4451,
      "step": 530
    },
    {
      "epoch": 1.18,
      "learning_rate": 0.00012381883740416864,
      "loss": 2.6521,
      "step": 535
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.00012238097948080652,
      "loss": 2.403,
      "step": 540
    },
    {
      "epoch": 1.2,
      "learning_rate": 0.00012087902974785178,
      "loss": 2.601,
      "step": 545
    },
    {
      "epoch": 1.21,
      "learning_rate": 0.00011931477810718475,
      "loss": 2.4694,
      "step": 550
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.00011769008870704702,
      "loss": 2.5935,
      "step": 555
    },
    {
      "epoch": 1.23,
      "learning_rate": 0.0001160068977204988,
      "loss": 2.5179,
      "step": 560
    },
    {
      "epoch": 1.24,
      "learning_rate": 0.00011426721103804855,
      "loss": 2.7228,
      "step": 565
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.00011247310187719211,
      "loss": 2.5971,
      "step": 570
    },
    {
      "epoch": 1.26,
      "learning_rate": 0.00011062670831172054,
      "loss": 2.5625,
      "step": 575
    },
    {
      "epoch": 1.27,
      "learning_rate": 0.0001087302307237391,
      "loss": 2.7913,
      "step": 580
    },
    {
      "epoch": 1.29,
      "learning_rate": 0.00010678592918142871,
      "loss": 2.4481,
      "step": 585
    },
    {
      "epoch": 1.3,
      "learning_rate": 0.00010479612074568051,
      "loss": 2.4637,
      "step": 590
    },
    {
      "epoch": 1.31,
      "learning_rate": 0.00010276317670881092,
      "loss": 2.7515,
      "step": 595
    },
    {
      "epoch": 1.32,
      "learning_rate": 0.0001006895197686461,
      "loss": 2.685,
      "step": 600
    },
    {
      "epoch": 1.33,
      "learning_rate": 9.857762114134977e-05,
      "loss": 2.6865,
      "step": 605
    },
    {
      "epoch": 1.34,
      "learning_rate": 9.642999761642645e-05,
      "loss": 2.586,
      "step": 610
    },
    {
      "epoch": 1.35,
      "learning_rate": 9.424920855741624e-05,
      "loss": 2.5638,
      "step": 615
    },
    {
      "epoch": 1.36,
      "learning_rate": 9.203785285185348e-05,
      "loss": 2.8532,
      "step": 620
    },
    {
      "epoch": 1.37,
      "learning_rate": 8.979856581412148e-05,
      "loss": 2.4697,
      "step": 625
    },
    {
      "epoch": 1.38,
      "learning_rate": 8.753401604490128e-05,
      "loss": 2.4653,
      "step": 630
    },
    {
      "epoch": 1.4,
      "learning_rate": 8.5246902250948e-05,
      "loss": 2.4832,
      "step": 635
    },
    {
      "epoch": 1.41,
      "learning_rate": 8.293995002899144e-05,
      "loss": 2.5514,
      "step": 640
    },
    {
      "epoch": 1.42,
      "learning_rate": 8.061590861759147e-05,
      "loss": 2.6216,
      "step": 645
    },
    {
      "epoch": 1.43,
      "learning_rate": 7.827754762081649e-05,
      "loss": 2.4314,
      "step": 650
    },
    {
      "epoch": 1.44,
      "learning_rate": 7.592765370765685e-05,
      "loss": 2.702,
      "step": 655
    },
    {
      "epoch": 1.45,
      "learning_rate": 7.356902729109682e-05,
      "loss": 2.3754,
      "step": 660
    },
    {
      "epoch": 1.46,
      "learning_rate": 7.120447919081049e-05,
      "loss": 2.4599,
      "step": 665
    },
    {
      "epoch": 1.47,
      "learning_rate": 6.883682728345293e-05,
      "loss": 2.7681,
      "step": 670
    },
    {
      "epoch": 1.48,
      "learning_rate": 6.646889314454614e-05,
      "loss": 2.4768,
      "step": 675
    },
    {
      "epoch": 1.49,
      "learning_rate": 6.410349868595235e-05,
      "loss": 2.6323,
      "step": 680
    },
    {
      "epoch": 1.51,
      "learning_rate": 6.174346279294876e-05,
      "loss": 2.4092,
      "step": 685
    },
    {
      "epoch": 1.52,
      "learning_rate": 5.939159796490916e-05,
      "loss": 2.6557,
      "step": 690
    },
    {
      "epoch": 1.53,
      "learning_rate": 5.705070696359339e-05,
      "loss": 2.3712,
      "step": 695
    },
    {
      "epoch": 1.54,
      "learning_rate": 5.4723579473046015e-05,
      "loss": 2.3837,
      "step": 700
    },
    {
      "epoch": 1.55,
      "learning_rate": 5.241298877507526e-05,
      "loss": 2.4259,
      "step": 705
    },
    {
      "epoch": 1.56,
      "learning_rate": 5.01216884442811e-05,
      "loss": 2.6717,
      "step": 710
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.785240906656888e-05,
      "loss": 2.361,
      "step": 715
    },
    {
      "epoch": 1.58,
      "learning_rate": 4.560785498505662e-05,
      "loss": 2.4714,
      "step": 720
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.339070107726098e-05,
      "loss": 2.6358,
      "step": 725
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.120358956739335e-05,
      "loss": 2.5129,
      "step": 730
    },
    {
      "epoch": 1.62,
      "learning_rate": 3.904912687757176e-05,
      "loss": 2.5261,
      "step": 735
    },
    {
      "epoch": 1.63,
      "learning_rate": 3.6929880521698846e-05,
      "loss": 2.5826,
      "step": 740
    },
    {
      "epoch": 1.64,
      "learning_rate": 3.484837604570524e-05,
      "loss": 2.3181,
      "step": 745
    },
    {
      "epoch": 1.65,
      "learning_rate": 3.2807094017811405e-05,
      "loss": 2.2812,
      "step": 750
    },
    {
      "epoch": 1.66,
      "learning_rate": 3.080846707238584e-05,
      "loss": 2.479,
      "step": 755
    },
    {
      "epoch": 1.67,
      "learning_rate": 2.885487701092958e-05,
      "loss": 2.6417,
      "step": 760
    },
    {
      "epoch": 1.68,
      "learning_rate": 2.694865196363672e-05,
      "loss": 2.6846,
      "step": 765
    },
    {
      "epoch": 1.69,
      "learning_rate": 2.509206361492011e-05,
      "loss": 2.4952,
      "step": 770
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.3287324496200448e-05,
      "loss": 2.5992,
      "step": 775
    },
    {
      "epoch": 1.71,
      "learning_rate": 2.1536585349191225e-05,
      "loss": 2.585,
      "step": 780
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.9841932562819154e-05,
      "loss": 2.597,
      "step": 785
    },
    {
      "epoch": 1.74,
      "learning_rate": 1.8205385686835072e-05,
      "loss": 2.2072,
      "step": 790
    },
    {
      "epoch": 1.75,
      "learning_rate": 1.6628895025078307e-05,
      "loss": 2.5062,
      "step": 795
    },
    {
      "epoch": 1.76,
      "learning_rate": 1.5114339311262777e-05,
      "loss": 2.5392,
      "step": 800
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.3663523470054795e-05,
      "loss": 2.5593,
      "step": 805
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.2278176466109037e-05,
      "loss": 2.5837,
      "step": 810
    },
    {
      "epoch": 1.79,
      "learning_rate": 1.0959949243630505e-05,
      "loss": 2.3897,
      "step": 815
    },
    {
      "epoch": 1.8,
      "learning_rate": 9.710412758911909e-06,
      "loss": 2.4527,
      "step": 820
    },
    {
      "epoch": 1.81,
      "learning_rate": 8.531056108195637e-06,
      "loss": 2.5424,
      "step": 825
    },
    {
      "epoch": 1.82,
      "learning_rate": 7.423284753089891e-06,
      "loss": 2.4882,
      "step": 830
    },
    {
      "epoch": 1.84,
      "learning_rate": 6.3884188456526086e-06,
      "loss": 2.4503,
      "step": 835
    },
    {
      "epoch": 1.85,
      "learning_rate": 5.427691655142655e-06,
      "loss": 2.3948,
      "step": 840
    },
    {
      "epoch": 1.86,
      "learning_rate": 4.5422480983086555e-06,
      "loss": 2.6461,
      "step": 845
    },
    {
      "epoch": 1.87,
      "learning_rate": 3.7331433749702816e-06,
      "loss": 2.4818,
      "step": 850
    },
    {
      "epoch": 1.88,
      "learning_rate": 3.0013417105168102e-06,
      "loss": 2.6764,
      "step": 855
    },
    {
      "epoch": 1.89,
      "learning_rate": 2.347715206820977e-06,
      "loss": 2.4561,
      "step": 860
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.7730428029395095e-06,
      "loss": 2.4363,
      "step": 865
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.2780093468362156e-06,
      "loss": 2.5073,
      "step": 870
    },
    {
      "epoch": 1.92,
      "learning_rate": 8.632047792358677e-07,
      "loss": 2.3678,
      "step": 875
    },
    {
      "epoch": 1.93,
      "learning_rate": 5.291234305804284e-07,
      "loss": 2.5126,
      "step": 880
    },
    {
      "epoch": 1.95,
      "learning_rate": 2.761634319266075e-07,
      "loss": 2.5024,
      "step": 885
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.0462624048520178e-07,
      "loss": 2.4416,
      "step": 890
    },
    {
      "epoch": 1.97,
      "learning_rate": 1.4716280368937374e-08,
      "loss": 2.3922,
      "step": 895
    },
    {
      "epoch": 1.98,
      "learning_rate": 6.540698976388004e-09,
      "loss": 2.7093,
      "step": 900
    },
    {
      "epoch": 1.99,
      "learning_rate": 8.010923930238477e-08,
      "loss": 2.4296,
      "step": 905
    },
    {
      "epoch": 2.0,
      "learning_rate": 2.3533422832715752e-07,
      "loss": 2.6948,
      "step": 910
    },
    {
      "epoch": 2.0,
      "eval_loss": 2.4912426471710205,
      "eval_runtime": 30.6235,
      "eval_samples_per_second": 20.932,
      "eval_steps_per_second": 2.645,
      "step": 910
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.1246491964533778e-05,
      "loss": 2.424,
      "step": 915
    },
    {
      "epoch": 2.05,
      "learning_rate": 1.2598018517843792e-05,
      "loss": 2.5084,
      "step": 920
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.4018079085430647e-05,
      "loss": 2.3701,
      "step": 925
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.5504935827871155e-05,
      "loss": 2.4517,
      "step": 930
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.7056769162026474e-05,
      "loss": 2.6206,
      "step": 935
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.867167998781036e-05,
      "loss": 2.5704,
      "step": 940
    },
    {
      "epoch": 2.1,
      "learning_rate": 2.0347692012266013e-05,
      "loss": 2.4849,
      "step": 945
    },
    {
      "epoch": 2.12,
      "learning_rate": 2.208275416810954e-05,
      "loss": 2.5616,
      "step": 950
    },
    {
      "epoch": 2.13,
      "learning_rate": 2.3874743123783855e-05,
      "loss": 2.3109,
      "step": 955
    },
    {
      "epoch": 2.14,
      "learning_rate": 2.572146588194454e-05,
      "loss": 2.4892,
      "step": 960
    },
    {
      "epoch": 2.15,
      "learning_rate": 2.7620662463202956e-05,
      "loss": 2.443,
      "step": 965
    },
    {
      "epoch": 2.16,
      "learning_rate": 2.957000867184013e-05,
      "loss": 2.4545,
      "step": 970
    },
    {
      "epoch": 2.17,
      "learning_rate": 3.156711894010725e-05,
      "loss": 2.4657,
      "step": 975
    },
    {
      "epoch": 2.18,
      "learning_rate": 3.360954924763191e-05,
      "loss": 2.1432,
      "step": 980
    },
    {
      "epoch": 2.19,
      "learning_rate": 3.569480011235739e-05,
      "loss": 2.5793,
      "step": 985
    },
    {
      "epoch": 2.2,
      "learning_rate": 3.782031964935521e-05,
      "loss": 2.3062,
      "step": 990
    },
    {
      "epoch": 2.22,
      "learning_rate": 3.998350669376499e-05,
      "loss": 2.3345,
      "step": 995
    },
    {
      "epoch": 2.23,
      "learning_rate": 4.218171398404682e-05,
      "loss": 2.5329,
      "step": 1000
    },
    {
      "epoch": 2.24,
      "learning_rate": 4.4412251401639226e-05,
      "loss": 2.201,
      "step": 1005
    },
    {
      "epoch": 2.25,
      "learning_rate": 4.667238926307116e-05,
      "loss": 2.5198,
      "step": 1010
    },
    {
      "epoch": 2.26,
      "learning_rate": 4.895936166048787e-05,
      "loss": 2.5241,
      "step": 1015
    },
    {
      "epoch": 2.27,
      "learning_rate": 5.1270369846509574e-05,
      "loss": 2.4079,
      "step": 1020
    },
    {
      "epoch": 2.28,
      "learning_rate": 5.360258565927803e-05,
      "loss": 2.5147,
      "step": 1025
    },
    {
      "epoch": 2.29,
      "learning_rate": 5.595315498349983e-05,
      "loss": 2.5063,
      "step": 1030
    },
    {
      "epoch": 2.31,
      "learning_rate": 5.83192012432515e-05,
      "loss": 2.3742,
      "step": 1035
    },
    {
      "epoch": 2.32,
      "learning_rate": 6.069782892226903e-05,
      "loss": 2.625,
      "step": 1040
    },
    {
      "epoch": 2.33,
      "learning_rate": 6.308612710742091e-05,
      "loss": 2.4382,
      "step": 1045
    },
    {
      "epoch": 2.34,
      "learning_rate": 6.548117305101883e-05,
      "loss": 2.4238,
      "step": 1050
    },
    {
      "epoch": 2.35,
      "learning_rate": 6.788003574761404e-05,
      "loss": 2.4157,
      "step": 1055
    },
    {
      "epoch": 2.36,
      "learning_rate": 7.027977952089649e-05,
      "loss": 2.5458,
      "step": 1060
    },
    {
      "epoch": 2.37,
      "learning_rate": 7.267746761631506e-05,
      "loss": 2.5048,
      "step": 1065
    },
    {
      "epoch": 2.38,
      "learning_rate": 7.507016579501221e-05,
      "loss": 2.5779,
      "step": 1070
    },
    {
      "epoch": 2.39,
      "learning_rate": 7.745494592468267e-05,
      "loss": 2.5149,
      "step": 1075
    },
    {
      "epoch": 2.41,
      "learning_rate": 7.98288895629588e-05,
      "loss": 2.4972,
      "step": 1080
    },
    {
      "epoch": 2.42,
      "learning_rate": 8.218909152893547e-05,
      "loss": 2.2248,
      "step": 1085
    },
    {
      "epoch": 2.43,
      "learning_rate": 8.453266345847049e-05,
      "loss": 2.3945,
      "step": 1090
    },
    {
      "epoch": 2.44,
      "learning_rate": 8.685673733890025e-05,
      "loss": 2.4223,
      "step": 1095
    },
    {
      "epoch": 2.45,
      "learning_rate": 8.915846901885212e-05,
      "loss": 2.5513,
      "step": 1100
    },
    {
      "epoch": 2.46,
      "learning_rate": 9.143504168885545e-05,
      "loss": 2.527,
      "step": 1105
    },
    {
      "epoch": 2.47,
      "learning_rate": 9.368366932849205e-05,
      "loss": 2.6526,
      "step": 1110
    },
    {
      "epoch": 2.48,
      "learning_rate": 9.590160011586822e-05,
      "loss": 2.6926,
      "step": 1115
    },
    {
      "epoch": 2.49,
      "learning_rate": 9.808611979523307e-05,
      "loss": 2.4965,
      "step": 1120
    },
    {
      "epoch": 2.51,
      "learning_rate": 0.00010023455499862926,
      "loss": 2.4985,
      "step": 1125
    },
    {
      "epoch": 2.52,
      "learning_rate": 0.00010234427651749925,
      "loss": 2.6502,
      "step": 1130
    },
    {
      "epoch": 2.53,
      "learning_rate": 0.00010441270252025704,
      "loss": 2.5715,
      "step": 1135
    },
    {
      "epoch": 2.54,
      "learning_rate": 0.00010643730171187608,
      "loss": 2.1817,
      "step": 1140
    },
    {
      "epoch": 2.55,
      "learning_rate": 0.00010841559643163376,
      "loss": 2.3754,
      "step": 1145
    },
    {
      "epoch": 2.56,
      "learning_rate": 0.00011034516568521922,
      "loss": 2.8706,
      "step": 1150
    },
    {
      "epoch": 2.57,
      "learning_rate": 0.0001122236481074937,
      "loss": 2.6822,
      "step": 1155
    },
    {
      "epoch": 2.58,
      "learning_rate": 0.0001140487448522782,
      "loss": 2.5523,
      "step": 1160
    },
    {
      "epoch": 2.59,
      "learning_rate": 0.0001158182224056317,
      "loss": 2.4356,
      "step": 1165
    },
    {
      "epoch": 2.61,
      "learning_rate": 0.00011752991531917766,
      "loss": 2.5516,
      "step": 1170
    },
    {
      "epoch": 2.62,
      "learning_rate": 0.00011918172886013147,
      "loss": 2.4273,
      "step": 1175
    },
    {
      "epoch": 2.63,
      "learning_rate": 0.00012077164157479157,
      "loss": 2.2673,
      "step": 1180
    },
    {
      "epoch": 2.64,
      "learning_rate": 0.00012229770776234816,
      "loss": 2.6163,
      "step": 1185
    },
    {
      "epoch": 2.65,
      "learning_rate": 0.000123758059855992,
      "loss": 2.5445,
      "step": 1190
    },
    {
      "epoch": 2.66,
      "learning_rate": 0.00012515091070840103,
      "loss": 2.3948,
      "step": 1195
    },
    {
      "epoch": 2.67,
      "learning_rate": 0.00012647455577881304,
      "loss": 2.5891,
      "step": 1200
    },
    {
      "epoch": 2.68,
      "learning_rate": 0.00012772737521900558,
      "loss": 2.5894,
      "step": 1205
    },
    {
      "epoch": 2.69,
      "learning_rate": 0.00012890783585563144,
      "loss": 2.5051,
      "step": 1210
    },
    {
      "epoch": 2.71,
      "learning_rate": 0.0001300144930664832,
      "loss": 2.4751,
      "step": 1215
    },
    {
      "epoch": 2.72,
      "learning_rate": 0.00013104599254838963,
      "loss": 2.4493,
      "step": 1220
    },
    {
      "epoch": 2.73,
      "learning_rate": 0.00013200107197458417,
      "loss": 2.4946,
      "step": 1225
    },
    {
      "epoch": 2.74,
      "learning_rate": 0.0001328785625395121,
      "loss": 2.4618,
      "step": 1230
    },
    {
      "epoch": 2.75,
      "learning_rate": 0.00013367739038918988,
      "loss": 2.5223,
      "step": 1235
    },
    {
      "epoch": 2.76,
      "learning_rate": 0.0001343965779353643,
      "loss": 2.4409,
      "step": 1240
    },
    {
      "epoch": 2.77,
      "learning_rate": 0.0001350352450518637,
      "loss": 2.3083,
      "step": 1245
    },
    {
      "epoch": 2.78,
      "learning_rate": 0.00013559261015167785,
      "loss": 2.4411,
      "step": 1250
    },
    {
      "epoch": 2.8,
      "learning_rate": 0.0001360679911434468,
      "loss": 2.3657,
      "step": 1255
    },
    {
      "epoch": 2.81,
      "learning_rate": 0.00013646080626618978,
      "loss": 2.4467,
      "step": 1260
    },
    {
      "epoch": 2.82,
      "learning_rate": 0.0001367705748012514,
      "loss": 2.3811,
      "step": 1265
    },
    {
      "epoch": 2.83,
      "learning_rate": 0.0001369969176605951,
      "loss": 2.5053,
      "step": 1270
    },
    {
      "epoch": 2.84,
      "learning_rate": 0.00013713955785072274,
      "loss": 2.7194,
      "step": 1275
    },
    {
      "epoch": 2.85,
      "learning_rate": 0.0001371983208116533,
      "loss": 2.5204,
      "step": 1280
    },
    {
      "epoch": 2.86,
      "learning_rate": 0.0001371731346305456,
      "loss": 2.5025,
      "step": 1285
    },
    {
      "epoch": 2.87,
      "learning_rate": 0.00013706403012970347,
      "loss": 2.4982,
      "step": 1290
    },
    {
      "epoch": 2.88,
      "learning_rate": 0.00013687114082885652,
      "loss": 2.7208,
      "step": 1295
    },
    {
      "epoch": 2.9,
      "learning_rate": 0.00013659470278176106,
      "loss": 2.5839,
      "step": 1300
    },
    {
      "epoch": 2.91,
      "learning_rate": 0.00013623505428732318,
      "loss": 2.5333,
      "step": 1305
    },
    {
      "epoch": 2.92,
      "learning_rate": 0.0001357926354755953,
      "loss": 2.5095,
      "step": 1310
    },
    {
      "epoch": 2.93,
      "learning_rate": 0.0001352679877691551,
      "loss": 2.2663,
      "step": 1315
    },
    {
      "epoch": 2.94,
      "learning_rate": 0.00013466175322052366,
      "loss": 2.5816,
      "step": 1320
    },
    {
      "epoch": 2.95,
      "learning_rate": 0.00013397467372643594,
      "loss": 2.4828,
      "step": 1325
    },
    {
      "epoch": 2.96,
      "learning_rate": 0.00013320759011992343,
      "loss": 2.5478,
      "step": 1330
    },
    {
      "epoch": 2.97,
      "learning_rate": 0.00013236144114132077,
      "loss": 2.5125,
      "step": 1335
    },
    {
      "epoch": 2.98,
      "learning_rate": 0.0001314372622894558,
      "loss": 2.5845,
      "step": 1340
    },
    {
      "epoch": 3.0,
      "learning_rate": 0.00013043618455442838,
      "loss": 2.6289,
      "step": 1345
    },
    {
      "epoch": 3.0,
      "eval_loss": 2.2795188426971436,
      "eval_runtime": 30.9908,
      "eval_samples_per_second": 22.329,
      "eval_steps_per_second": 2.807,
      "step": 1347
    },
    {
      "epoch": 2.99,
      "learning_rate": 0.000135403446735711,
      "loss": 2.1728,
      "step": 1350
    },
    {
      "epoch": 3.0,
      "learning_rate": 0.00013482119897697306,
      "loss": 2.2896,
      "step": 1355
    },
    {
      "epoch": 3.0,
      "eval_loss": 2.07898211479187,
      "eval_runtime": 29.7025,
      "eval_samples_per_second": 22.523,
      "eval_steps_per_second": 2.828,
      "step": 1356
    },
    {
      "epoch": 3.01,
      "learning_rate": 0.0001341589832217866,
      "loss": 2.2492,
      "step": 1360
    },
    {
      "epoch": 3.02,
      "learning_rate": 0.00013341759915466326,
      "loss": 2.4062,
      "step": 1365
    },
    {
      "epoch": 3.03,
      "learning_rate": 0.00013259794206290322,
      "loss": 2.4505,
      "step": 1370
    },
    {
      "epoch": 3.04,
      "learning_rate": 0.00013170100175545615,
      "loss": 2.3168,
      "step": 1375
    },
    {
      "epoch": 3.05,
      "learning_rate": 0.00013072786136763862,
      "loss": 2.4331,
      "step": 1380
    },
    {
      "epoch": 3.06,
      "learning_rate": 0.0001296796960531519,
      "loss": 2.3448,
      "step": 1385
    },
    {
      "epoch": 3.08,
      "learning_rate": 0.00012855777156497896,
      "loss": 2.5707,
      "step": 1390
    },
    {
      "epoch": 3.09,
      "learning_rate": 0.00012736344272687528,
      "loss": 2.3018,
      "step": 1395
    },
    {
      "epoch": 3.1,
      "learning_rate": 0.00012609815179729852,
      "loss": 2.2497,
      "step": 1400
    },
    {
      "epoch": 3.11,
      "learning_rate": 0.00012476342672775167,
      "loss": 2.2297,
      "step": 1405
    },
    {
      "epoch": 3.12,
      "learning_rate": 0.00012336087931764827,
      "loss": 2.5308,
      "step": 1410
    },
    {
      "epoch": 3.13,
      "learning_rate": 0.00012189220326791867,
      "loss": 2.4956,
      "step": 1415
    },
    {
      "epoch": 3.14,
      "learning_rate": 0.00012035917213571493,
      "loss": 2.259,
      "step": 1420
    },
    {
      "epoch": 3.15,
      "learning_rate": 0.0001187636371926859,
      "loss": 2.5147,
      "step": 1425
    },
    {
      "epoch": 3.16,
      "learning_rate": 0.00011710752518939737,
      "loss": 2.5795,
      "step": 1430
    },
    {
      "epoch": 3.17,
      "learning_rate": 0.00011539283602861218,
      "loss": 2.4607,
      "step": 1435
    },
    {
      "epoch": 3.19,
      "learning_rate": 0.00011362164035022977,
      "loss": 2.4427,
      "step": 1440
    },
    {
      "epoch": 3.2,
      "learning_rate": 0.00011179607703080082,
      "loss": 2.3976,
      "step": 1445
    },
    {
      "epoch": 3.21,
      "learning_rate": 0.00010991835060064675,
      "loss": 2.3127,
      "step": 1450
    },
    {
      "epoch": 3.22,
      "learning_rate": 0.00010799072858169134,
      "loss": 2.1402,
      "step": 1455
    },
    {
      "epoch": 3.23,
      "learning_rate": 0.0001060155387492263,
      "loss": 2.319,
      "step": 1460
    },
    {
      "epoch": 3.24,
      "learning_rate": 0.00010399516632091525,
      "loss": 2.6207,
      "step": 1465
    },
    {
      "epoch": 3.25,
      "learning_rate": 0.000101932051076431,
      "loss": 2.5146,
      "step": 1470
    },
    {
      "epoch": 3.26,
      "learning_rate": 9.982868441120418e-05,
      "loss": 2.513,
      "step": 1475
    },
    {
      "epoch": 3.27,
      "learning_rate": 9.768760632784142e-05,
      "loss": 2.5331,
      "step": 1480
    },
    {
      "epoch": 3.29,
      "learning_rate": 9.551140236884636e-05,
      "loss": 2.283,
      "step": 1485
    },
    {
      "epoch": 3.3,
      "learning_rate": 9.330270049434448e-05,
      "loss": 2.5398,
      "step": 1490
    },
    {
      "epoch": 3.31,
      "learning_rate": 9.106416790859171e-05,
      "loss": 2.4427,
      "step": 1495
    },
    {
      "epoch": 3.32,
      "learning_rate": 8.879850783908373e-05,
      "loss": 2.4624,
      "step": 1500
    },
    {
      "epoch": 3.33,
      "learning_rate": 8.650845627216773e-05,
      "loss": 2.3922,
      "step": 1505
    },
    {
      "epoch": 3.34,
      "learning_rate": 8.419677864910008e-05,
      "loss": 2.3814,
      "step": 1510
    },
    {
      "epoch": 3.35,
      "learning_rate": 8.186626652652247e-05,
      "loss": 2.4956,
      "step": 1515
    },
    {
      "epoch": 3.36,
      "learning_rate": 7.951973420541575e-05,
      "loss": 2.4434,
      "step": 1520
    },
    {
      "epoch": 3.37,
      "learning_rate": 7.716001533257615e-05,
      "loss": 2.2408,
      "step": 1525
    },
    {
      "epoch": 3.38,
      "learning_rate": 7.478995947873418e-05,
      "loss": 2.333,
      "step": 1530
    },
    {
      "epoch": 3.4,
      "learning_rate": 7.241242869744915e-05,
      "loss": 2.412,
      "step": 1535
    },
    {
      "epoch": 3.41,
      "learning_rate": 7.00302940689176e-05,
      "loss": 2.1938,
      "step": 1540
    },
    {
      "epoch": 3.42,
      "learning_rate": 6.764643223289375e-05,
      "loss": 2.4952,
      "step": 1545
    },
    {
      "epoch": 3.43,
      "learning_rate": 6.526372191488815e-05,
      "loss": 2.346,
      "step": 1550
    },
    {
      "epoch": 3.44,
      "learning_rate": 6.288504044985084e-05,
      "loss": 2.4523,
      "step": 1555
    },
    {
      "epoch": 3.45,
      "learning_rate": 6.051326030753298e-05,
      "loss": 2.3246,
      "step": 1560
    },
    {
      "epoch": 3.46,
      "learning_rate": 5.815124562372384e-05,
      "loss": 2.3573,
      "step": 1565
    },
    {
      "epoch": 3.47,
      "learning_rate": 5.580184874155164e-05,
      "loss": 2.5265,
      "step": 1570
    },
    {
      "epoch": 3.48,
      "learning_rate": 5.346790676702522e-05,
      "loss": 2.4817,
      "step": 1575
    },
    {
      "epoch": 3.5,
      "learning_rate": 5.115223814297577e-05,
      "loss": 2.3745,
      "step": 1580
    },
    {
      "epoch": 3.51,
      "learning_rate": 4.885763924553609e-05,
      "loss": 2.3791,
      "step": 1585
    },
    {
      "epoch": 3.52,
      "learning_rate": 4.658688100726727e-05,
      "loss": 2.438,
      "step": 1590
    },
    {
      "epoch": 3.53,
      "learning_rate": 4.434270557101133e-05,
      "loss": 2.2344,
      "step": 1595
    },
    {
      "epoch": 3.54,
      "learning_rate": 4.212782297850713e-05,
      "loss": 2.3451,
      "step": 1600
    },
    {
      "epoch": 3.55,
      "learning_rate": 3.9944907897778607e-05,
      "loss": 2.4611,
      "step": 1605
    },
    {
      "epoch": 3.56,
      "learning_rate": 3.7796596393228573e-05,
      "loss": 2.4182,
      "step": 1610
    },
    {
      "epoch": 3.57,
      "learning_rate": 3.568548274236061e-05,
      "loss": 2.5135,
      "step": 1615
    },
    {
      "epoch": 3.58,
      "learning_rate": 3.361411630295785e-05,
      "loss": 2.5817,
      "step": 1620
    },
    {
      "epoch": 3.6,
      "learning_rate": 3.158499843450252e-05,
      "loss": 2.3341,
      "step": 1625
    },
    {
      "epoch": 3.61,
      "learning_rate": 2.9600579477565787e-05,
      "loss": 2.3179,
      "step": 1630
    },
    {
      "epoch": 3.62,
      "learning_rate": 2.7663255794801203e-05,
      "loss": 2.29,
      "step": 1635
    },
    {
      "epoch": 3.63,
      "learning_rate": 2.57753668771237e-05,
      "loss": 2.5001,
      "step": 1640
    },
    {
      "epoch": 3.64,
      "learning_rate": 2.3939192518565938e-05,
      "loss": 2.5561,
      "step": 1645
    },
    {
      "epoch": 3.65,
      "learning_rate": 2.215695006322396e-05,
      "loss": 2.4063,
      "step": 1650
    },
    {
      "epoch": 3.66,
      "learning_rate": 2.0430791727616646e-05,
      "loss": 2.377,
      "step": 1655
    },
    {
      "epoch": 3.67,
      "learning_rate": 1.8762802001692604e-05,
      "loss": 2.6203,
      "step": 1660
    },
    {
      "epoch": 3.68,
      "learning_rate": 1.715499513162315e-05,
      "loss": 2.434,
      "step": 1665
    },
    {
      "epoch": 3.69,
      "learning_rate": 1.5609312687419068e-05,
      "loss": 2.2921,
      "step": 1670
    },
    {
      "epoch": 3.71,
      "learning_rate": 1.4127621218315069e-05,
      "loss": 2.2474,
      "step": 1675
    },
    {
      "epoch": 3.72,
      "learning_rate": 1.2711709998742125e-05,
      "loss": 2.4056,
      "step": 1680
    },
    {
      "epoch": 3.73,
      "learning_rate": 1.1363288867620807e-05,
      "loss": 2.1388,
      "step": 1685
    },
    {
      "epoch": 3.74,
      "learning_rate": 1.0083986163578448e-05,
      "loss": 2.4683,
      "step": 1690
    },
    {
      "epoch": 3.75,
      "learning_rate": 8.875346758583873e-06,
      "loss": 2.2937,
      "step": 1695
    },
    {
      "epoch": 3.76,
      "learning_rate": 7.73883019237949e-06,
      "loss": 2.4202,
      "step": 1700
    },
    {
      "epoch": 3.77,
      "learning_rate": 6.6758089099546965e-06,
      "loss": 2.5003,
      "step": 1705
    },
    {
      "epoch": 3.78,
      "learning_rate": 5.6875666041964786e-06,
      "loss": 2.3068,
      "step": 1710
    },
    {
      "epoch": 3.79,
      "learning_rate": 4.775296665718002e-06,
      "loss": 2.2147,
      "step": 1715
    },
    {
      "epoch": 3.81,
      "learning_rate": 3.940100741730249e-06,
      "loss": 2.4371,
      "step": 1720
    },
    {
      "epoch": 3.82,
      "learning_rate": 3.1829874057071e-06,
      "loss": 2.3255,
      "step": 1725
    },
    {
      "epoch": 3.83,
      "learning_rate": 2.504870939442012e-06,
      "loss": 2.3511,
      "step": 1730
    },
    {
      "epoch": 3.84,
      "learning_rate": 1.9065702289715435e-06,
      "loss": 2.587,
      "step": 1735
    },
    {
      "epoch": 3.85,
      "learning_rate": 1.3888077756974934e-06,
      "loss": 2.4066,
      "step": 1740
    },
    {
      "epoch": 3.86,
      "learning_rate": 9.522088239021213e-07,
      "loss": 2.3034,
      "step": 1745
    },
    {
      "epoch": 3.87,
      "learning_rate": 5.973006057099719e-07,
      "loss": 2.2354,
      "step": 1750
    },
    {
      "epoch": 3.88,
      "learning_rate": 3.2451170440812575e-07,
      "loss": 2.4044,
      "step": 1755
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.3417153689369494e-07,
      "loss": 2.2793,
      "step": 1760
    },
    {
      "epoch": 3.9,
      "learning_rate": 2.650995587359215e-08,
      "loss": 2.3649,
      "step": 1765
    },
    {
      "epoch": 3.92,
      "learning_rate": 1.6569722969137058e-09,
      "loss": 2.5027,
      "step": 1770
    },
    {
      "epoch": 3.93,
      "learning_rate": 5.964259835509249e-08,
      "loss": 2.3313,
      "step": 1775
    },
    {
      "epoch": 3.94,
      "learning_rate": 2.0039681123962964e-07,
      "loss": 2.3823,
      "step": 1780
    },
    {
      "epoch": 3.95,
      "learning_rate": 4.2374963770060374e-07,
      "loss": 2.3593,
      "step": 1785
    },
    {
      "epoch": 3.96,
      "learning_rate": 7.294313593049221e-07,
      "loss": 2.4781,
      "step": 1790
    },
    {
      "epoch": 3.97,
      "learning_rate": 1.1170728381450536e-06,
      "loss": 2.4511,
      "step": 1795
    },
    {
      "epoch": 3.98,
      "learning_rate": 1.5862059626056908e-06,
      "loss": 2.0358,
      "step": 1800
    },
    {
      "epoch": 3.99,
      "learning_rate": 2.1362642126510735e-06,
      "loss": 2.4982,
      "step": 1805
    },
    {
      "epoch": 4.0,
      "eval_loss": 2.0775227546691895,
      "eval_runtime": 29.8853,
      "eval_samples_per_second": 22.386,
      "eval_steps_per_second": 2.811,
      "step": 1808
    },
    {
      "epoch": 3.97,
      "learning_rate": 1.9689817301781333e-07,
      "loss": 2.3835,
      "step": 1810
    },
    {
      "epoch": 3.98,
      "learning_rate": 5.860097463115006e-08,
      "loss": 2.2235,
      "step": 1815
    },
    {
      "epoch": 3.99,
      "learning_rate": 1.628030220611354e-09,
      "loss": 2.1558,
      "step": 1820
    },
    {
      "epoch": 4.0,
      "eval_loss": 1.9903991222381592,
      "eval_runtime": 27.7097,
      "eval_samples_per_second": 22.844,
      "eval_steps_per_second": 2.887,
      "step": 1824
    }
  ],
  "max_steps": 2280,
  "num_train_epochs": 5,
  "total_flos": 1902336638976000.0,
  "trial_name": null,
  "trial_params": null
}