{
  "best_metric": 3.550342321395874,
  "best_model_checkpoint": "output/eminem/checkpoint-920",
  "epoch": 2.0,
  "global_step": 920,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 0.00013715876234566868, "loss": 4.4386, "step": 5 },
    { "epoch": 0.02, "learning_rate": 0.00013703509896122095, "loss": 4.1948, "step": 10 },
    { "epoch": 0.03, "learning_rate": 0.00013682915852268886, "loss": 4.0957, "step": 15 },
    { "epoch": 0.04, "learning_rate": 0.00013654118862484264, "loss": 3.9438, "step": 20 },
    { "epoch": 0.06, "learning_rate": 0.00013617153548351626, "loss": 4.058, "step": 25 },
    { "epoch": 0.07, "learning_rate": 0.00013572064351936462, "loss": 4.0441, "step": 30 },
    { "epoch": 0.08, "learning_rate": 0.00013518905482355273, "loss": 3.872, "step": 35 },
    { "epoch": 0.09, "learning_rate": 0.00013457740850601892, "loss": 4.2427, "step": 40 },
    { "epoch": 0.1, "learning_rate": 0.00013388643992709594, "loss": 4.0466, "step": 45 },
    { "epoch": 0.11, "learning_rate": 0.0001331169798134139, "loss": 3.9197, "step": 50 },
    { "epoch": 0.12, "learning_rate": 0.00013226995325914744, "loss": 4.0498, "step": 55 },
    { "epoch": 0.13, "learning_rate": 0.00013134637861380834, "loss": 3.9624, "step": 60 },
    { "epoch": 0.14, "learning_rate": 0.0001303473662579206, "loss": 3.8491, "step": 65 },
    { "epoch": 0.15, "learning_rate": 0.00012927411726804995, "loss": 3.8877, "step": 70 },
    { "epoch": 0.17, "learning_rate": 0.00012812792197279278, "loss": 3.9778, "step": 75 },
    { "epoch": 0.18, "learning_rate": 0.00012691015840146053, "loss": 3.9312, "step": 80 },
    { "epoch": 0.19, "learning_rate": 0.00012562229062732468, "loss": 3.8452, "step": 85 },
    { "epoch": 0.2, "learning_rate": 0.00012426586700741422, "loss": 3.9473, "step": 90 },
    { "epoch": 0.21, "learning_rate": 0.00012284251832098172, "loss": 4.0293, "step": 95 },
    { "epoch": 0.22, "learning_rate": 0.00012135395580887633, "loss": 3.8128, "step": 100 },
    { "epoch": 0.23, "learning_rate": 0.00011980196911618039, "loss": 4.0833, "step": 105 },
    { "epoch": 0.24, "learning_rate": 0.0001181884241405837, "loss": 3.7779, "step": 110 },
    { "epoch": 0.25, "learning_rate": 0.00011651526078908192, "loss": 3.8247, "step": 115 },
    { "epoch": 0.26, "learning_rate": 0.00011478449064569633, "loss": 3.9986, "step": 120 },
    { "epoch": 0.28, "learning_rate": 0.00011299819455301873, "loss": 4.0396, "step": 125 },
    { "epoch": 0.29, "learning_rate": 0.0001111585201104895, "loss": 3.8924, "step": 130 },
    { "epoch": 0.3, "learning_rate": 0.000109267679092416, "loss": 3.8117, "step": 135 },
    { "epoch": 0.31, "learning_rate": 0.00010732794478883606, "loss": 3.9015, "step": 140 },
    { "epoch": 0.32, "learning_rate": 0.00010534164927242335, "loss": 3.7786, "step": 145 },
    { "epoch": 0.33, "learning_rate": 0.0001033111805947203, "loss": 4.0211, "step": 150 },
    { "epoch": 0.34, "learning_rate": 0.00010123897991506982, "loss": 3.8728, "step": 155 },
    { "epoch": 0.35, "learning_rate": 9.912753856569734e-05, "loss": 3.9266, "step": 160 },
    { "epoch": 0.36, "learning_rate": 9.697939505647188e-05, "loss": 3.9463, "step": 165 },
    { "epoch": 0.38, "learning_rate": 9.479713202294696e-05, "loss": 3.7152, "step": 170 },
    { "epoch": 0.39, "learning_rate": 9.258337312135107e-05, "loss": 3.9496, "step": 175 },
    { "epoch": 0.4, "learning_rate": 9.034077987426021e-05, "loss": 3.7439, "step": 180 },
    { "epoch": 0.41, "learning_rate": 8.807204847074523e-05, "loss": 3.7879, "step": 185 },
    { "epoch": 0.42, "learning_rate": 8.577990652484077e-05, "loss": 3.7768, "step": 190 },
    { "epoch": 0.43, "learning_rate": 8.34671097962332e-05, "loss": 3.6851, "step": 195 },
    { "epoch": 0.44, "learning_rate": 8.113643887711011e-05, "loss": 3.7911, "step": 200 },
    { "epoch": 0.45, "learning_rate": 7.879069584915438e-05, "loss": 3.6839, "step": 205 },
    { "epoch": 0.46, "learning_rate": 7.643270091470234e-05, "loss": 3.7319, "step": 210 },
    { "epoch": 0.47, "learning_rate": 7.406528900611617e-05, "loss": 3.6703, "step": 215 },
    { "epoch": 0.49, "learning_rate": 7.169130637744674e-05, "loss": 3.7467, "step": 220 },
    { "epoch": 0.5, "learning_rate": 6.931360718248504e-05, "loss": 3.8521, "step": 225 },
    { "epoch": 0.51, "learning_rate": 6.693505004331577e-05, "loss": 3.6709, "step": 230 },
    { "epoch": 0.52, "learning_rate": 6.455849461349907e-05, "loss": 3.6242, "step": 235 },
    { "epoch": 0.53, "learning_rate": 6.218679814001198e-05, "loss": 3.6225, "step": 240 },
    { "epoch": 0.54, "learning_rate": 5.9822812028083505e-05, "loss": 3.8016, "step": 245 },
    { "epoch": 0.55, "learning_rate": 5.746937841305257e-05, "loss": 3.6923, "step": 250 },
    { "epoch": 0.56, "learning_rate": 5.512932674337138e-05, "loss": 3.894, "step": 255 },
    { "epoch": 0.57, "learning_rate": 5.280547037886122e-05, "loss": 3.7461, "step": 260 },
    { "epoch": 0.58, "learning_rate": 5.050060320831149e-05, "loss": 3.5966, "step": 265 },
    { "epoch": 0.6, "learning_rate": 4.821749629048772e-05, "loss": 3.7551, "step": 270 },
    { "epoch": 0.61, "learning_rate": 4.595889452258756e-05, "loss": 3.8694, "step": 275 },
    { "epoch": 0.62, "learning_rate": 4.372751334014969e-05, "loss": 3.7224, "step": 280 },
    { "epoch": 0.63, "learning_rate": 4.1526035452383523e-05, "loss": 3.915, "step": 285 },
    { "epoch": 0.64, "learning_rate": 3.935710761684453e-05, "loss": 3.8484, "step": 290 },
    { "epoch": 0.65, "learning_rate": 3.722333745733311e-05, "loss": 3.7062, "step": 295 },
    { "epoch": 0.66, "learning_rate": 3.512729032884219e-05, "loss": 3.676, "step": 300 },
    { "epoch": 0.67, "learning_rate": 3.3071486233323674e-05, "loss": 3.6448, "step": 305 },
    { "epoch": 0.68, "learning_rate": 3.105839678998049e-05, "loss": 3.9731, "step": 310 },
    { "epoch": 0.7, "learning_rate": 2.9090442263728265e-05, "loss": 3.6278, "step": 315 },
    { "epoch": 0.71, "learning_rate": 2.716998865539764e-05, "loss": 3.6389, "step": 320 },
    { "epoch": 0.72, "learning_rate": 2.5299344857176957e-05, "loss": 3.6411, "step": 325 },
    { "epoch": 0.73, "learning_rate": 2.3480759876714295e-05, "loss": 3.7764, "step": 330 },
    { "epoch": 0.74, "learning_rate": 2.1716420133216482e-05, "loss": 3.7769, "step": 335 },
    { "epoch": 0.75, "learning_rate": 2.0008446828796293e-05, "loss": 3.528, "step": 340 },
    { "epoch": 0.76, "learning_rate": 1.8358893398227267e-05, "loss": 3.819, "step": 345 },
    { "epoch": 0.77, "learning_rate": 1.6769743040173313e-05, "loss": 3.6014, "step": 350 },
    { "epoch": 0.78, "learning_rate": 1.5242906332860249e-05, "loss": 3.6993, "step": 355 },
    { "epoch": 0.79, "learning_rate": 1.3780218937056495e-05, "loss": 3.7919, "step": 360 },
    { "epoch": 0.81, "learning_rate": 1.2383439389124231e-05, "loss": 3.6903, "step": 365 },
    { "epoch": 0.82, "learning_rate": 1.105424698679451e-05, "loss": 3.4355, "step": 370 },
    { "epoch": 0.83, "learning_rate": 9.794239770208025e-06, "loss": 3.7661, "step": 375 },
    { "epoch": 0.84, "learning_rate": 8.604932600649156e-06, "loss": 4.0533, "step": 380 },
    { "epoch": 0.85, "learning_rate": 7.487755339282637e-06, "loss": 3.7796, "step": 385 },
    { "epoch": 0.86, "learning_rate": 6.444051128083183e-06, "loss": 3.7312, "step": 390 },
    { "epoch": 0.87, "learning_rate": 5.4750747750241e-06, "loss": 3.7231, "step": 395 },
    { "epoch": 0.88, "learning_rate": 4.581991245466992e-06, "loss": 3.7788, "step": 400 },
    { "epoch": 0.89, "learning_rate": 3.7658742615658213e-06, "loss": 3.7838, "step": 405 },
    { "epoch": 0.91, "learning_rate": 3.027705011369445e-06, "loss": 3.7598, "step": 410 },
    { "epoch": 0.92, "learning_rate": 2.3683709691745994e-06, "loss": 3.954, "step": 415 },
    { "epoch": 0.93, "learning_rate": 1.7886648285474887e-06, "loss": 3.7673, "step": 420 },
    { "epoch": 0.94, "learning_rate": 1.289283549296875e-06, "loss": 3.8811, "step": 425 },
    { "epoch": 0.95, "learning_rate": 8.708275195444353e-07, "loss": 3.6489, "step": 430 },
    { "epoch": 0.96, "learning_rate": 5.337998338997259e-07, "loss": 3.6089, "step": 435 },
    { "epoch": 0.97, "learning_rate": 2.786056886076668e-07, "loss": 3.5491, "step": 440 },
    { "epoch": 0.98, "learning_rate": 1.0555189439568316e-07, "loss": 3.7324, "step": 445 },
    { "epoch": 0.99, "learning_rate": 1.484650760624615e-08, "loss": 3.7782, "step": 450 },
    { "epoch": 1.0, "eval_loss": 3.7048158645629883, "eval_runtime": 29.3019, "eval_samples_per_second": 22.388, "eval_steps_per_second": 2.798, "step": 453 },
    { "epoch": 0.99, "learning_rate": 3.9992265680461966e-08, "loss": 3.6362, "step": 455 },
    { "epoch": 1.0, "learning_rate": 0.0, "loss": 3.3994, "step": 460 },
    { "epoch": 1.0, "eval_loss": 3.5555710792541504, "eval_runtime": 13.5198, "eval_samples_per_second": 44.675, "eval_steps_per_second": 5.621, "step": 460 },
    { "epoch": 1.01, "learning_rate": 3.9992265680461966e-08, "loss": 3.5614, "step": 465 },
    { "epoch": 1.02, "learning_rate": 1.5992243352901425e-07, "loss": 3.7249, "step": 470 },
    { "epoch": 1.03, "learning_rate": 3.5965067033469397e-07, "loss": 3.4991, "step": 475 },
    { "epoch": 1.04, "learning_rate": 6.389441019077102e-07, "loss": 3.439, "step": 480 },
    { "epoch": 1.05, "learning_rate": 9.97477084600295e-07, "loss": 3.6568, "step": 485 },
    { "epoch": 1.07, "learning_rate": 1.4348315849926483e-06, "loss": 3.5489, "step": 490 },
    { "epoch": 1.08, "learning_rate": 1.9504976673012086e-06, "loss": 3.6268, "step": 495 },
    { "epoch": 1.09, "learning_rate": 2.5438740879409643e-06, "loss": 3.8246, "step": 500 },
    { "epoch": 1.1, "learning_rate": 3.2142689965485674e-06, "loss": 3.5789, "step": 505 },
    { "epoch": 1.11, "learning_rate": 3.960900742648913e-06, "loss": 3.618, "step": 510 },
    { "epoch": 1.12, "learning_rate": 4.782898787024646e-06, "loss": 3.6802, "step": 515 },
    { "epoch": 1.13, "learning_rate": 5.679304716725914e-06, "loss": 3.6302, "step": 520 },
    { "epoch": 1.14, "learning_rate": 6.649073362537048e-06, "loss": 3.5926, "step": 525 },
    { "epoch": 1.15, "learning_rate": 7.691074017597052e-06, "loss": 3.5005, "step": 530 },
    { "epoch": 1.16, "learning_rate": 8.804091755753263e-06, "loss": 3.6247, "step": 535 },
    { "epoch": 1.17, "learning_rate": 9.986828848110884e-06, "loss": 3.5699, "step": 540 },
    { "epoch": 1.18, "learning_rate": 1.1237906276126821e-05, "loss": 3.5435, "step": 545 },
    { "epoch": 1.2, "learning_rate": 1.2555865339483672e-05, "loss": 3.5681, "step": 550 },
    { "epoch": 1.21, "learning_rate": 1.3939169356868945e-05, "loss": 3.5438, "step": 555 },
    { "epoch": 1.22, "learning_rate": 1.5386205457676803e-05, "loss": 3.6615, "step": 560 },
    { "epoch": 1.23, "learning_rate": 1.6895286462543014e-05, "loss": 3.5334, "step": 565 },
    { "epoch": 1.24, "learning_rate": 1.8464652850520678e-05, "loss": 3.5256, "step": 570 },
    { "epoch": 1.25, "learning_rate": 2.009247481060283e-05, "loss": 3.5908, "step": 575 },
    { "epoch": 1.26, "learning_rate": 2.1776854375200328e-05, "loss": 3.6817, "step": 580 },
    { "epoch": 1.27, "learning_rate": 2.351582763308709e-05, "loss": 3.6254, "step": 585 },
    { "epoch": 1.28, "learning_rate": 2.5307367019232758e-05, "loss": 3.5371, "step": 590 },
    { "epoch": 1.29, "learning_rate": 2.714938367885288e-05, "loss": 3.6406, "step": 595 },
    { "epoch": 1.3, "learning_rate": 2.9039729902920112e-05, "loss": 3.5787, "step": 600 },
    { "epoch": 1.32, "learning_rate": 3.097620163229676e-05, "loss": 3.5732, "step": 605 },
    { "epoch": 1.33, "learning_rate": 3.295654102756921e-05, "loss": 3.5091, "step": 610 },
    { "epoch": 1.34, "learning_rate": 3.4978439101588006e-05, "loss": 3.617, "step": 615 },
    { "epoch": 1.35, "learning_rate": 3.703953841164292e-05, "loss": 3.582, "step": 620 },
    { "epoch": 1.36, "learning_rate": 3.913743580813637e-05, "loss": 3.6594, "step": 625 },
    { "epoch": 1.37, "learning_rate": 4.126968523654786e-05, "loss": 3.6063, "step": 630 },
    { "epoch": 1.38, "learning_rate": 4.343380058942427e-05, "loss": 3.6137, "step": 635 },
    { "epoch": 1.39, "learning_rate": 4.562725860507034e-05, "loss": 3.5565, "step": 640 },
    { "epoch": 1.4, "learning_rate": 4.784750180955822e-05, "loss": 3.6099, "step": 645 },
    { "epoch": 1.41, "learning_rate": 5.009194149862813e-05, "loss": 3.6417, "step": 650 },
    { "epoch": 1.42, "learning_rate": 5.235796075600178e-05, "loss": 3.7049, "step": 655 },
    { "epoch": 1.43, "learning_rate": 5.4642917504589275e-05, "loss": 3.7319, "step": 660 },
    { "epoch": 1.45, "learning_rate": 5.694414758703346e-05, "loss": 3.554, "step": 665 },
    { "epoch": 1.46, "learning_rate": 5.92589678719975e-05, "loss": 3.5522, "step": 670 },
    { "epoch": 1.47, "learning_rate": 6.158467938257645e-05, "loss": 3.6115, "step": 675 },
    { "epoch": 1.48, "learning_rate": 6.391857044318355e-05, "loss": 3.6475, "step": 680 },
    { "epoch": 1.49, "learning_rate": 6.625791984124255e-05, "loss": 3.5034, "step": 685 },
    { "epoch": 1.5, "learning_rate": 6.859999999999999e-05, "loss": 3.5767, "step": 690 },
    { "epoch": 1.51, "learning_rate": 7.094208015875743e-05, "loss": 3.6471, "step": 695 },
    { "epoch": 1.52, "learning_rate": 7.328142955681643e-05, "loss": 3.6515, "step": 700 },
    { "epoch": 1.53, "learning_rate": 7.561532061742353e-05, "loss": 3.6649, "step": 705 },
    { "epoch": 1.54, "learning_rate": 7.794103212800247e-05, "loss": 3.4103, "step": 710 },
    { "epoch": 1.55, "learning_rate": 8.025585241296653e-05, "loss": 3.5488, "step": 715 },
    { "epoch": 1.57, "learning_rate": 8.25570824954107e-05, "loss": 3.5076, "step": 720 },
    { "epoch": 1.58, "learning_rate": 8.484203924399819e-05, "loss": 3.5708, "step": 725 },
    { "epoch": 1.59, "learning_rate": 8.710805850137184e-05, "loss": 3.7272, "step": 730 },
    { "epoch": 1.6, "learning_rate": 8.935249819044176e-05, "loss": 3.6548, "step": 735 },
    { "epoch": 1.61, "learning_rate": 9.157274139492964e-05, "loss": 3.6463, "step": 740 },
    { "epoch": 1.62, "learning_rate": 9.376619941057571e-05, "loss": 3.484, "step": 745 },
    { "epoch": 1.63, "learning_rate": 9.593031476345212e-05, "loss": 3.5106, "step": 750 },
    { "epoch": 1.64, "learning_rate": 9.80625641918636e-05, "loss": 3.6845, "step": 755 },
    { "epoch": 1.65, "learning_rate": 0.00010016046158835706, "loss": 3.5078, "step": 760 },
    { "epoch": 1.66, "learning_rate": 0.00010222156089841198, "loss": 3.62, "step": 765 },
    { "epoch": 1.67, "learning_rate": 0.00010424345897243078, "loss": 3.644, "step": 770 },
    { "epoch": 1.68, "learning_rate": 0.00010622379836770322, "loss": 3.7684, "step": 775 },
    { "epoch": 1.7, "learning_rate": 0.00010816027009707987, "loss": 3.5978, "step": 780 },
    { "epoch": 1.71, "learning_rate": 0.0001100506163211471, "loss": 3.5937, "step": 785 },
    { "epoch": 1.72, "learning_rate": 0.00011189263298076723, "loss": 3.6434, "step": 790 },
    { "epoch": 1.73, "learning_rate": 0.00011368417236691289, "loss": 3.6893, "step": 795 },
    { "epoch": 1.74, "learning_rate": 0.00011542314562479964, "loss": 3.6739, "step": 800 },
    { "epoch": 1.75, "learning_rate": 0.00011710752518939715, "loss": 3.5122, "step": 805 },
    { "epoch": 1.76, "learning_rate": 0.00011873534714947934, "loss": 3.6633, "step": 810 },
    { "epoch": 1.77, "learning_rate": 0.00012030471353745696, "loss": 3.5101, "step": 815 },
    { "epoch": 1.78, "learning_rate": 0.00012181379454232318, "loss": 3.6736, "step": 820 },
    { "epoch": 1.79, "learning_rate": 0.00012326083064313103, "loss": 3.4271, "step": 825 },
    { "epoch": 1.8, "learning_rate": 0.0001246441346605163, "loss": 3.5088, "step": 830 },
    { "epoch": 1.82, "learning_rate": 0.00012596209372387317, "loss": 3.5845, "step": 835 },
    { "epoch": 1.83, "learning_rate": 0.0001272131711518891, "loss": 3.6172, "step": 840 },
    { "epoch": 1.84, "learning_rate": 0.00012839590824424672, "loss": 3.5649, "step": 845 },
    { "epoch": 1.85, "learning_rate": 0.00012950892598240292, "loss": 3.5506, "step": 850 },
    { "epoch": 1.86, "learning_rate": 0.00013055092663746294, "loss": 3.4779, "step": 855 },
    { "epoch": 1.87, "learning_rate": 0.00013152069528327408, "loss": 3.5707, "step": 860 },
    { "epoch": 1.88, "learning_rate": 0.00013241710121297533, "loss": 3.497, "step": 865 },
    { "epoch": 1.89, "learning_rate": 0.0001332390992573511, "loss": 3.6343, "step": 870 },
    { "epoch": 1.9, "learning_rate": 0.00013398573100345144, "loss": 3.5251, "step": 875 },
    { "epoch": 1.91, "learning_rate": 0.00013465612591205902, "loss": 3.7421, "step": 880 },
    { "epoch": 1.92, "learning_rate": 0.00013524950233269879, "loss": 3.5967, "step": 885 },
    { "epoch": 1.93, "learning_rate": 0.00013576516841500732, "loss": 3.5853, "step": 890 },
    { "epoch": 1.95, "learning_rate": 0.0001362025229153997, "loss": 3.7016, "step": 895 },
    { "epoch": 1.96, "learning_rate": 0.00013656105589809228, "loss": 3.7649, "step": 900 },
    { "epoch": 1.97, "learning_rate": 0.0001368403493296653, "loss": 3.6488, "step": 905 },
    { "epoch": 1.98, "learning_rate": 0.000137040077566471, "loss": 3.5113, "step": 910 },
    { "epoch": 1.99, "learning_rate": 0.00013716000773431953, "loss": 3.4688, "step": 915 },
    { "epoch": 2.0, "learning_rate": 0.0001372, "loss": 3.5363, "step": 920 },
    { "epoch": 2.0, "eval_loss": 3.550342321395874, "eval_runtime": 15.5302, "eval_samples_per_second": 38.892, "eval_steps_per_second": 4.894, "step": 920 }
  ],
  "max_steps": 920,
  "num_train_epochs": 2,
  "total_flos": 959594987520000.0,
  "trial_name": null,
  "trial_params": null
}