{
"best_metric": 1.4596115350723267,
"best_model_checkpoint": "output/boris-grebenshikov/checkpoint-1903",
"epoch": 11.0,
"global_step": 1903,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 0.00013690389360668606, |
|
"loss": 2.5933, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 0.00013601813066569938, |
|
"loss": 2.3572, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 0.00013455035782630487, |
|
"loss": 2.2816, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 0.00013251324613578177, |
|
"loss": 2.2503, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 0.00012992438165230672, |
|
"loss": 2.0864, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 0.00012680611362733682, |
|
"loss": 2.1159, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 0.00012318536156811003, |
|
"loss": 2.0993, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 0.00011909338284586077, |
|
"loss": 1.8802, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 0.00011456550285595239, |
|
"loss": 2.0531, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 0.00010964081005941026, |
|
"loss": 1.9281, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 0.0001043618185385132, |
|
"loss": 2.1099, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 9.877410097954656e-05, |
|
"loss": 2.0119, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9.292589525111794e-05, |
|
"loss": 1.9675, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.686768797438203e-05, |
|
"loss": 1.9512, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 8.065177868014528e-05, |
|
"loss": 2.047, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.433182831541305e-05, |
|
"loss": 1.9436, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.796239599704895e-05, |
|
"loss": 1.9677, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.159846801167835e-05, |
|
"loss": 1.8582, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 5.529498312790352e-05, |
|
"loss": 1.9952, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.9106358318734425e-05, |
|
"loss": 1.9317, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 4.3086018988597235e-05, |
|
"loss": 1.834, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.728593776039493e-05, |
|
"loss": 1.8923, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.1756185804197785e-05, |
|
"loss": 1.9544, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.6544500580870892e-05, |
|
"loss": 1.921, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.169587373223826e-05, |
|
"loss": 1.9539, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.725216267546246e-05, |
|
"loss": 1.9665, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3251729254682012e-05, |
|
"loss": 2.0147, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 9.729108569369736e-06, |
|
"loss": 1.8575, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.7147108383636075e-06, |
|
"loss": 1.8372, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 4.234558873329575e-06, |
|
"loss": 1.7437, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.310063428006295e-06, |
|
"loss": 1.8886, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.578383626055595e-07, |
|
"loss": 1.9129, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.895572190242788e-07, |
|
"loss": 1.8767, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.8803279399871826, |
|
"eval_runtime": 11.5804, |
|
"eval_samples_per_second": 22.279, |
|
"eval_steps_per_second": 2.85, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0177397892100256e-07, |
|
"loss": 1.9274, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.744057536125183, |
|
"eval_runtime": 10.1523, |
|
"eval_samples_per_second": 22.458, |
|
"eval_steps_per_second": 2.856, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 4.523909530405488e-08, |
|
"loss": 1.937, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 5.534939285797931e-07, |
|
"loss": 1.8799, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.622351211524088e-06, |
|
"loss": 1.8935, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.243005142843674e-06, |
|
"loss": 1.873, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 5.402103933939619e-06, |
|
"loss": 1.8993, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.081859807874568e-06, |
|
"loss": 1.9433, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.1260195544315306e-05, |
|
"loss": 1.8802, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.491092636313743e-05, |
|
"loss": 1.9911, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.90039756482401e-05, |
|
"loss": 1.9398, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.350562273432663e-05, |
|
"loss": 1.7847, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.8378780715254437e-05, |
|
"loss": 1.8382, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 3.358330198522479e-05, |
|
"loss": 1.9425, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 3.907630899560607e-05, |
|
"loss": 1.9129, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.4812547502442855e-05, |
|
"loss": 1.8725, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.0744759394415807e-05, |
|
"loss": 1.9258, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.682407202970108e-05, |
|
"loss": 1.8416, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 6.300040087417042e-05, |
|
"loss": 1.8097, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 6.922286212378929e-05, |
|
"loss": 1.8724, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 7.544019191181583e-05, |
|
"loss": 1.8985, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 8.160116864715307e-05, |
|
"loss": 1.877, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 8.765503500441301e-05, |
|
"loss": 1.8627, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.35519160891186e-05, |
|
"loss": 1.7856, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 9.92432303329815e-05, |
|
"loss": 1.8525, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 0.00010468208973408737, |
|
"loss": 1.9036, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 0.00010982368614460176, |
|
"loss": 1.8999, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 0.00011462566042355846, |
|
"loss": 1.866, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 0.00011904845141345724, |
|
"loss": 1.9433, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 0.00012305562186562137, |
|
"loss": 1.8415, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 0.0001266141586291718, |
|
"loss": 1.8489, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 0.00012969474463050626, |
|
"loss": 1.7954, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 0.00013227200040257384, |
|
"loss": 1.8456, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 0.000134324693174103, |
|
"loss": 1.8414, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 0.00013583591179619617, |
|
"loss": 1.8602, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 0.00013679320606515522, |
|
"loss": 1.8914, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 0.00013718868929372445, |
|
"loss": 1.7701, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 1.7017050981521606, |
|
"eval_runtime": 10.2794, |
|
"eval_samples_per_second": 22.18, |
|
"eval_steps_per_second": 2.821, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 0.00013603195463831566, |
|
"loss": 1.8257, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 0.00013458123912165538, |
|
"loss": 1.7795, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 0.00013256759493713883, |
|
"loss": 1.6858, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 0.0001300082017869573, |
|
"loss": 1.8715, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 0.00012692489551105156, |
|
"loss": 1.7653, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 0.0001233439817914244, |
|
"loss": 1.7267, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 0.0001192960117213372, |
|
"loss": 1.7223, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 0.00011481552115415387, |
|
"loss": 1.7739, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 0.00010994073605561706, |
|
"loss": 1.7485, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 0.00010471324637338657, |
|
"loss": 1.7833, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 9.917765120627052e-05, |
|
"loss": 1.7504, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 9.338117830043871e-05, |
|
"loss": 1.7411, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 8.737328111894491e-05, |
|
"loss": 1.7067, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 8.120521692221671e-05, |
|
"loss": 1.7285, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 7.492960945918252e-05, |
|
"loss": 1.7692, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 6.860000000000001e-05, |
|
"loss": 1.7354, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 6.227039054081752e-05, |
|
"loss": 1.7446, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 5.599478307778333e-05, |
|
"loss": 1.8284, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 4.9826718881055135e-05, |
|
"loss": 1.7434, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 4.381882169956128e-05, |
|
"loss": 1.6965, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 3.8022348793729525e-05, |
|
"loss": 1.7405, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 3.24867536266134e-05, |
|
"loss": 1.7292, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 2.7259263944382986e-05, |
|
"loss": 1.7385, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 2.2384478845846205e-05, |
|
"loss": 1.6668, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 1.7903988278662788e-05, |
|
"loss": 1.591, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 1.3856018208575617e-05, |
|
"loss": 1.7083, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.0275104488948488e-05, |
|
"loss": 1.6955, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 7.191798213042723e-06, |
|
"loss": 1.7392, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 4.6324050628612214e-06, |
|
"loss": 1.7917, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.6187608783446213e-06, |
|
"loss": 1.7505, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.1680453616843376e-06, |
|
"loss": 1.7224, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 2.926355061606279e-07, |
|
"loss": 1.6373, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.6567, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 1.6859837770462036, |
|
"eval_runtime": 10.9838, |
|
"eval_samples_per_second": 22.761, |
|
"eval_steps_per_second": 2.913, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.8089671428491253e-07, |
|
"loss": 1.7435, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 1.5615885257720947, |
|
"eval_runtime": 14.4733, |
|
"eval_samples_per_second": 15.753, |
|
"eval_steps_per_second": 2.004, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 1.1310706275549354e-08, |
|
"loss": 1.7668, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 4.0679393484475955e-07, |
|
"loss": 1.6029, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 1.3640882038038144e-06, |
|
"loss": 1.7397, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 2.875306825896957e-06, |
|
"loss": 1.6337, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 4.927999597426121e-06, |
|
"loss": 1.7185, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 7.505255369493709e-06, |
|
"loss": 1.7042, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 1.0585841370828143e-05, |
|
"loss": 1.6665, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 1.4144378134378619e-05, |
|
"loss": 1.6517, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 1.8151548586542735e-05, |
|
"loss": 1.7248, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 2.257433957644151e-05, |
|
"loss": 1.7293, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 2.7376313855398193e-05, |
|
"loss": 1.5925, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 3.25179102659126e-05, |
|
"loss": 1.7049, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 3.795676966701845e-05, |
|
"loss": 1.6687, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 4.3648083910881356e-05, |
|
"loss": 1.6983, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 4.954496499558694e-05, |
|
"loss": 1.785, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 5.559883135284689e-05, |
|
"loss": 1.698, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 6.175980808818411e-05, |
|
"loss": 1.6591, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 6.797713787621067e-05, |
|
"loss": 1.7702, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 7.419959912582961e-05, |
|
"loss": 1.7052, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 8.037592797029894e-05, |
|
"loss": 1.6658, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 8.645524060558421e-05, |
|
"loss": 1.6903, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 9.238745249755704e-05, |
|
"loss": 1.7102, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 9.812369100439384e-05, |
|
"loss": 1.7289, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 0.00010361669801477512, |
|
"loss": 1.688, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 0.00010882121928474553, |
|
"loss": 1.7392, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 0.00011369437726567332, |
|
"loss": 1.7228, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 0.00011819602435175987, |
|
"loss": 1.7298, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 0.0001222890736368625, |
|
"loss": 1.6633, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 0.00012593980445568464, |
|
"loss": 1.7014, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 0.0001291181401921254, |
|
"loss": 1.6952, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 0.00013179789606606037, |
|
"loss": 1.7324, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 0.0001339569948571563, |
|
"loss": 1.6753, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 0.00013557764878847588, |
|
"loss": 1.834, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 0.0001366465060714202, |
|
"loss": 1.6832, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 0.00013715476090469595, |
|
"loss": 1.703, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_loss": 1.588655710220337, |
|
"eval_runtime": 14.8949, |
|
"eval_samples_per_second": 15.307, |
|
"eval_steps_per_second": 1.947, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 0.000137098226021079, |
|
"loss": 1.6012, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 0.0001364773671842959, |
|
"loss": 1.7298, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 0.0001352972993518206, |
|
"loss": 1.6923, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 0.0001335677445351985, |
|
"loss": 1.7106, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 0.00013130295170506775, |
|
"loss": 1.6598, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 0.00012852157940074013, |
|
"loss": 1.6285, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 0.00012524654201146593, |
|
"loss": 1.6431, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 0.00012150482099579733, |
|
"loss": 1.5908, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 0.00011732724259432273, |
|
"loss": 1.6991, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 0.00011274822386709064, |
|
"loss": 1.5654, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 0.00010780548914799808, |
|
"loss": 1.6367, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 0.00010253975925213988, |
|
"loss": 1.7132, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 9.699441599659e-05, |
|
"loss": 1.6216, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 9.121514479846673e-05, |
|
"loss": 1.6058, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 8.52495582947446e-05, |
|
"loss": 1.5635, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 7.914680408462862e-05, |
|
"loss": 1.6392, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 7.295715982611233e-05, |
|
"loss": 1.5928, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 6.673161902252507e-05, |
|
"loss": 1.5735, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 6.052147091157344e-05, |
|
"loss": 1.645, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 5.4377877917969595e-05, |
|
"loss": 1.6497, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 4.8351454150810734e-05, |
|
"loss": 1.6139, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 4.249184841826729e-05, |
|
"loss": 1.5811, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 3.6847335194921544e-05, |
|
"loss": 1.5699, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 3.146441691157973e-05, |
|
"loss": 1.6003, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 2.638744084410121e-05, |
|
"loss": 1.6607, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 2.165823375751248e-05, |
|
"loss": 1.6131, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.7315757315399247e-05, |
|
"loss": 1.5716, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 1.3395787093490896e-05, |
|
"loss": 1.6196, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 9.930617841891443e-06, |
|
"loss": 1.6354, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 6.948797424159463e-06, |
|
"loss": 1.5669, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 4.474891625184719e-06, |
|
"loss": 1.604, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 2.5292817654967284e-06, |
|
"loss": 1.523, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 1.127996789363344e-06, |
|
"loss": 1.6719, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 2.8258121002494224e-07, |
|
"loss": 1.6116, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.4979, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_loss": 1.516666054725647, |
|
"eval_runtime": 14.812, |
|
"eval_samples_per_second": 15.393, |
|
"eval_steps_per_second": 1.958, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 2.825812100249346e-07, |
|
"loss": 1.5673, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 1.127996789363329e-06, |
|
"loss": 1.5619, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 2.529281765496713e-06, |
|
"loss": 1.5238, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 4.4748916251846885e-06, |
|
"loss": 1.5144, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 6.948797424159478e-06, |
|
"loss": 1.5482, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 9.930617841891406e-06, |
|
"loss": 1.593, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 1.3395787093490912e-05, |
|
"loss": 1.6012, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 1.7315757315399193e-05, |
|
"loss": 1.5677, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 2.1658233757512504e-05, |
|
"loss": 1.5793, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 2.638744084410115e-05, |
|
"loss": 1.5031, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 3.1464416911579663e-05, |
|
"loss": 1.5173, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 3.684733519492136e-05, |
|
"loss": 1.489, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 4.249184841826722e-05, |
|
"loss": 1.6251, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 4.835145415081054e-05, |
|
"loss": 1.5882, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 5.4377877917969514e-05, |
|
"loss": 1.6055, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 6.052147091157324e-05, |
|
"loss": 1.5492, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 6.673161902252498e-05, |
|
"loss": 1.534, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 7.295715982611214e-05, |
|
"loss": 1.5354, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 7.914680408462853e-05, |
|
"loss": 1.6055, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 8.52495582947444e-05, |
|
"loss": 1.5166, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 9.121514479846663e-05, |
|
"loss": 1.5157, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 9.699441599658982e-05, |
|
"loss": 1.6171, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 0.00010253975925213981, |
|
"loss": 1.5676, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 0.0001078054891479979, |
|
"loss": 1.5382, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 0.00011274822386709058, |
|
"loss": 1.6057, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 0.00011732724259432258, |
|
"loss": 1.5982, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 0.00012150482099579729, |
|
"loss": 1.5857, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 0.00012524654201146596, |
|
"loss": 1.6016, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 0.00012852157940074007, |
|
"loss": 1.552, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 0.00013130295170506778, |
|
"loss": 1.6309, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 0.00013356774453519848, |
|
"loss": 1.6005, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 0.0001352972993518206, |
|
"loss": 1.5484, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 0.00013647736718429588, |
|
"loss": 1.5604, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 0.000137098226021079, |
|
"loss": 1.5962, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_loss": 1.5382394790649414, |
|
"eval_runtime": 14.8295, |
|
"eval_samples_per_second": 15.375, |
|
"eval_steps_per_second": 1.956, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 0.00013715476090469595, |
|
"loss": 1.5942, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 0.0001366465060714202, |
|
"loss": 1.4645, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 0.00013557764878847597, |
|
"loss": 1.5359, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 0.00013395699485715633, |
|
"loss": 1.5554, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 0.00013179789606606045, |
|
"loss": 1.5214, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 0.00012911814019212547, |
|
"loss": 1.4883, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 0.00012593980445568475, |
|
"loss": 1.536, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 0.00012228907363686257, |
|
"loss": 1.4736, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 0.00011819602435175999, |
|
"loss": 1.5588, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 0.00011369437726567339, |
|
"loss": 1.6177, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 0.00010882121928474568, |
|
"loss": 1.5433, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 0.00010361669801477519, |
|
"loss": 1.4821, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 9.812369100439403e-05, |
|
"loss": 1.5071, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 9.238745249755713e-05, |
|
"loss": 1.5067, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 8.645524060558428e-05, |
|
"loss": 1.5131, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 8.037592797029891e-05, |
|
"loss": 1.5808, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 7.419959912582969e-05, |
|
"loss": 1.5181, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 6.797713787621063e-05, |
|
"loss": 1.5158, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 6.17598080881842e-05, |
|
"loss": 1.5602, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 5.559883135284685e-05, |
|
"loss": 1.5248, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 4.954496499558702e-05, |
|
"loss": 1.6044, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 4.364808391088155e-05, |
|
"loss": 1.5675, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 3.795676966701853e-05, |
|
"loss": 1.5317, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 3.2517910265912777e-05, |
|
"loss": 1.498, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 2.737631385539826e-05, |
|
"loss": 1.5435, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 2.2574339576441664e-05, |
|
"loss": 1.4874, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 1.815154858654279e-05, |
|
"loss": 1.4665, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 1.4144378134378748e-05, |
|
"loss": 1.522, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 1.0585841370828188e-05, |
|
"loss": 1.4874, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 7.505255369493801e-06, |
|
"loss": 1.4275, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 4.927999597426152e-06, |
|
"loss": 1.5011, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 2.8753068258970103e-06, |
|
"loss": 1.5038, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 1.3640882038038297e-06, |
|
"loss": 1.5051, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 4.067939348447824e-07, |
|
"loss": 1.508, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 1.1310706275549354e-08, |
|
"loss": 1.5261, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_loss": 1.4838725328445435, |
|
"eval_runtime": 14.6674, |
|
"eval_samples_per_second": 15.545, |
|
"eval_steps_per_second": 1.977, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 1.808967142848973e-07, |
|
"loss": 1.4801, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 7.05, |
|
"learning_rate": 9.141548212825557e-07, |
|
"loss": 1.4927, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 7.08, |
|
"learning_rate": 2.2050440662441005e-06, |
|
"loss": 1.4083, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 4.0429294332848074e-06, |
|
"loss": 1.3969, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 6.412669468442276e-06, |
|
"loss": 1.3985, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 7.17, |
|
"learning_rate": 9.294741022836825e-06, |
|
"loss": 1.4522, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 1.266540009450849e-05, |
|
"loss": 1.4855, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 7.23, |
|
"learning_rate": 1.649687744383478e-05, |
|
"loss": 1.3859, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 2.075760737094545e-05, |
|
"loss": 1.5523, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"learning_rate": 2.5412487770347686e-05, |
|
"loss": 1.4851, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 3.0423169320298854e-05, |
|
"loss": 1.4955, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 3.57483714244302e-05, |
|
"loss": 1.4801, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 7.37, |
|
"learning_rate": 4.134422230273594e-05, |
|
"loss": 1.3948, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 4.7164620430075375e-05, |
|
"loss": 1.5297, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 5.316161434447666e-05, |
|
"loss": 1.5011, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"learning_rate": 5.92857976961771e-05, |
|
"loss": 1.4813, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 6.548671628278832e-05, |
|
"loss": 1.476, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 7.171328371721156e-05, |
|
"loss": 1.3971, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 7.791420230382278e-05, |
|
"loss": 1.4009, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 8.403838565552322e-05, |
|
"loss": 1.4851, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"learning_rate": 9.00353795699245e-05, |
|
"loss": 1.4137, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 9.585577769726395e-05, |
|
"loss": 1.4647, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 0.00010145162857556969, |
|
"loss": 1.3558, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 0.00010677683067970103, |
|
"loss": 1.4209, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 0.00011178751222965221, |
|
"loss": 1.5359, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 0.00011644239262905447, |
|
"loss": 1.5243, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 0.00012070312255616514, |
|
"loss": 1.5307, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 7.8, |
|
"learning_rate": 0.00012453459990549145, |
|
"loss": 1.3905, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 0.0001279052589771631, |
|
"loss": 1.4955, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 0.00013078733053155768, |
|
"loss": 1.501, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"learning_rate": 0.00013315707056671514, |
|
"loss": 1.4948, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 0.00013499495593375586, |
|
"loss": 1.496, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 0.00013628584517871742, |
|
"loss": 1.4929, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 7.98, |
|
"learning_rate": 0.0001370191032857151, |
|
"loss": 1.5077, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_loss": 1.5043175220489502, |
|
"eval_runtime": 14.7576, |
|
"eval_samples_per_second": 15.45, |
|
"eval_steps_per_second": 1.965, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 0.00013718868929372445, |
|
"loss": 1.5034, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 8.03, |
|
"learning_rate": 0.00013679320606515522, |
|
"loss": 1.4037, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 0.00013583591179619614, |
|
"loss": 1.4957, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 0.000134324693174103, |
|
"loss": 1.5076, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"learning_rate": 0.0001322720004025738, |
|
"loss": 1.503, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 0.00012969474463050637, |
|
"loss": 1.4043, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 8.18, |
|
"learning_rate": 0.00012661415862917202, |
|
"loss": 1.4017, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 8.21, |
|
"learning_rate": 0.00012305562186562148, |
|
"loss": 1.4622, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 0.00011904845141345745, |
|
"loss": 1.4555, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 0.00011462566042355861, |
|
"loss": 1.474, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 0.00010982368614460204, |
|
"loss": 1.4798, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"learning_rate": 0.00010468208973408754, |
|
"loss": 1.4409, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"learning_rate": 9.924323033298181e-05, |
|
"loss": 1.4283, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 9.35519160891188e-05, |
|
"loss": 1.4248, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 8.41, |
|
"learning_rate": 8.76550350044131e-05, |
|
"loss": 1.4627, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 8.160116864715327e-05, |
|
"loss": 1.5012, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 7.544019191181593e-05, |
|
"loss": 1.3866, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 8.5, |
|
"learning_rate": 6.92228621237895e-05, |
|
"loss": 1.4909, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"learning_rate": 6.300040087417043e-05, |
|
"loss": 1.385, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 8.55, |
|
"learning_rate": 5.682407202970122e-05, |
|
"loss": 1.4766, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"learning_rate": 5.0744759394415834e-05, |
|
"loss": 1.4607, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 8.61, |
|
"learning_rate": 4.481254750244299e-05, |
|
"loss": 1.4235, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 8.64, |
|
"learning_rate": 3.907630899560609e-05, |
|
"loss": 1.4085, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 3.358330198522491e-05, |
|
"loss": 1.4548, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 8.7, |
|
"learning_rate": 2.8378780715254406e-05, |
|
"loss": 1.4757, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 8.73, |
|
"learning_rate": 2.3505622734326714e-05, |
|
"loss": 1.3891, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 1.900397564824009e-05, |
|
"loss": 1.3819, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 1.4910926363137506e-05, |
|
"loss": 1.4202, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"learning_rate": 1.1260195544315306e-05, |
|
"loss": 1.3403, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 8.081859807874605e-06, |
|
"loss": 1.4776, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 5.402103933939604e-06, |
|
"loss": 1.4531, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 8.9, |
|
"learning_rate": 3.2430051428437045e-06, |
|
"loss": 1.373, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 8.93, |
|
"learning_rate": 1.6223512115240727e-06, |
|
"loss": 1.3284, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"learning_rate": 5.534939285798083e-07, |
|
"loss": 1.3615, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 4.523909530404726e-08, |
|
"loss": 1.3743, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 9.0, |
|
"eval_loss": 1.464684247970581, |
|
"eval_runtime": 15.3121, |
|
"eval_samples_per_second": 14.89, |
|
"eval_steps_per_second": 1.894, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 1.0177397892099493e-07, |
|
"loss": 1.4264, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"learning_rate": 7.226328157040877e-07, |
|
"loss": 1.3427, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 9.08, |
|
"learning_rate": 1.9027006481794198e-06, |
|
"loss": 1.4135, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 3.6322554648015487e-06, |
|
"loss": 1.3408, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 9.13, |
|
"learning_rate": 5.897048294932268e-06, |
|
"loss": 1.355, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 9.16, |
|
"learning_rate": 8.678420599259977e-06, |
|
"loss": 1.3903, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"learning_rate": 1.1953457988534113e-05, |
|
"loss": 1.3435, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 1.5695179004202806e-05, |
|
"loss": 1.4126, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 1.9872757405677322e-05, |
|
"loss": 1.3711, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"learning_rate": 2.445177613290951e-05, |
|
"loss": 1.4319, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 9.31, |
|
"learning_rate": 2.9394510852001993e-05, |
|
"loss": 1.3731, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 9.34, |
|
"learning_rate": 3.46602407478603e-05, |
|
"loss": 1.3164, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 9.36, |
|
"learning_rate": 4.020558400341007e-05, |
|
"loss": 1.3927, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 9.39, |
|
"learning_rate": 4.598485520153301e-05, |
|
"loss": 1.3304, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"learning_rate": 5.195044170525524e-05, |
|
"loss": 1.3834, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"learning_rate": 5.80531959153711e-05, |
|
"loss": 1.4126, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 9.48, |
|
"learning_rate": 6.424284017388749e-05, |
|
"loss": 1.3473, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 9.51, |
|
"learning_rate": 7.046838097747464e-05, |
|
"loss": 1.299, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 9.54, |
|
"learning_rate": 7.66785290884264e-05, |
|
"loss": 1.3673, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 8.282212208203013e-05, |
|
"loss": 1.3913, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 8.884854584918912e-05, |
|
"loss": 1.3892, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"learning_rate": 9.470815158173245e-05, |
|
"loss": 1.3557, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"learning_rate": 0.00010035266480507833, |
|
"loss": 1.396, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"learning_rate": 0.00010573558308842004, |
|
"loss": 1.3608, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 9.71, |
|
"learning_rate": 0.00011081255915589876, |
|
"loss": 1.3399, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 9.74, |
|
"learning_rate": 0.00011554176624248741, |
|
"loss": 1.3982, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"learning_rate": 0.00011988424268460073, |
|
"loss": 1.4392, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"learning_rate": 0.000123804212906509, |
|
"loss": 1.4083, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 9.83, |
|
"learning_rate": 0.00012726938215810852, |
|
"loss": 1.434, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"learning_rate": 0.00013025120257584047, |
|
"loss": 1.3252, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 9.88, |
|
"learning_rate": 0.00013272510837481527, |
|
"loss": 1.3899, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 0.00013467071823450324, |
|
"loss": 1.3883, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 9.94, |
|
"learning_rate": 0.00013607200321063664, |
|
"loss": 1.4395, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"learning_rate": 0.00013691741878997505, |
|
"loss": 1.4208, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 0.0001372, |
|
"loss": 1.4499, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"eval_loss": 1.4852185249328613, |
|
"eval_runtime": 15.5041, |
|
"eval_samples_per_second": 14.706, |
|
"eval_steps_per_second": 1.87, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 10.03, |
|
"learning_rate": 0.00013691741878997505, |
|
"loss": 1.3342, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 10.06, |
|
"learning_rate": 0.0001360720032106367, |
|
"loss": 1.3537, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 10.09, |
|
"learning_rate": 0.00013467071823450327, |
|
"loss": 1.376, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 10.12, |
|
"learning_rate": 0.00013272510837481532, |
|
"loss": 1.3746, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 10.14, |
|
"learning_rate": 0.00013025120257584055, |
|
"loss": 1.3632, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 10.17, |
|
"learning_rate": 0.00012726938215810863, |
|
"loss": 1.4038, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"learning_rate": 0.00012380421290650925, |
|
"loss": 1.3788, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 10.23, |
|
"learning_rate": 0.00011988424268460084, |
|
"loss": 1.3642, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 10.26, |
|
"learning_rate": 0.00011554176624248753, |
|
"loss": 1.3539, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 10.29, |
|
"learning_rate": 0.00011081255915589869, |
|
"loss": 1.371, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 10.32, |
|
"learning_rate": 0.00010573558308842038, |
|
"loss": 1.3995, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 10.35, |
|
"learning_rate": 0.00010035266480507848, |
|
"loss": 1.4698, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 10.38, |
|
"learning_rate": 9.47081515817326e-05, |
|
"loss": 1.3803, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 10.4, |
|
"learning_rate": 8.884854584918904e-05, |
|
"loss": 1.4079, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"learning_rate": 8.282212208203053e-05, |
|
"loss": 1.4, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 10.46, |
|
"learning_rate": 7.667852908842657e-05, |
|
"loss": 1.3182, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 10.49, |
|
"learning_rate": 7.04683809774748e-05, |
|
"loss": 1.3298, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 10.52, |
|
"learning_rate": 6.424284017388742e-05, |
|
"loss": 1.348, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 10.55, |
|
"learning_rate": 5.8053195915371506e-05, |
|
"loss": 1.3953, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 10.58, |
|
"learning_rate": 5.1950441705255396e-05, |
|
"loss": 1.3451, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 10.61, |
|
"learning_rate": 4.5984855201533164e-05, |
|
"loss": 1.3076, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 10.64, |
|
"learning_rate": 4.0205584003410225e-05, |
|
"loss": 1.4287, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 10.66, |
|
"learning_rate": 3.4660240747860236e-05, |
|
"loss": 1.3216, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 10.69, |
|
"learning_rate": 2.939451085200233e-05, |
|
"loss": 1.341, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 10.72, |
|
"learning_rate": 2.4451776132909644e-05, |
|
"loss": 1.3848, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 10.75, |
|
"learning_rate": 1.9872757405677438e-05, |
|
"loss": 1.328, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"learning_rate": 1.5695179004202758e-05, |
|
"loss": 1.3147, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 10.81, |
|
"learning_rate": 1.1953457988534349e-05, |
|
"loss": 1.2886, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 10.84, |
|
"learning_rate": 8.678420599260062e-06, |
|
"loss": 1.2943, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 10.87, |
|
"learning_rate": 5.8970482949323365e-06, |
|
"loss": 1.3852, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 10.9, |
|
"learning_rate": 3.6322554648015263e-06, |
|
"loss": 1.3121, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 10.92, |
|
"learning_rate": 1.902700648179519e-06, |
|
"loss": 1.37, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 10.95, |
|
"learning_rate": 7.226328157041105e-07, |
|
"loss": 1.3568, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 10.98, |
|
"learning_rate": 1.0177397892100256e-07, |
|
"loss": 1.2291, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 11.0, |
|
"eval_loss": 1.4596115350723267, |
|
"eval_runtime": 15.3008, |
|
"eval_samples_per_second": 14.901, |
|
"eval_steps_per_second": 1.895, |
|
"step": 1903 |
|
}
],
"max_steps": 2249,
"num_train_epochs": 13,
"total_flos": 1979287142400000.0,
"trial_name": null,
"trial_params": null
}
|