{
  "best_metric": 0.922,
  "best_model_checkpoint": "checkpoint/vit-base/checkpoint-13653",
  "epoch": 41.0,
  "eval_steps": 500,
  "global_step": 13653,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "grad_norm": 0.6713149547576904,
      "learning_rate": 9.996996996996998e-06,
      "loss": 4.6131,
      "step": 10
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.6888127326965332,
      "learning_rate": 9.993993993993994e-06,
      "loss": 4.6033,
      "step": 20
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.7794463634490967,
      "learning_rate": 9.990990990990992e-06,
      "loss": 4.6019,
      "step": 30
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.7187056541442871,
      "learning_rate": 9.987987987987989e-06,
      "loss": 4.5877,
      "step": 40
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.6908490657806396,
      "learning_rate": 9.984984984984985e-06,
      "loss": 4.578,
      "step": 50
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.6914451718330383,
      "learning_rate": 9.981981981981982e-06,
      "loss": 4.5734,
      "step": 60
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.7061388492584229,
      "learning_rate": 9.97897897897898e-06,
      "loss": 4.5547,
      "step": 70
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.7158679366111755,
      "learning_rate": 9.975975975975977e-06,
      "loss": 4.544,
      "step": 80
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.7526296973228455,
      "learning_rate": 9.972972972972975e-06,
      "loss": 4.5366,
      "step": 90
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.7619982957839966,
      "learning_rate": 9.96996996996997e-06,
      "loss": 4.5252,
      "step": 100
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.7292161583900452,
      "learning_rate": 9.966966966966968e-06,
      "loss": 4.511,
      "step": 110
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.7648303508758545,
      "learning_rate": 9.963963963963965e-06,
      "loss": 4.505,
      "step": 120
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.7616661190986633,
      "learning_rate": 9.960960960960961e-06,
      "loss": 4.4781,
      "step": 130
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.792500913143158,
      "learning_rate": 9.957957957957959e-06,
      "loss": 4.467,
      "step": 140
    },
    {
      "epoch": 0.45,
      "grad_norm": 1.064399003982544,
      "learning_rate": 9.954954954954956e-06,
      "loss": 4.4612,
      "step": 150
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.8003212809562683,
      "learning_rate": 9.951951951951952e-06,
      "loss": 4.4334,
      "step": 160
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.8235049843788147,
      "learning_rate": 9.94894894894895e-06,
      "loss": 4.4173,
      "step": 170
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.8271332383155823,
      "learning_rate": 9.945945945945947e-06,
      "loss": 4.3952,
      "step": 180
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.8518035411834717,
      "learning_rate": 9.942942942942944e-06,
      "loss": 4.3846,
      "step": 190
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.8331427574157715,
      "learning_rate": 9.93993993993994e-06,
      "loss": 4.3631,
      "step": 200
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.8386998176574707,
      "learning_rate": 9.936936936936938e-06,
      "loss": 4.3418,
      "step": 210
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.9001342058181763,
      "learning_rate": 9.933933933933935e-06,
      "loss": 4.3226,
      "step": 220
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.9490275382995605,
      "learning_rate": 9.930930930930933e-06,
      "loss": 4.2988,
      "step": 230
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.9027977585792542,
      "learning_rate": 9.927927927927928e-06,
      "loss": 4.2937,
      "step": 240
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.8498959541320801,
      "learning_rate": 9.924924924924926e-06,
      "loss": 4.2621,
      "step": 250
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.9752369523048401,
      "learning_rate": 9.921921921921923e-06,
      "loss": 4.2379,
      "step": 260
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.8778105974197388,
      "learning_rate": 9.91891891891892e-06,
      "loss": 4.2288,
      "step": 270
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.9475248456001282,
      "learning_rate": 9.915915915915917e-06,
      "loss": 4.2066,
      "step": 280
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.0335466861724854,
      "learning_rate": 9.912912912912914e-06,
      "loss": 4.1814,
      "step": 290
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.091416597366333,
      "learning_rate": 9.90990990990991e-06,
      "loss": 4.1663,
      "step": 300
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.9959795475006104,
      "learning_rate": 9.906906906906907e-06,
      "loss": 4.145,
      "step": 310
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.9685789942741394,
      "learning_rate": 9.903903903903905e-06,
      "loss": 4.1289,
      "step": 320
    },
    {
      "epoch": 0.99,
      "grad_norm": 1.0388822555541992,
      "learning_rate": 9.900900900900902e-06,
      "loss": 4.0904,
      "step": 330
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.6663,
      "eval_loss": 4.0142011642456055,
      "eval_runtime": 26.8951,
      "eval_samples_per_second": 371.815,
      "eval_steps_per_second": 1.487,
      "step": 333
    },
    {
      "epoch": 1.02,
      "grad_norm": 1.0019899606704712,
      "learning_rate": 9.8978978978979e-06,
      "loss": 4.0766,
      "step": 340
    },
    {
      "epoch": 1.05,
      "grad_norm": 1.0191450119018555,
      "learning_rate": 9.894894894894896e-06,
      "loss": 4.0525,
      "step": 350
    },
    {
      "epoch": 1.08,
      "grad_norm": 0.9841620326042175,
      "learning_rate": 9.891891891891893e-06,
      "loss": 4.0274,
      "step": 360
    },
    {
      "epoch": 1.11,
      "grad_norm": 1.0833967924118042,
      "learning_rate": 9.88888888888889e-06,
      "loss": 4.0079,
      "step": 370
    },
    {
      "epoch": 1.14,
      "grad_norm": 1.0421665906906128,
      "learning_rate": 9.885885885885886e-06,
      "loss": 3.9865,
      "step": 380
    },
    {
      "epoch": 1.17,
      "grad_norm": 1.1187000274658203,
      "learning_rate": 9.882882882882884e-06,
      "loss": 3.9617,
      "step": 390
    },
    {
      "epoch": 1.2,
      "grad_norm": 1.0615652799606323,
      "learning_rate": 9.879879879879881e-06,
      "loss": 3.9559,
      "step": 400
    },
    {
      "epoch": 1.23,
      "grad_norm": 1.0917859077453613,
      "learning_rate": 9.876876876876877e-06,
      "loss": 3.9249,
      "step": 410
    },
    {
      "epoch": 1.26,
      "grad_norm": 1.2941133975982666,
      "learning_rate": 9.873873873873875e-06,
      "loss": 3.9088,
      "step": 420
    },
    {
      "epoch": 1.29,
      "grad_norm": 1.2532916069030762,
      "learning_rate": 9.87087087087087e-06,
      "loss": 3.883,
      "step": 430
    },
    {
      "epoch": 1.32,
      "grad_norm": 1.1082998514175415,
      "learning_rate": 9.867867867867868e-06,
      "loss": 3.8705,
      "step": 440
    },
    {
      "epoch": 1.35,
      "grad_norm": 1.1208393573760986,
      "learning_rate": 9.864864864864865e-06,
      "loss": 3.8581,
      "step": 450
    },
    {
      "epoch": 1.38,
      "grad_norm": 1.2406553030014038,
      "learning_rate": 9.861861861861863e-06,
      "loss": 3.8187,
      "step": 460
    },
    {
      "epoch": 1.41,
      "grad_norm": 1.198391318321228,
      "learning_rate": 9.85885885885886e-06,
      "loss": 3.7917,
      "step": 470
    },
    {
      "epoch": 1.44,
      "grad_norm": 1.045142412185669,
      "learning_rate": 9.855855855855858e-06,
      "loss": 3.7865,
      "step": 480
    },
    {
      "epoch": 1.47,
      "grad_norm": 1.4195159673690796,
      "learning_rate": 9.852852852852854e-06,
      "loss": 3.7659,
      "step": 490
    },
    {
      "epoch": 1.5,
      "grad_norm": 1.2999833822250366,
      "learning_rate": 9.849849849849851e-06,
      "loss": 3.7566,
      "step": 500
    },
    {
      "epoch": 1.53,
      "grad_norm": 1.128721833229065,
      "learning_rate": 9.846846846846849e-06,
      "loss": 3.7394,
      "step": 510
    },
    {
      "epoch": 1.56,
      "grad_norm": 1.24117910861969,
      "learning_rate": 9.843843843843844e-06,
      "loss": 3.7364,
      "step": 520
    },
    {
      "epoch": 1.59,
      "grad_norm": 1.3166449069976807,
      "learning_rate": 9.840840840840842e-06,
      "loss": 3.6919,
      "step": 530
    },
    {
      "epoch": 1.62,
      "grad_norm": 1.3979932069778442,
      "learning_rate": 9.83783783783784e-06,
      "loss": 3.6865,
      "step": 540
    },
    {
      "epoch": 1.65,
      "grad_norm": 1.4577465057373047,
      "learning_rate": 9.834834834834835e-06,
      "loss": 3.67,
      "step": 550
    },
    {
      "epoch": 1.68,
      "grad_norm": 1.3105660676956177,
      "learning_rate": 9.831831831831833e-06,
      "loss": 3.6448,
      "step": 560
    },
    {
      "epoch": 1.71,
      "grad_norm": 1.2558164596557617,
      "learning_rate": 9.82882882882883e-06,
      "loss": 3.6515,
      "step": 570
    },
    {
      "epoch": 1.74,
      "grad_norm": 1.2949180603027344,
      "learning_rate": 9.825825825825826e-06,
      "loss": 3.637,
      "step": 580
    },
    {
      "epoch": 1.77,
      "grad_norm": 1.2413955926895142,
      "learning_rate": 9.822822822822823e-06,
      "loss": 3.5952,
      "step": 590
    },
    {
      "epoch": 1.8,
      "grad_norm": 1.7929736375808716,
      "learning_rate": 9.81981981981982e-06,
      "loss": 3.5952,
      "step": 600
    },
    {
      "epoch": 1.83,
      "grad_norm": 1.2751438617706299,
      "learning_rate": 9.816816816816818e-06,
      "loss": 3.5877,
      "step": 610
    },
    {
      "epoch": 1.86,
      "grad_norm": 1.4566322565078735,
      "learning_rate": 9.813813813813816e-06,
      "loss": 3.5815,
      "step": 620
    },
    {
      "epoch": 1.89,
      "grad_norm": 1.3014384508132935,
      "learning_rate": 9.810810810810811e-06,
      "loss": 3.5361,
      "step": 630
    },
    {
      "epoch": 1.92,
      "grad_norm": 1.343555212020874,
      "learning_rate": 9.807807807807809e-06,
      "loss": 3.5176,
      "step": 640
    },
    {
      "epoch": 1.95,
      "grad_norm": 1.2463815212249756,
      "learning_rate": 9.804804804804806e-06,
      "loss": 3.522,
      "step": 650
    },
    {
      "epoch": 1.98,
      "grad_norm": 1.288573145866394,
      "learning_rate": 9.801801801801802e-06,
      "loss": 3.5092,
      "step": 660
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.7659,
      "eval_loss": 3.3562543392181396,
      "eval_runtime": 26.0758,
      "eval_samples_per_second": 383.497,
      "eval_steps_per_second": 1.534,
      "step": 666
    },
    {
      "epoch": 2.01,
      "grad_norm": 1.3275058269500732,
      "learning_rate": 9.7987987987988e-06,
      "loss": 3.4905,
      "step": 670
    },
    {
      "epoch": 2.04,
      "grad_norm": 2.9843428134918213,
      "learning_rate": 9.795795795795795e-06,
      "loss": 3.4694,
      "step": 680
    },
    {
      "epoch": 2.07,
      "grad_norm": 1.4618003368377686,
      "learning_rate": 9.792792792792793e-06,
      "loss": 3.448,
      "step": 690
    },
    {
      "epoch": 2.1,
      "grad_norm": 1.5394887924194336,
      "learning_rate": 9.78978978978979e-06,
      "loss": 3.4395,
      "step": 700
    },
    {
      "epoch": 2.13,
      "grad_norm": 1.4299417734146118,
      "learning_rate": 9.786786786786788e-06,
      "loss": 3.4062,
      "step": 710
    },
    {
      "epoch": 2.16,
      "grad_norm": 1.4161120653152466,
      "learning_rate": 9.783783783783785e-06,
      "loss": 3.3925,
      "step": 720
    },
    {
      "epoch": 2.19,
      "grad_norm": 1.5492268800735474,
      "learning_rate": 9.780780780780781e-06,
      "loss": 3.3897,
      "step": 730
    },
    {
      "epoch": 2.22,
      "grad_norm": 2.0198769569396973,
      "learning_rate": 9.777777777777779e-06,
      "loss": 3.3676,
      "step": 740
    },
    {
      "epoch": 2.25,
      "grad_norm": 1.5388668775558472,
      "learning_rate": 9.774774774774776e-06,
      "loss": 3.358,
      "step": 750
    },
    {
      "epoch": 2.28,
      "grad_norm": 1.6485122442245483,
      "learning_rate": 9.771771771771774e-06,
      "loss": 3.3713,
      "step": 760
    },
    {
      "epoch": 2.31,
      "grad_norm": 1.4683903455734253,
      "learning_rate": 9.76876876876877e-06,
      "loss": 3.3385,
      "step": 770
    },
    {
      "epoch": 2.34,
      "grad_norm": 2.024306058883667,
      "learning_rate": 9.765765765765767e-06,
      "loss": 3.3183,
      "step": 780
    },
    {
      "epoch": 2.37,
      "grad_norm": 1.6113808155059814,
      "learning_rate": 9.762762762762763e-06,
      "loss": 3.2949,
      "step": 790
    },
    {
      "epoch": 2.4,
      "grad_norm": 1.5689467191696167,
      "learning_rate": 9.75975975975976e-06,
      "loss": 3.3244,
      "step": 800
    },
    {
      "epoch": 2.43,
      "grad_norm": 1.6313070058822632,
      "learning_rate": 9.756756756756758e-06,
      "loss": 3.3233,
      "step": 810
    },
    {
      "epoch": 2.46,
      "grad_norm": 1.8879677057266235,
      "learning_rate": 9.753753753753753e-06,
      "loss": 3.2708,
      "step": 820
    },
    {
      "epoch": 2.49,
      "grad_norm": 1.6785987615585327,
      "learning_rate": 9.750750750750751e-06,
      "loss": 3.2796,
      "step": 830
    },
    {
      "epoch": 2.52,
      "grad_norm": 1.5085214376449585,
      "learning_rate": 9.747747747747748e-06,
      "loss": 3.2767,
      "step": 840
    },
    {
      "epoch": 2.55,
      "grad_norm": 1.9259861707687378,
      "learning_rate": 9.744744744744746e-06,
      "loss": 3.2647,
      "step": 850
    },
    {
      "epoch": 2.58,
      "grad_norm": 1.6628590822219849,
      "learning_rate": 9.741741741741743e-06,
      "loss": 3.21,
      "step": 860
    },
    {
      "epoch": 2.61,
      "grad_norm": 1.4452886581420898,
      "learning_rate": 9.73873873873874e-06,
      "loss": 3.2561,
      "step": 870
    },
    {
      "epoch": 2.64,
      "grad_norm": 1.4690823554992676,
      "learning_rate": 9.735735735735737e-06,
      "loss": 3.2207,
      "step": 880
    },
    {
      "epoch": 2.67,
      "grad_norm": 1.831654667854309,
      "learning_rate": 9.732732732732734e-06,
      "loss": 3.1828,
      "step": 890
    },
    {
      "epoch": 2.7,
      "grad_norm": 1.5297893285751343,
      "learning_rate": 9.729729729729732e-06,
      "loss": 3.2005,
      "step": 900
    },
    {
      "epoch": 2.73,
      "grad_norm": 3.0812528133392334,
      "learning_rate": 9.726726726726727e-06,
      "loss": 3.2103,
      "step": 910
    },
    {
      "epoch": 2.76,
      "grad_norm": 1.4284906387329102,
      "learning_rate": 9.723723723723725e-06,
      "loss": 3.1591,
      "step": 920
    },
    {
      "epoch": 2.79,
      "grad_norm": 1.5879852771759033,
      "learning_rate": 9.72072072072072e-06,
      "loss": 3.1647,
      "step": 930
    },
    {
      "epoch": 2.82,
      "grad_norm": 2.1024491786956787,
      "learning_rate": 9.717717717717718e-06,
      "loss": 3.1297,
      "step": 940
    },
    {
      "epoch": 2.85,
      "grad_norm": 1.941714882850647,
      "learning_rate": 9.714714714714716e-06,
      "loss": 3.1306,
      "step": 950
    },
    {
      "epoch": 2.88,
      "grad_norm": 1.642816424369812,
      "learning_rate": 9.711711711711711e-06,
      "loss": 3.1393,
      "step": 960
    },
    {
      "epoch": 2.91,
      "grad_norm": 1.7811369895935059,
      "learning_rate": 9.708708708708709e-06,
      "loss": 3.1237,
      "step": 970
    },
    {
      "epoch": 2.94,
      "grad_norm": 1.7237188816070557,
      "learning_rate": 9.705705705705706e-06,
      "loss": 3.0829,
      "step": 980
    },
    {
      "epoch": 2.97,
      "grad_norm": 2.070939064025879,
      "learning_rate": 9.702702702702704e-06,
      "loss": 3.0987,
      "step": 990
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.8043,
      "eval_loss": 2.906019926071167,
      "eval_runtime": 25.8478,
      "eval_samples_per_second": 386.881,
      "eval_steps_per_second": 1.548,
      "step": 999
    },
    {
      "epoch": 3.0,
      "grad_norm": 2.190770149230957,
      "learning_rate": 9.699699699699701e-06,
      "loss": 3.0663,
      "step": 1000
    },
    {
      "epoch": 3.03,
      "grad_norm": 1.7228102684020996,
      "learning_rate": 9.696696696696699e-06,
      "loss": 3.0587,
      "step": 1010
    },
    {
      "epoch": 3.06,
      "grad_norm": 1.7540990114212036,
      "learning_rate": 9.693693693693694e-06,
      "loss": 3.0384,
      "step": 1020
    },
    {
      "epoch": 3.09,
      "grad_norm": 1.5200778245925903,
      "learning_rate": 9.690690690690692e-06,
      "loss": 3.0352,
      "step": 1030
    },
    {
      "epoch": 3.12,
      "grad_norm": 1.7722601890563965,
      "learning_rate": 9.687687687687688e-06,
      "loss": 2.9858,
      "step": 1040
    },
    {
      "epoch": 3.15,
      "grad_norm": 1.7385673522949219,
      "learning_rate": 9.684684684684685e-06,
      "loss": 3.0233,
      "step": 1050
    },
    {
      "epoch": 3.18,
      "grad_norm": 2.0537796020507812,
      "learning_rate": 9.681681681681683e-06,
      "loss": 3.0097,
      "step": 1060
    },
    {
      "epoch": 3.21,
      "grad_norm": 1.9146968126296997,
      "learning_rate": 9.678678678678679e-06,
      "loss": 2.9755,
      "step": 1070
    },
    {
      "epoch": 3.24,
      "grad_norm": 1.7920832633972168,
      "learning_rate": 9.675675675675676e-06,
      "loss": 2.9815,
      "step": 1080
    },
    {
      "epoch": 3.27,
      "grad_norm": 2.126950979232788,
      "learning_rate": 9.672672672672673e-06,
      "loss": 2.991,
      "step": 1090
    },
    {
      "epoch": 3.3,
      "grad_norm": 1.6951396465301514,
      "learning_rate": 9.669669669669671e-06,
      "loss": 2.9565,
      "step": 1100
    },
    {
      "epoch": 3.33,
      "grad_norm": 1.8546791076660156,
      "learning_rate": 9.666666666666667e-06,
      "loss": 2.9878,
      "step": 1110
    },
    {
      "epoch": 3.36,
      "grad_norm": 1.9961843490600586,
      "learning_rate": 9.663663663663664e-06,
      "loss": 2.9606,
      "step": 1120
    },
    {
      "epoch": 3.39,
      "grad_norm": 2.422366142272949,
      "learning_rate": 9.660660660660662e-06,
      "loss": 2.9569,
      "step": 1130
    },
    {
      "epoch": 3.42,
      "grad_norm": 1.9534465074539185,
      "learning_rate": 9.65765765765766e-06,
      "loss": 2.9615,
      "step": 1140
    },
    {
      "epoch": 3.45,
      "grad_norm": 1.9808764457702637,
      "learning_rate": 9.654654654654655e-06,
      "loss": 2.9488,
      "step": 1150
    },
    {
      "epoch": 3.48,
      "grad_norm": 1.8011443614959717,
      "learning_rate": 9.651651651651652e-06,
      "loss": 2.9403,
      "step": 1160
    },
    {
      "epoch": 3.51,
      "grad_norm": 1.944164514541626,
      "learning_rate": 9.64864864864865e-06,
      "loss": 2.901,
      "step": 1170
    },
    {
      "epoch": 3.54,
      "grad_norm": 1.8396971225738525,
      "learning_rate": 9.645645645645646e-06,
      "loss": 2.9152,
      "step": 1180
    },
    {
      "epoch": 3.57,
      "grad_norm": 2.1650946140289307,
      "learning_rate": 9.642642642642643e-06,
      "loss": 2.856,
      "step": 1190
    },
    {
      "epoch": 3.6,
      "grad_norm": 1.9538077116012573,
      "learning_rate": 9.63963963963964e-06,
      "loss": 2.8426,
      "step": 1200
    },
    {
      "epoch": 3.63,
      "grad_norm": 2.1741650104522705,
      "learning_rate": 9.636636636636636e-06,
      "loss": 2.8884,
      "step": 1210
    },
    {
      "epoch": 3.66,
      "grad_norm": 2.2596497535705566,
      "learning_rate": 9.633633633633634e-06,
      "loss": 2.8665,
      "step": 1220
    },
    {
      "epoch": 3.69,
      "grad_norm": 2.1494603157043457,
      "learning_rate": 9.630630630630631e-06,
      "loss": 2.855,
      "step": 1230
    },
    {
      "epoch": 3.72,
      "grad_norm": 1.4631502628326416,
      "learning_rate": 9.627627627627629e-06,
      "loss": 2.8295,
      "step": 1240
    },
    {
      "epoch": 3.75,
      "grad_norm": 2.025465250015259,
      "learning_rate": 9.624624624624626e-06,
      "loss": 2.807,
      "step": 1250
    },
    {
      "epoch": 3.78,
      "grad_norm": 1.8180211782455444,
      "learning_rate": 9.621621621621622e-06,
      "loss": 2.8451,
      "step": 1260
    },
    {
      "epoch": 3.81,
      "grad_norm": 2.007255792617798,
      "learning_rate": 9.61861861861862e-06,
      "loss": 2.7825,
      "step": 1270
    },
    {
      "epoch": 3.84,
      "grad_norm": 1.749640703201294,
      "learning_rate": 9.615615615615617e-06,
      "loss": 2.7795,
      "step": 1280
    },
    {
      "epoch": 3.87,
      "grad_norm": 2.4561126232147217,
      "learning_rate": 9.612612612612613e-06,
      "loss": 2.8017,
      "step": 1290
    },
    {
      "epoch": 3.9,
      "grad_norm": 2.0643820762634277,
      "learning_rate": 9.60960960960961e-06,
      "loss": 2.7468,
      "step": 1300
    },
    {
      "epoch": 3.93,
      "grad_norm": 1.8208361864089966,
      "learning_rate": 9.606606606606608e-06,
      "loss": 2.7544,
      "step": 1310
    },
    {
      "epoch": 3.96,
      "grad_norm": 2.3445229530334473,
      "learning_rate": 9.603603603603604e-06,
      "loss": 2.7689,
      "step": 1320
    },
    {
      "epoch": 3.99,
      "grad_norm": 2.2914915084838867,
      "learning_rate": 9.600600600600601e-06,
      "loss": 2.7858,
      "step": 1330
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.827,
      "eval_loss": 2.5428378582000732,
      "eval_runtime": 25.8397,
      "eval_samples_per_second": 387.001,
      "eval_steps_per_second": 1.548,
      "step": 1332
    },
    {
      "epoch": 4.02,
      "grad_norm": 2.1701862812042236,
      "learning_rate": 9.597597597597599e-06,
      "loss": 2.7277,
      "step": 1340
    },
    {
      "epoch": 4.05,
      "grad_norm": 2.483489990234375,
      "learning_rate": 9.594594594594594e-06,
      "loss": 2.6969,
      "step": 1350
    },
    {
      "epoch": 4.08,
      "grad_norm": 2.0590124130249023,
      "learning_rate": 9.591591591591592e-06,
      "loss": 2.7261,
      "step": 1360
    },
    {
      "epoch": 4.11,
      "grad_norm": 2.3316245079040527,
      "learning_rate": 9.58858858858859e-06,
      "loss": 2.699,
      "step": 1370
    },
    {
      "epoch": 4.14,
      "grad_norm": 1.8884086608886719,
      "learning_rate": 9.585585585585587e-06,
      "loss": 2.6947,
      "step": 1380
    },
    {
      "epoch": 4.17,
      "grad_norm": 1.9525305032730103,
      "learning_rate": 9.582582582582584e-06,
      "loss": 2.66,
      "step": 1390
    },
    {
      "epoch": 4.2,
      "grad_norm": 2.617844581604004,
      "learning_rate": 9.57957957957958e-06,
      "loss": 2.6716,
      "step": 1400
    },
    {
      "epoch": 4.23,
      "grad_norm": 2.178778648376465,
      "learning_rate": 9.576576576576578e-06,
      "loss": 2.7055,
      "step": 1410
    },
    {
      "epoch": 4.26,
      "grad_norm": 2.4651923179626465,
      "learning_rate": 9.573573573573575e-06,
      "loss": 2.6734,
      "step": 1420
    },
    {
      "epoch": 4.29,
      "grad_norm": 2.1029953956604004,
      "learning_rate": 9.57057057057057e-06,
      "loss": 2.65,
      "step": 1430
    },
    {
      "epoch": 4.32,
      "grad_norm": 2.09940767288208,
      "learning_rate": 9.567567567567568e-06,
      "loss": 2.6153,
      "step": 1440
    },
    {
      "epoch": 4.35,
      "grad_norm": 2.0970473289489746,
      "learning_rate": 9.564564564564566e-06,
      "loss": 2.6249,
      "step": 1450
    },
    {
      "epoch": 4.38,
      "grad_norm": 2.4626379013061523,
      "learning_rate": 9.561561561561562e-06,
      "loss": 2.6275,
      "step": 1460
    },
    {
      "epoch": 4.41,
      "grad_norm": 2.286036968231201,
      "learning_rate": 9.558558558558559e-06,
      "loss": 2.6403,
      "step": 1470
    },
    {
      "epoch": 4.44,
      "grad_norm": 1.8655325174331665,
      "learning_rate": 9.555555555555556e-06,
      "loss": 2.5838,
      "step": 1480
    },
    {
      "epoch": 4.47,
      "grad_norm": 2.145075798034668,
      "learning_rate": 9.552552552552552e-06,
      "loss": 2.5676,
      "step": 1490
    },
    {
      "epoch": 4.5,
      "grad_norm": 2.7190279960632324,
      "learning_rate": 9.54954954954955e-06,
      "loss": 2.6144,
      "step": 1500
    },
    {
      "epoch": 4.53,
      "grad_norm": 2.168372869491577,
      "learning_rate": 9.546546546546547e-06,
      "loss": 2.6037,
      "step": 1510
    },
    {
      "epoch": 4.56,
      "grad_norm": 2.2117419242858887,
      "learning_rate": 9.543543543543545e-06,
      "loss": 2.5687,
      "step": 1520
    },
    {
      "epoch": 4.59,
      "grad_norm": 2.153658866882324,
      "learning_rate": 9.540540540540542e-06,
      "loss": 2.5987,
      "step": 1530
    },
    {
      "epoch": 4.62,
      "grad_norm": 2.9541666507720947,
      "learning_rate": 9.537537537537538e-06,
      "loss": 2.5457,
      "step": 1540
    },
    {
      "epoch": 4.65,
      "grad_norm": 2.2983791828155518,
      "learning_rate": 9.534534534534535e-06,
      "loss": 2.5875,
      "step": 1550
    },
    {
      "epoch": 4.68,
      "grad_norm": 2.269871711730957,
      "learning_rate": 9.531531531531533e-06,
      "loss": 2.5002,
      "step": 1560
    },
    {
      "epoch": 4.71,
      "grad_norm": 2.495638370513916,
      "learning_rate": 9.528528528528529e-06,
      "loss": 2.541,
      "step": 1570
    },
    {
      "epoch": 4.74,
      "grad_norm": 2.479295253753662,
      "learning_rate": 9.525525525525526e-06,
      "loss": 2.531,
      "step": 1580
    },
    {
      "epoch": 4.77,
      "grad_norm": 2.627950668334961,
      "learning_rate": 9.522522522522524e-06,
      "loss": 2.5567,
      "step": 1590
    },
    {
      "epoch": 4.8,
      "grad_norm": 2.0522141456604004,
      "learning_rate": 9.51951951951952e-06,
      "loss": 2.5528,
      "step": 1600
    },
    {
      "epoch": 4.83,
      "grad_norm": 2.1786510944366455,
      "learning_rate": 9.516516516516517e-06,
      "loss": 2.4905,
      "step": 1610
    },
    {
      "epoch": 4.86,
      "grad_norm": 1.8431909084320068,
      "learning_rate": 9.513513513513514e-06,
      "loss": 2.5162,
      "step": 1620
    },
    {
      "epoch": 4.89,
      "grad_norm": 3.211226224899292,
      "learning_rate": 9.510510510510512e-06,
      "loss": 2.4975,
      "step": 1630
    },
    {
      "epoch": 4.92,
      "grad_norm": 2.483825922012329,
      "learning_rate": 9.507507507507508e-06,
      "loss": 2.495,
      "step": 1640
    },
    {
      "epoch": 4.95,
      "grad_norm": 2.4815008640289307,
      "learning_rate": 9.504504504504505e-06,
      "loss": 2.5158,
      "step": 1650
    },
    {
      "epoch": 4.98,
      "grad_norm": 2.0666556358337402,
      "learning_rate": 9.501501501501503e-06,
      "loss": 2.4372,
      "step": 1660
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.8428,
      "eval_loss": 2.2353405952453613,
      "eval_runtime": 26.4297,
      "eval_samples_per_second": 378.362,
      "eval_steps_per_second": 1.513,
      "step": 1665
    },
    {
      "epoch": 5.02,
      "grad_norm": 2.162529468536377,
      "learning_rate": 9.4984984984985e-06,
      "loss": 2.3732,
      "step": 1670
    },
    {
      "epoch": 5.05,
      "grad_norm": 2.6510798931121826,
      "learning_rate": 9.495495495495496e-06,
      "loss": 2.4363,
      "step": 1680
    },
    {
      "epoch": 5.08,
      "grad_norm": 2.430441379547119,
      "learning_rate": 9.492492492492493e-06,
      "loss": 2.4362,
      "step": 1690
    },
    {
      "epoch": 5.11,
      "grad_norm": 2.468442678451538,
      "learning_rate": 9.489489489489491e-06,
      "loss": 2.4535,
      "step": 1700
    },
    {
      "epoch": 5.14,
      "grad_norm": 2.7464473247528076,
      "learning_rate": 9.486486486486487e-06,
      "loss": 2.3854,
      "step": 1710
    },
    {
      "epoch": 5.17,
      "grad_norm": 2.607032537460327,
      "learning_rate": 9.483483483483484e-06,
      "loss": 2.3926,
      "step": 1720
    },
    {
      "epoch": 5.2,
      "grad_norm": 2.357286214828491,
      "learning_rate": 9.480480480480482e-06,
      "loss": 2.433,
      "step": 1730
    },
    {
      "epoch": 5.23,
      "grad_norm": 2.7163023948669434,
      "learning_rate": 9.477477477477477e-06,
      "loss": 2.4164,
      "step": 1740
    },
    {
      "epoch": 5.26,
      "grad_norm": 2.316063404083252,
      "learning_rate": 9.474474474474475e-06,
      "loss": 2.3618,
      "step": 1750
    },
    {
      "epoch": 5.29,
      "grad_norm": 2.23887300491333,
      "learning_rate": 9.471471471471472e-06,
      "loss": 2.3878,
      "step": 1760
    },
    {
      "epoch": 5.32,
      "grad_norm": 2.347841262817383,
      "learning_rate": 9.46846846846847e-06,
      "loss": 2.3571,
      "step": 1770
    },
    {
      "epoch": 5.35,
      "grad_norm": 2.708570718765259,
      "learning_rate": 9.465465465465467e-06,
      "loss": 2.3421,
      "step": 1780
    },
    {
      "epoch": 5.38,
      "grad_norm": 2.5647084712982178,
      "learning_rate": 9.462462462462463e-06,
      "loss": 2.347,
      "step": 1790
    },
    {
      "epoch": 5.41,
      "grad_norm": 2.5164411067962646,
      "learning_rate": 9.45945945945946e-06,
      "loss": 2.3584,
      "step": 1800
    },
    {
      "epoch": 5.44,
      "grad_norm": 4.1986541748046875,
      "learning_rate": 9.456456456456458e-06,
      "loss": 2.3429,
      "step": 1810
    },
    {
      "epoch": 5.47,
      "grad_norm": 2.8616836071014404,
      "learning_rate": 9.453453453453454e-06,
      "loss": 2.3678,
      "step": 1820
    },
    {
      "epoch": 5.5,
      "grad_norm": 3.228412628173828,
      "learning_rate": 9.450450450450451e-06,
      "loss": 2.2858,
      "step": 1830
    },
    {
      "epoch": 5.53,
      "grad_norm": 2.5712051391601562,
      "learning_rate": 9.447447447447449e-06,
      "loss": 2.2745,
      "step": 1840
    },
    {
      "epoch": 5.56,
      "grad_norm": 2.5423223972320557,
      "learning_rate": 9.444444444444445e-06,
      "loss": 2.323,
      "step": 1850
    },
    {
      "epoch": 5.59,
      "grad_norm": 2.4743051528930664,
      "learning_rate": 9.441441441441442e-06,
      "loss": 2.2725,
      "step": 1860
    },
    {
      "epoch": 5.62,
      "grad_norm": 3.4513115882873535,
      "learning_rate": 9.43843843843844e-06,
      "loss": 2.3433,
      "step": 1870
    },
    {
      "epoch": 5.65,
      "grad_norm": 3.3393611907958984,
      "learning_rate": 9.435435435435435e-06,
      "loss": 2.3089,
      "step": 1880
    },
    {
      "epoch": 5.68,
      "grad_norm": 2.2898778915405273,
      "learning_rate": 9.432432432432433e-06,
      "loss": 2.2618,
      "step": 1890
    },
    {
      "epoch": 5.71,
      "grad_norm": 3.142050266265869,
      "learning_rate": 9.42942942942943e-06,
      "loss": 2.3027,
      "step": 1900
    },
    {
      "epoch": 5.74,
      "grad_norm": 2.540493965148926,
      "learning_rate": 9.426426426426428e-06,
      "loss": 2.2619,
      "step": 1910
    },
    {
      "epoch": 5.77,
      "grad_norm": 1.9641002416610718,
      "learning_rate": 9.423423423423425e-06,
      "loss": 2.2468,
      "step": 1920
    },
    {
      "epoch": 5.8,
      "grad_norm": 3.1919631958007812,
      "learning_rate": 9.420420420420421e-06,
      "loss": 2.3087,
      "step": 1930
    },
    {
      "epoch": 5.83,
      "grad_norm": 2.1615185737609863,
      "learning_rate": 9.417417417417418e-06,
      "loss": 2.2576,
      "step": 1940
    },
    {
      "epoch": 5.86,
      "grad_norm": 2.5164833068847656,
      "learning_rate": 9.414414414414416e-06,
      "loss": 2.2505,
      "step": 1950
    },
    {
      "epoch": 5.89,
      "grad_norm": 3.334547281265259,
      "learning_rate": 9.411411411411412e-06,
      "loss": 2.2617,
      "step": 1960
    },
    {
      "epoch": 5.92,
      "grad_norm": 2.6057589054107666,
      "learning_rate": 9.40840840840841e-06,
      "loss": 2.2157,
      "step": 1970
    },
    {
      "epoch": 5.95,
      "grad_norm": 2.547347068786621,
      "learning_rate": 9.405405405405407e-06,
      "loss": 2.2166,
      "step": 1980
    },
    {
      "epoch": 5.98,
      "grad_norm": 2.4648900032043457,
      "learning_rate": 9.402402402402402e-06,
      "loss": 2.2157,
      "step": 1990
    },
    {
      "epoch": 6.0,
      "eval_accuracy": 0.8568,
      "eval_loss": 1.9597220420837402,
      "eval_runtime": 25.9049,
      "eval_samples_per_second": 386.028,
      "eval_steps_per_second": 1.544,
      "step": 1998
    },
    {
      "epoch": 6.01,
      "grad_norm": 2.6400487422943115,
      "learning_rate": 9.3993993993994e-06,
      "loss": 2.275,
      "step": 2000
    },
    {
      "epoch": 6.04,
      "grad_norm": 3.2325823307037354,
      "learning_rate": 9.396396396396397e-06,
      "loss": 2.2424,
      "step": 2010
    },
    {
      "epoch": 6.07,
      "grad_norm": 2.2359609603881836,
      "learning_rate": 9.393393393393393e-06,
      "loss": 2.1585,
      "step": 2020
    },
    {
      "epoch": 6.1,
      "grad_norm": 2.844468116760254,
      "learning_rate": 9.39039039039039e-06,
      "loss": 2.1369,
      "step": 2030
    },
    {
      "epoch": 6.13,
      "grad_norm": 2.3116044998168945,
      "learning_rate": 9.387387387387388e-06,
      "loss": 2.1565,
      "step": 2040
    },
    {
      "epoch": 6.16,
      "grad_norm": 4.145083904266357,
      "learning_rate": 9.384384384384386e-06,
      "loss": 2.1671,
      "step": 2050
    },
    {
      "epoch": 6.19,
      "grad_norm": 2.8794326782226562,
      "learning_rate": 9.381381381381383e-06,
      "loss": 2.1543,
      "step": 2060
    },
    {
      "epoch": 6.22,
      "grad_norm": 3.18340802192688,
      "learning_rate": 9.378378378378379e-06,
      "loss": 2.1705,
      "step": 2070
    },
    {
      "epoch": 6.25,
      "grad_norm": 2.4386444091796875,
      "learning_rate": 9.375375375375376e-06,
      "loss": 2.1628,
      "step": 2080
    },
    {
      "epoch": 6.28,
      "grad_norm": 2.837346315383911,
      "learning_rate": 9.372372372372374e-06,
      "loss": 2.1315,
      "step": 2090
    },
    {
      "epoch": 6.31,
      "grad_norm": 2.193432092666626,
      "learning_rate": 9.36936936936937e-06,
      "loss": 2.1403,
      "step": 2100
    },
    {
      "epoch": 6.34,
      "grad_norm": 3.1398682594299316,
      "learning_rate": 9.366366366366367e-06,
      "loss": 2.1258,
      "step": 2110
    },
    {
      "epoch": 6.37,
      "grad_norm": 2.416149377822876,
      "learning_rate": 9.363363363363363e-06,
      "loss": 2.1114,
      "step": 2120
    },
    {
      "epoch": 6.4,
      "grad_norm": 3.2373199462890625,
      "learning_rate": 9.36036036036036e-06,
      "loss": 2.1422,
      "step": 2130
    },
    {
      "epoch": 6.43,
      "grad_norm": 2.6956839561462402,
      "learning_rate": 9.357357357357358e-06,
      "loss": 2.0692,
      "step": 2140
    },
    {
      "epoch": 6.46,
      "grad_norm": 3.576822280883789,
      "learning_rate": 9.354354354354355e-06,
      "loss": 2.136,
      "step": 2150
    },
    {
      "epoch": 6.49,
      "grad_norm": 2.1885507106781006,
      "learning_rate": 9.351351351351353e-06,
      "loss": 2.1256,
      "step": 2160
    },
    {
      "epoch": 6.52,
      "grad_norm": 3.042860507965088,
      "learning_rate": 9.34834834834835e-06,
      "loss": 2.1085,
      "step": 2170
    },
    {
      "epoch": 6.55,
      "grad_norm": 3.4762566089630127,
      "learning_rate": 9.345345345345346e-06,
      "loss": 2.0583,
      "step": 2180
    },
    {
      "epoch": 6.58,
      "grad_norm": 3.6814165115356445,
      "learning_rate": 9.342342342342344e-06,
      "loss": 2.1014,
      "step": 2190
    },
    {
      "epoch": 6.61,
      "grad_norm": 2.7193524837493896,
      "learning_rate": 9.339339339339341e-06,
      "loss": 2.0804,
      "step": 2200
    },
    {
      "epoch": 6.64,
      "grad_norm": 2.850829601287842,
      "learning_rate": 9.336336336336337e-06,
      "loss": 2.0757,
      "step": 2210
    },
    {
      "epoch": 6.67,
      "grad_norm": 2.2540390491485596,
      "learning_rate": 9.333333333333334e-06,
      "loss": 2.0607,
      "step": 2220
    },
    {
      "epoch": 6.7,
      "grad_norm": 2.819641351699829,
      "learning_rate": 9.330330330330332e-06,
      "loss": 2.0624,
      "step": 2230
    },
    {
      "epoch": 6.73,
      "grad_norm": 2.1947712898254395,
      "learning_rate": 9.327327327327328e-06,
      "loss": 2.0198,
      "step": 2240
    },
    {
      "epoch": 6.76,
      "grad_norm": 2.8388874530792236,
      "learning_rate": 9.324324324324325e-06,
      "loss": 1.9486,
      "step": 2250
    },
    {
      "epoch": 6.79,
      "grad_norm": 2.8357861042022705,
      "learning_rate": 9.321321321321321e-06,
      "loss": 2.02,
      "step": 2260
    },
    {
      "epoch": 6.82,
      "grad_norm": 2.7252373695373535,
      "learning_rate": 9.318318318318318e-06,
      "loss": 1.9955,
      "step": 2270
    },
    {
      "epoch": 6.85,
      "grad_norm": 3.4677445888519287,
      "learning_rate": 9.315315315315316e-06,
      "loss": 2.0506,
      "step": 2280
    },
    {
      "epoch": 6.88,
      "grad_norm": 3.0668785572052,
      "learning_rate": 9.312312312312313e-06,
      "loss": 1.9839,
      "step": 2290
    },
    {
      "epoch": 6.91,
      "grad_norm": 2.9034082889556885,
      "learning_rate": 9.30930930930931e-06,
      "loss": 1.9957,
      "step": 2300
    },
    {
      "epoch": 6.94,
      "grad_norm": 2.9479753971099854,
      "learning_rate": 9.306306306306308e-06,
      "loss": 2.0329,
      "step": 2310
    },
    {
      "epoch": 6.97,
      "grad_norm": 3.0393028259277344,
      "learning_rate": 9.303303303303304e-06,
      "loss": 2.0185,
      "step": 2320
    },
    {
      "epoch": 7.0,
      "grad_norm": 3.204620361328125,
      "learning_rate": 9.300300300300302e-06,
      "loss": 1.9573,
      "step": 2330
    },
    {
      "epoch": 7.0,
      "eval_accuracy": 0.8685,
      "eval_loss": 1.7240691184997559,
      "eval_runtime": 26.3924,
      "eval_samples_per_second": 378.897,
      "eval_steps_per_second": 1.516,
      "step": 2331
    },
    {
      "epoch": 7.03,
      "grad_norm": 3.030730962753296,
      "learning_rate": 9.297297297297299e-06,
      "loss": 1.9975,
      "step": 2340
    },
    {
      "epoch": 7.06,
      "grad_norm": 2.9937710762023926,
      "learning_rate": 9.294294294294295e-06,
      "loss": 1.9828,
      "step": 2350
    },
    {
      "epoch": 7.09,
      "grad_norm": 2.864546775817871,
      "learning_rate": 9.291291291291292e-06,
      "loss": 2.0085,
      "step": 2360
    },
    {
      "epoch": 7.12,
      "grad_norm": 2.979231357574463,
      "learning_rate": 9.288288288288288e-06,
      "loss": 1.891,
      "step": 2370
    },
    {
      "epoch": 7.15,
      "grad_norm": 2.9546091556549072,
      "learning_rate": 9.285285285285286e-06,
      "loss": 1.9396,
      "step": 2380
    },
    {
      "epoch": 7.18,
      "grad_norm": 2.627347469329834,
      "learning_rate": 9.282282282282283e-06,
      "loss": 1.9539,
      "step": 2390
    },
    {
      "epoch": 7.21,
      "grad_norm": 3.009409189224243,
      "learning_rate": 9.27927927927928e-06,
      "loss": 1.992,
      "step": 2400
    },
    {
      "epoch": 7.24,
      "grad_norm": 2.4301156997680664,
      "learning_rate": 9.276276276276276e-06,
      "loss": 1.8915,
      "step": 2410
    },
    {
      "epoch": 7.27,
      "grad_norm": 2.8977396488189697,
      "learning_rate": 9.273273273273274e-06,
      "loss": 1.9001,
      "step": 2420
    },
    {
      "epoch": 7.3,
      "grad_norm": 3.451801300048828,
      "learning_rate": 9.270270270270271e-06,
      "loss": 1.8914,
      "step": 2430
    },
    {
      "epoch": 7.33,
      "grad_norm": 3.6057212352752686,
      "learning_rate": 9.267267267267269e-06,
      "loss": 1.8743,
      "step": 2440
    },
    {
      "epoch": 7.36,
      "grad_norm": 3.5656979084014893,
      "learning_rate": 9.264264264264266e-06,
      "loss": 1.9253,
      "step": 2450
    },
    {
      "epoch": 7.39,
      "grad_norm": 2.9827640056610107,
      "learning_rate": 9.261261261261262e-06,
      "loss": 1.8978,
      "step": 2460
    },
    {
      "epoch": 7.42,
      "grad_norm": 3.191035032272339,
      "learning_rate": 9.25825825825826e-06,
      "loss": 1.8851,
      "step": 2470
    },
    {
      "epoch": 7.45,
      "grad_norm": 3.8623464107513428,
      "learning_rate": 9.255255255255255e-06,
      "loss": 1.8979,
      "step": 2480
    },
    {
      "epoch": 7.48,
      "grad_norm": 2.779446840286255,
      "learning_rate": 9.252252252252253e-06,
      "loss": 1.867,
      "step": 2490
    },
    {
      "epoch": 7.51,
      "grad_norm": 4.140806674957275,
      "learning_rate": 9.24924924924925e-06,
      "loss": 1.9244,
      "step": 2500
    },
    {
      "epoch": 7.54,
      "grad_norm": 3.2873432636260986,
      "learning_rate": 9.246246246246246e-06,
      "loss": 1.8786,
      "step": 2510
    },
    {
      "epoch": 7.57,
      "grad_norm": 3.417663812637329,
      "learning_rate": 9.243243243243243e-06,
      "loss": 1.8828,
      "step": 2520
    },
    {
      "epoch": 7.6,
      "grad_norm": 3.7559661865234375,
      "learning_rate": 9.240240240240241e-06,
      "loss": 1.8255,
      "step": 2530
    },
    {
      "epoch": 7.63,
      "grad_norm": 3.8991100788116455,
      "learning_rate": 9.237237237237238e-06,
      "loss": 1.8616,
      "step": 2540
    },
    {
      "epoch": 7.66,
      "grad_norm": 2.5654900074005127,
      "learning_rate": 9.234234234234236e-06,
      "loss": 1.8223,
      "step": 2550
    },
    {
      "epoch": 7.69,
      "grad_norm": 3.494316577911377,
      "learning_rate": 9.231231231231232e-06,
      "loss": 1.818,
      "step": 2560
    },
    {
      "epoch": 7.72,
      "grad_norm": 3.2533693313598633,
      "learning_rate": 9.228228228228229e-06,
      "loss": 1.8748,
      "step": 2570
    },
    {
      "epoch": 7.75,
      "grad_norm": 3.4433581829071045,
      "learning_rate": 9.225225225225227e-06,
      "loss": 1.8446,
      "step": 2580
    },
    {
      "epoch": 7.78,
      "grad_norm": 3.0548644065856934,
      "learning_rate": 9.222222222222224e-06,
      "loss": 1.8367,
      "step": 2590
    },
    {
      "epoch": 7.81,
      "grad_norm": 3.341350793838501,
      "learning_rate": 9.21921921921922e-06,
      "loss": 1.8764,
      "step": 2600
    },
    {
      "epoch": 7.84,
      "grad_norm": 3.763580560684204,
      "learning_rate": 9.216216216216217e-06,
      "loss": 1.8317,
      "step": 2610
    },
    {
      "epoch": 7.87,
      "grad_norm": 3.323333263397217,
      "learning_rate": 9.213213213213213e-06,
      "loss": 1.8128,
      "step": 2620
    },
    {
      "epoch": 7.9,
      "grad_norm": 2.7645206451416016,
      "learning_rate": 9.21021021021021e-06,
      "loss": 1.8034,
      "step": 2630
    },
    {
      "epoch": 7.93,
      "grad_norm": 4.599252223968506,
      "learning_rate": 9.207207207207208e-06,
      "loss": 1.7933,
      "step": 2640
    },
    {
      "epoch": 7.96,
      "grad_norm": 2.671976089477539,
      "learning_rate": 9.204204204204204e-06,
      "loss": 1.7799,
      "step": 2650
    },
    {
      "epoch": 7.99,
      "grad_norm": 4.781398296356201,
      "learning_rate": 9.201201201201201e-06,
      "loss": 1.841,
      "step": 2660
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.8736,
      "eval_loss": 1.5209704637527466,
      "eval_runtime": 26.2803,
      "eval_samples_per_second": 380.513,
      "eval_steps_per_second": 1.522,
      "step": 2664
    },
    {
      "epoch": 8.02,
      "grad_norm": 2.8974978923797607,
      "learning_rate": 9.198198198198199e-06,
      "loss": 1.7335,
      "step": 2670
    },
    {
      "epoch": 8.05,
      "grad_norm": 3.6113295555114746,
      "learning_rate": 9.195195195195196e-06,
      "loss": 1.7897,
      "step": 2680
    },
    {
      "epoch": 8.08,
      "grad_norm": 3.8488831520080566,
      "learning_rate": 9.192192192192194e-06,
      "loss": 1.7668,
      "step": 2690
    },
    {
      "epoch": 8.11,
      "grad_norm": 2.893803834915161,
      "learning_rate": 9.189189189189191e-06,
      "loss": 1.7553,
      "step": 2700
    },
    {
      "epoch": 8.14,
      "grad_norm": 3.7040750980377197,
      "learning_rate": 9.186186186186187e-06,
      "loss": 1.7481,
      "step": 2710
    },
    {
      "epoch": 8.17,
      "grad_norm": 3.1729960441589355,
      "learning_rate": 9.183183183183185e-06,
      "loss": 1.7584,
      "step": 2720
    },
    {
      "epoch": 8.2,
      "grad_norm": 2.709282875061035,
      "learning_rate": 9.18018018018018e-06,
      "loss": 1.7882,
      "step": 2730
    },
    {
      "epoch": 8.23,
      "grad_norm": 3.1258764266967773,
      "learning_rate": 9.177177177177178e-06,
      "loss": 1.7394,
      "step": 2740
    },
    {
      "epoch": 8.26,
      "grad_norm": 3.4164059162139893,
      "learning_rate": 9.174174174174175e-06,
      "loss": 1.7657,
      "step": 2750
    },
    {
      "epoch": 8.29,
      "grad_norm": 4.909099578857422,
      "learning_rate": 9.171171171171171e-06,
      "loss": 1.7344,
      "step": 2760
    },
    {
      "epoch": 8.32,
      "grad_norm": 3.1216490268707275,
      "learning_rate": 9.168168168168169e-06,
      "loss": 1.714,
      "step": 2770
    },
    {
      "epoch": 8.35,
      "grad_norm": 2.712817430496216,
      "learning_rate": 9.165165165165166e-06,
      "loss": 1.7288,
      "step": 2780
    },
    {
      "epoch": 8.38,
      "grad_norm": 3.531217098236084,
      "learning_rate": 9.162162162162162e-06,
      "loss": 1.7329,
      "step": 2790
    },
    {
      "epoch": 8.41,
      "grad_norm": 3.097011089324951,
      "learning_rate": 9.15915915915916e-06,
      "loss": 1.7194,
      "step": 2800
    },
    {
      "epoch": 8.44,
      "grad_norm": 3.811913251876831,
      "learning_rate": 9.156156156156157e-06,
      "loss": 1.7242,
      "step": 2810
    },
    {
      "epoch": 8.47,
      "grad_norm": 3.676722288131714,
      "learning_rate": 9.153153153153154e-06,
      "loss": 1.6821,
      "step": 2820
    },
    {
      "epoch": 8.5,
      "grad_norm": 3.473322868347168,
      "learning_rate": 9.150150150150152e-06,
      "loss": 1.6764,
      "step": 2830
    },
    {
      "epoch": 8.53,
      "grad_norm": 3.613896369934082,
      "learning_rate": 9.147147147147147e-06,
      "loss": 1.6737,
      "step": 2840
    },
    {
      "epoch": 8.56,
      "grad_norm": 3.478484869003296,
      "learning_rate": 9.144144144144145e-06,
      "loss": 1.6648,
      "step": 2850
    },
    {
      "epoch": 8.59,
      "grad_norm": 3.295175313949585,
      "learning_rate": 9.141141141141142e-06,
      "loss": 1.6682,
      "step": 2860
    },
    {
      "epoch": 8.62,
      "grad_norm": 3.504593849182129,
      "learning_rate": 9.138138138138138e-06,
      "loss": 1.641,
      "step": 2870
    },
    {
      "epoch": 8.65,
      "grad_norm": 3.8884923458099365,
      "learning_rate": 9.135135135135136e-06,
      "loss": 1.6504,
      "step": 2880
    },
    {
      "epoch": 8.68,
      "grad_norm": 3.4164955615997314,
      "learning_rate": 9.132132132132133e-06,
      "loss": 1.6809,
      "step": 2890
    },
    {
      "epoch": 8.71,
      "grad_norm": 2.9317514896392822,
      "learning_rate": 9.129129129129129e-06,
      "loss": 1.6526,
      "step": 2900
    },
    {
      "epoch": 8.74,
      "grad_norm": 3.4553208351135254,
      "learning_rate": 9.126126126126126e-06,
      "loss": 1.6936,
      "step": 2910
    },
    {
      "epoch": 8.77,
      "grad_norm": 3.3463358879089355,
      "learning_rate": 9.123123123123124e-06,
      "loss": 1.6249,
      "step": 2920
    },
    {
      "epoch": 8.8,
      "grad_norm": 2.9135587215423584,
      "learning_rate": 9.120120120120121e-06,
      "loss": 1.6142,
      "step": 2930
    },
    {
      "epoch": 8.83,
      "grad_norm": 3.325805902481079,
      "learning_rate": 9.117117117117117e-06,
      "loss": 1.6483,
      "step": 2940
    },
    {
      "epoch": 8.86,
      "grad_norm": 3.7595407962799072,
      "learning_rate": 9.114114114114115e-06,
      "loss": 1.5844,
      "step": 2950
    },
    {
      "epoch": 8.89,
      "grad_norm": 3.486954927444458,
      "learning_rate": 9.111111111111112e-06,
      "loss": 1.6402,
      "step": 2960
    },
    {
      "epoch": 8.92,
      "grad_norm": 3.0209648609161377,
      "learning_rate": 9.10810810810811e-06,
      "loss": 1.6557,
      "step": 2970
    },
    {
      "epoch": 8.95,
      "grad_norm": 3.117098093032837,
      "learning_rate": 9.105105105105105e-06,
      "loss": 1.613,
      "step": 2980
    },
    {
      "epoch": 8.98,
      "grad_norm": 4.14963436126709,
      "learning_rate": 9.102102102102103e-06,
      "loss": 1.6085,
      "step": 2990
    },
    {
      "epoch": 9.0,
      "eval_accuracy": 0.8832,
      "eval_loss": 1.3363327980041504,
      "eval_runtime": 25.8706,
      "eval_samples_per_second": 386.54,
      "eval_steps_per_second": 1.546,
      "step": 2997
    },
    {
      "epoch": 9.01,
      "grad_norm": 3.161076784133911,
      "learning_rate": 9.0990990990991e-06,
      "loss": 1.7363,
      "step": 3000
    },
    {
      "epoch": 9.04,
      "grad_norm": 3.7530171871185303,
      "learning_rate": 9.096096096096096e-06,
      "loss": 1.6056,
      "step": 3010
    },
    {
      "epoch": 9.07,
      "grad_norm": 3.0861854553222656,
      "learning_rate": 9.093093093093094e-06,
      "loss": 1.6208,
      "step": 3020
    },
    {
      "epoch": 9.1,
      "grad_norm": 4.3751702308654785,
      "learning_rate": 9.090090090090091e-06,
      "loss": 1.582,
      "step": 3030
    },
    {
      "epoch": 9.13,
      "grad_norm": 2.86613130569458,
      "learning_rate": 9.087087087087087e-06,
      "loss": 1.5261,
      "step": 3040
    },
    {
      "epoch": 9.16,
      "grad_norm": 2.8464765548706055,
      "learning_rate": 9.084084084084084e-06,
      "loss": 1.5857,
      "step": 3050
    },
    {
      "epoch": 9.19,
      "grad_norm": 2.6522128582000732,
      "learning_rate": 9.081081081081082e-06,
      "loss": 1.66,
      "step": 3060
    },
    {
      "epoch": 9.22,
      "grad_norm": 3.4797539710998535,
      "learning_rate": 9.07807807807808e-06,
      "loss": 1.587,
      "step": 3070
    },
    {
      "epoch": 9.25,
      "grad_norm": 3.0189497470855713,
      "learning_rate": 9.075075075075077e-06,
      "loss": 1.5915,
      "step": 3080
    },
    {
      "epoch": 9.28,
      "grad_norm": 5.351146221160889,
      "learning_rate": 9.072072072072073e-06,
      "loss": 1.5165,
      "step": 3090
    },
    {
      "epoch": 9.31,
      "grad_norm": 3.7793538570404053,
      "learning_rate": 9.06906906906907e-06,
      "loss": 1.5507,
      "step": 3100
    },
    {
      "epoch": 9.34,
      "grad_norm": 3.4388442039489746,
      "learning_rate": 9.066066066066068e-06,
      "loss": 1.5718,
      "step": 3110
    },
    {
      "epoch": 9.37,
      "grad_norm": 3.262924909591675,
      "learning_rate": 9.063063063063063e-06,
      "loss": 1.612,
      "step": 3120
    },
    {
      "epoch": 9.4,
      "grad_norm": 5.9477386474609375,
      "learning_rate": 9.06006006006006e-06,
      "loss": 1.5597,
      "step": 3130
    },
    {
      "epoch": 9.43,
      "grad_norm": 3.848048448562622,
      "learning_rate": 9.057057057057058e-06,
      "loss": 1.5803,
      "step": 3140
    },
    {
      "epoch": 9.46,
      "grad_norm": 4.263827800750732,
      "learning_rate": 9.054054054054054e-06,
      "loss": 1.5315,
      "step": 3150
    },
    {
      "epoch": 9.49,
      "grad_norm": 5.525787830352783,
      "learning_rate": 9.051051051051052e-06,
      "loss": 1.5075,
      "step": 3160
    },
    {
      "epoch": 9.52,
      "grad_norm": 2.918323516845703,
      "learning_rate": 9.048048048048049e-06,
      "loss": 1.4725,
      "step": 3170
    },
    {
      "epoch": 9.55,
      "grad_norm": 6.338906764984131,
      "learning_rate": 9.045045045045045e-06,
      "loss": 1.594,
      "step": 3180
    },
    {
      "epoch": 9.58,
      "grad_norm": 3.055828809738159,
      "learning_rate": 9.042042042042042e-06,
      "loss": 1.5254,
      "step": 3190
    },
    {
      "epoch": 9.61,
      "grad_norm": 3.1687676906585693,
      "learning_rate": 9.03903903903904e-06,
      "loss": 1.5004,
      "step": 3200
    },
    {
      "epoch": 9.64,
      "grad_norm": 2.2005269527435303,
      "learning_rate": 9.036036036036037e-06,
      "loss": 1.502,
      "step": 3210
    },
    {
      "epoch": 9.67,
      "grad_norm": 3.7019267082214355,
      "learning_rate": 9.033033033033035e-06,
      "loss": 1.5771,
      "step": 3220
    },
    {
      "epoch": 9.7,
      "grad_norm": 4.179267883300781,
      "learning_rate": 9.03003003003003e-06,
      "loss": 1.506,
      "step": 3230
    },
    {
      "epoch": 9.73,
      "grad_norm": 4.042637825012207,
      "learning_rate": 9.027027027027028e-06,
      "loss": 1.5327,
      "step": 3240
    },
    {
      "epoch": 9.76,
      "grad_norm": 5.137342929840088,
      "learning_rate": 9.024024024024025e-06,
      "loss": 1.5486,
      "step": 3250
    },
    {
      "epoch": 9.79,
      "grad_norm": 3.571098566055298,
      "learning_rate": 9.021021021021021e-06,
      "loss": 1.4718,
      "step": 3260
    },
    {
      "epoch": 9.82,
      "grad_norm": 3.28196382522583,
      "learning_rate": 9.018018018018019e-06,
      "loss": 1.5306,
      "step": 3270
    },
    {
      "epoch": 9.85,
      "grad_norm": 2.9630990028381348,
      "learning_rate": 9.015015015015016e-06,
      "loss": 1.4988,
      "step": 3280
    },
    {
      "epoch": 9.88,
      "grad_norm": 4.726624965667725,
      "learning_rate": 9.012012012012012e-06,
      "loss": 1.4633,
      "step": 3290
    },
    {
      "epoch": 9.91,
      "grad_norm": 3.4337375164031982,
      "learning_rate": 9.00900900900901e-06,
      "loss": 1.4439,
      "step": 3300
    },
    {
      "epoch": 9.94,
      "grad_norm": 4.365289688110352,
      "learning_rate": 9.006006006006007e-06,
      "loss": 1.498,
      "step": 3310
    },
    {
      "epoch": 9.97,
      "grad_norm": 3.175220012664795,
      "learning_rate": 9.003003003003003e-06,
      "loss": 1.4573,
      "step": 3320
    },
    {
      "epoch": 10.0,
      "grad_norm": 3.100367307662964,
      "learning_rate": 9e-06,
      "loss": 1.4188,
      "step": 3330
    },
    {
      "epoch": 10.0,
      "eval_accuracy": 0.8861,
      "eval_loss": 1.1857481002807617,
      "eval_runtime": 25.9956,
      "eval_samples_per_second": 384.681,
      "eval_steps_per_second": 1.539,
      "step": 3330
    },
    {
      "epoch": 10.03,
      "grad_norm": 3.940972328186035,
      "learning_rate": 8.996996996996998e-06,
      "loss": 1.4333,
      "step": 3340
    },
    {
      "epoch": 10.06,
      "grad_norm": 4.0231499671936035,
      "learning_rate": 8.993993993993995e-06,
      "loss": 1.4701,
      "step": 3350
    },
    {
      "epoch": 10.09,
      "grad_norm": 3.5897438526153564,
      "learning_rate": 8.990990990990993e-06,
      "loss": 1.4368,
      "step": 3360
    },
    {
      "epoch": 10.12,
      "grad_norm": 4.010433673858643,
      "learning_rate": 8.987987987987988e-06,
      "loss": 1.4501,
      "step": 3370
    },
    {
      "epoch": 10.15,
      "grad_norm": 3.788407325744629,
      "learning_rate": 8.984984984984986e-06,
      "loss": 1.4382,
      "step": 3380
    },
    {
      "epoch": 10.18,
      "grad_norm": 3.2629690170288086,
      "learning_rate": 8.981981981981983e-06,
      "loss": 1.4122,
      "step": 3390
    },
    {
      "epoch": 10.21,
      "grad_norm": 4.393489360809326,
      "learning_rate": 8.97897897897898e-06,
      "loss": 1.4517,
      "step": 3400
    },
    {
      "epoch": 10.24,
      "grad_norm": 3.116422653198242,
      "learning_rate": 8.975975975975977e-06,
      "loss": 1.4486,
      "step": 3410
    },
    {
      "epoch": 10.27,
      "grad_norm": 3.059415340423584,
      "learning_rate": 8.972972972972974e-06,
      "loss": 1.4273,
      "step": 3420
    },
    {
      "epoch": 10.3,
      "grad_norm": 3.2235398292541504,
      "learning_rate": 8.96996996996997e-06,
      "loss": 1.4208,
      "step": 3430
    },
    {
      "epoch": 10.33,
      "grad_norm": 3.3876900672912598,
      "learning_rate": 8.966966966966967e-06,
      "loss": 1.4676,
      "step": 3440
    },
    {
      "epoch": 10.36,
      "grad_norm": 4.044349193572998,
      "learning_rate": 8.963963963963965e-06,
      "loss": 1.4552,
      "step": 3450
    },
    {
      "epoch": 10.39,
      "grad_norm": 3.1774368286132812,
      "learning_rate": 8.960960960960962e-06,
      "loss": 1.3933,
      "step": 3460
    },
    {
      "epoch": 10.42,
      "grad_norm": 3.186774253845215,
      "learning_rate": 8.957957957957958e-06,
      "loss": 1.4075,
      "step": 3470
    },
    {
      "epoch": 10.45,
      "grad_norm": 4.519390106201172,
      "learning_rate": 8.954954954954956e-06,
      "loss": 1.4225,
      "step": 3480
    },
    {
      "epoch": 10.48,
      "grad_norm": 3.620778799057007,
      "learning_rate": 8.951951951951953e-06,
      "loss": 1.4318,
      "step": 3490
    },
    {
      "epoch": 10.51,
      "grad_norm": 3.7709479331970215,
      "learning_rate": 8.94894894894895e-06,
      "loss": 1.4314,
      "step": 3500
    },
    {
      "epoch": 10.54,
      "grad_norm": 4.269083023071289,
      "learning_rate": 8.945945945945946e-06,
      "loss": 1.3506,
      "step": 3510
    },
    {
      "epoch": 10.57,
      "grad_norm": 3.909588098526001,
      "learning_rate": 8.942942942942944e-06,
      "loss": 1.3592,
      "step": 3520
    },
    {
      "epoch": 10.6,
      "grad_norm": 7.832637310028076,
      "learning_rate": 8.939939939939941e-06,
      "loss": 1.4296,
      "step": 3530
    },
    {
      "epoch": 10.63,
      "grad_norm": 4.4831719398498535,
      "learning_rate": 8.936936936936937e-06,
      "loss": 1.3888,
      "step": 3540
    },
    {
      "epoch": 10.66,
      "grad_norm": 3.919921398162842,
      "learning_rate": 8.933933933933935e-06,
      "loss": 1.4129,
      "step": 3550
    },
    {
      "epoch": 10.69,
      "grad_norm": 3.411519765853882,
      "learning_rate": 8.93093093093093e-06,
      "loss": 1.3775,
      "step": 3560
    },
    {
      "epoch": 10.72,
      "grad_norm": 4.0573506355285645,
      "learning_rate": 8.927927927927928e-06,
      "loss": 1.3593,
      "step": 3570
    },
    {
      "epoch": 10.75,
      "grad_norm": 5.557109832763672,
      "learning_rate": 8.924924924924925e-06,
      "loss": 1.3374,
      "step": 3580
    },
    {
      "epoch": 10.78,
      "grad_norm": 4.917846202850342,
      "learning_rate": 8.921921921921923e-06,
      "loss": 1.3134,
      "step": 3590
    },
    {
      "epoch": 10.81,
      "grad_norm": 3.7236461639404297,
      "learning_rate": 8.91891891891892e-06,
      "loss": 1.3706,
      "step": 3600
    },
    {
      "epoch": 10.84,
      "grad_norm": 4.710603713989258,
      "learning_rate": 8.915915915915918e-06,
      "loss": 1.3007,
      "step": 3610
    },
    {
      "epoch": 10.87,
      "grad_norm": 3.778068780899048,
      "learning_rate": 8.912912912912914e-06,
      "loss": 1.3659,
      "step": 3620
    },
    {
      "epoch": 10.9,
      "grad_norm": 4.073859214782715,
      "learning_rate": 8.909909909909911e-06,
      "loss": 1.3516,
      "step": 3630
    },
    {
      "epoch": 10.93,
      "grad_norm": 3.431516408920288,
      "learning_rate": 8.906906906906909e-06,
      "loss": 1.3217,
      "step": 3640
    },
    {
      "epoch": 10.96,
      "grad_norm": 4.807550430297852,
      "learning_rate": 8.903903903903904e-06,
      "loss": 1.3234,
      "step": 3650
    },
    {
      "epoch": 10.99,
      "grad_norm": 2.7062273025512695,
      "learning_rate": 8.900900900900902e-06,
      "loss": 1.3424,
      "step": 3660
    },
    {
      "epoch": 11.0,
      "eval_accuracy": 0.8923,
      "eval_loss": 1.0521777868270874,
      "eval_runtime": 26.5374,
      "eval_samples_per_second": 376.827,
      "eval_steps_per_second": 1.507,
      "step": 3663
    },
    {
      "epoch": 11.02,
      "grad_norm": 3.720083236694336,
      "learning_rate": 8.8978978978979e-06,
      "loss": 1.4965,
      "step": 3670
    },
    {
      "epoch": 11.05,
      "grad_norm": 9.219380378723145,
      "learning_rate": 8.894894894894895e-06,
      "loss": 1.3053,
      "step": 3680
    },
    {
      "epoch": 11.08,
      "grad_norm": 4.394057273864746,
      "learning_rate": 8.891891891891893e-06,
      "loss": 1.3092,
      "step": 3690
    },
    {
      "epoch": 11.11,
      "grad_norm": 4.5011887550354,
      "learning_rate": 8.888888888888888e-06,
      "loss": 1.3167,
      "step": 3700
    },
    {
      "epoch": 11.14,
      "grad_norm": 3.506056070327759,
      "learning_rate": 8.885885885885886e-06,
      "loss": 1.285,
      "step": 3710
    },
    {
      "epoch": 11.17,
      "grad_norm": 2.9524905681610107,
      "learning_rate": 8.882882882882883e-06,
      "loss": 1.2807,
      "step": 3720
    },
    {
      "epoch": 11.2,
      "grad_norm": 4.951297760009766,
      "learning_rate": 8.87987987987988e-06,
      "loss": 1.3685,
      "step": 3730
    },
    {
      "epoch": 11.23,
      "grad_norm": 4.545038223266602,
      "learning_rate": 8.876876876876878e-06,
      "loss": 1.2753,
      "step": 3740
    },
    {
      "epoch": 11.26,
      "grad_norm": 3.9689831733703613,
      "learning_rate": 8.873873873873876e-06,
      "loss": 1.2925,
      "step": 3750
    },
    {
      "epoch": 11.29,
      "grad_norm": 3.231689929962158,
      "learning_rate": 8.870870870870871e-06,
      "loss": 1.3154,
      "step": 3760
    },
    {
      "epoch": 11.32,
      "grad_norm": 4.227672100067139,
      "learning_rate": 8.867867867867869e-06,
      "loss": 1.3276,
      "step": 3770
    },
    {
      "epoch": 11.35,
      "grad_norm": 3.2093513011932373,
      "learning_rate": 8.864864864864866e-06,
      "loss": 1.3571,
      "step": 3780
    },
    {
      "epoch": 11.38,
      "grad_norm": 3.391765832901001,
      "learning_rate": 8.861861861861862e-06,
      "loss": 1.3214,
      "step": 3790
    },
    {
      "epoch": 11.41,
      "grad_norm": 2.6600348949432373,
      "learning_rate": 8.85885885885886e-06,
      "loss": 1.3242,
      "step": 3800
    },
    {
      "epoch": 11.44,
      "grad_norm": 3.3445725440979004,
      "learning_rate": 8.855855855855855e-06,
      "loss": 1.2893,
      "step": 3810
    },
    {
      "epoch": 11.47,
      "grad_norm": 5.254781246185303,
      "learning_rate": 8.852852852852853e-06,
      "loss": 1.2612,
      "step": 3820
    },
    {
      "epoch": 11.5,
      "grad_norm": 3.754554271697998,
      "learning_rate": 8.84984984984985e-06,
      "loss": 1.2711,
      "step": 3830
    },
    {
      "epoch": 11.53,
      "grad_norm": 4.283574104309082,
      "learning_rate": 8.846846846846848e-06,
      "loss": 1.2976,
      "step": 3840
    },
    {
      "epoch": 11.56,
      "grad_norm": 5.312755107879639,
      "learning_rate": 8.843843843843844e-06,
      "loss": 1.2624,
      "step": 3850
    },
    {
      "epoch": 11.59,
      "grad_norm": 3.4620511531829834,
      "learning_rate": 8.840840840840841e-06,
      "loss": 1.3101,
      "step": 3860
    },
    {
      "epoch": 11.62,
      "grad_norm": 3.3053994178771973,
      "learning_rate": 8.837837837837839e-06,
      "loss": 1.292,
      "step": 3870
    },
    {
      "epoch": 11.65,
      "grad_norm": 3.8711774349212646,
      "learning_rate": 8.834834834834836e-06
|
"loss": 1.2923, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 11.68, |
|
"grad_norm": 3.261052131652832, |
|
"learning_rate": 8.831831831831834e-06, |
|
"loss": 1.2687, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 11.71, |
|
"grad_norm": 5.33121919631958, |
|
"learning_rate": 8.82882882882883e-06, |
|
"loss": 1.3165, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 11.74, |
|
"grad_norm": 4.399143218994141, |
|
"learning_rate": 8.825825825825827e-06, |
|
"loss": 1.2678, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 11.77, |
|
"grad_norm": 4.487939357757568, |
|
"learning_rate": 8.822822822822824e-06, |
|
"loss": 1.227, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 11.8, |
|
"grad_norm": 4.024313449859619, |
|
"learning_rate": 8.81981981981982e-06, |
|
"loss": 1.2681, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 11.83, |
|
"grad_norm": 4.159395694732666, |
|
"learning_rate": 8.816816816816818e-06, |
|
"loss": 1.2048, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 11.86, |
|
"grad_norm": 3.9923174381256104, |
|
"learning_rate": 8.813813813813813e-06, |
|
"loss": 1.2211, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 11.89, |
|
"grad_norm": 4.025728225708008, |
|
"learning_rate": 8.810810810810811e-06, |
|
"loss": 1.2415, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 11.92, |
|
"grad_norm": 3.927666664123535, |
|
"learning_rate": 8.807807807807808e-06, |
|
"loss": 1.2254, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 11.95, |
|
"grad_norm": 5.660053253173828, |
|
"learning_rate": 8.804804804804806e-06, |
|
"loss": 1.2388, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 11.98, |
|
"grad_norm": 3.2951526641845703, |
|
"learning_rate": 8.801801801801803e-06, |
|
"loss": 1.1924, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 12.0, |
|
"eval_accuracy": 0.8983, |
|
"eval_loss": 0.9380095601081848, |
|
"eval_runtime": 26.1486, |
|
"eval_samples_per_second": 382.429, |
|
"eval_steps_per_second": 1.53, |
|
"step": 3996 |
|
}, |
|
{ |
|
"epoch": 12.01, |
|
"grad_norm": 4.126723289489746, |
|
"learning_rate": 8.798798798798799e-06, |
|
"loss": 1.3484, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 12.04, |
|
"grad_norm": 4.03225564956665, |
|
"learning_rate": 8.795795795795797e-06, |
|
"loss": 1.2141, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 12.07, |
|
"grad_norm": 4.17138671875, |
|
"learning_rate": 8.792792792792794e-06, |
|
"loss": 1.1898, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 12.1, |
|
"grad_norm": 2.8586363792419434, |
|
"learning_rate": 8.789789789789792e-06, |
|
"loss": 1.1838, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 12.13, |
|
"grad_norm": 4.225535869598389, |
|
"learning_rate": 8.786786786786787e-06, |
|
"loss": 1.1863, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 12.16, |
|
"grad_norm": 4.490497589111328, |
|
"learning_rate": 8.783783783783785e-06, |
|
"loss": 1.218, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 12.19, |
|
"grad_norm": 5.3085408210754395, |
|
"learning_rate": 8.78078078078078e-06, |
|
"loss": 1.189, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 12.22, |
|
"grad_norm": 4.248027801513672, |
|
"learning_rate": 8.777777777777778e-06, |
|
"loss": 1.1828, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 12.25, |
|
"grad_norm": 4.555266380310059, |
|
"learning_rate": 8.774774774774776e-06, |
|
"loss": 1.2203, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 12.28, |
|
"grad_norm": 4.271499156951904, |
|
"learning_rate": 8.771771771771771e-06, |
|
"loss": 1.2298, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 12.31, |
|
"grad_norm": 4.123808860778809, |
|
"learning_rate": 8.768768768768769e-06, |
|
"loss": 1.1945, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 12.34, |
|
"grad_norm": 4.744080066680908, |
|
"learning_rate": 8.765765765765766e-06, |
|
"loss": 1.1765, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 12.37, |
|
"grad_norm": 5.163188934326172, |
|
"learning_rate": 8.762762762762764e-06, |
|
"loss": 1.2019, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 12.4, |
|
"grad_norm": 4.672330379486084, |
|
"learning_rate": 8.759759759759761e-06, |
|
"loss": 1.2162, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 12.43, |
|
"grad_norm": 3.3714725971221924, |
|
"learning_rate": 8.756756756756759e-06, |
|
"loss": 1.2412, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 12.46, |
|
"grad_norm": 4.335172653198242, |
|
"learning_rate": 8.753753753753755e-06, |
|
"loss": 1.1686, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 12.49, |
|
"grad_norm": 3.776484251022339, |
|
"learning_rate": 8.750750750750752e-06, |
|
"loss": 1.1799, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 12.52, |
|
"grad_norm": 4.604945659637451, |
|
"learning_rate": 8.747747747747748e-06, |
|
"loss": 1.1592, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 12.55, |
|
"grad_norm": 3.4792492389678955, |
|
"learning_rate": 8.744744744744745e-06, |
|
"loss": 1.1912, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 12.58, |
|
"grad_norm": 5.771131992340088, |
|
"learning_rate": 8.741741741741743e-06, |
|
"loss": 1.1675, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 12.61, |
|
"grad_norm": 3.7320287227630615, |
|
"learning_rate": 8.738738738738739e-06, |
|
"loss": 1.1597, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 12.64, |
|
"grad_norm": 5.218122959136963, |
|
"learning_rate": 8.735735735735736e-06, |
|
"loss": 1.1764, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 12.67, |
|
"grad_norm": 3.9445109367370605, |
|
"learning_rate": 8.732732732732733e-06, |
|
"loss": 1.1086, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 12.7, |
|
"grad_norm": 4.147899150848389, |
|
"learning_rate": 8.72972972972973e-06, |
|
"loss": 1.1232, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 12.73, |
|
"grad_norm": 5.463786602020264, |
|
"learning_rate": 8.726726726726727e-06, |
|
"loss": 1.1803, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"grad_norm": 3.1907215118408203, |
|
"learning_rate": 8.723723723723724e-06, |
|
"loss": 1.0963, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 12.79, |
|
"grad_norm": 4.256779193878174, |
|
"learning_rate": 8.720720720720722e-06, |
|
"loss": 1.1338, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 12.82, |
|
"grad_norm": 3.1030843257904053, |
|
"learning_rate": 8.71771771771772e-06, |
|
"loss": 1.1643, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 12.85, |
|
"grad_norm": 3.6219348907470703, |
|
"learning_rate": 8.714714714714717e-06, |
|
"loss": 1.1564, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 12.88, |
|
"grad_norm": 4.349027633666992, |
|
"learning_rate": 8.711711711711712e-06, |
|
"loss": 1.0917, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 12.91, |
|
"grad_norm": 3.0795490741729736, |
|
"learning_rate": 8.70870870870871e-06, |
|
"loss": 1.1245, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 12.94, |
|
"grad_norm": 3.5159428119659424, |
|
"learning_rate": 8.705705705705706e-06, |
|
"loss": 1.1587, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 12.97, |
|
"grad_norm": 3.8004889488220215, |
|
"learning_rate": 8.702702702702703e-06, |
|
"loss": 1.1764, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"eval_accuracy": 0.8999, |
|
"eval_loss": 0.8404828310012817, |
|
"eval_runtime": 26.4667, |
|
"eval_samples_per_second": 377.833, |
|
"eval_steps_per_second": 1.511, |
|
"step": 4329 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"grad_norm": 5.436849594116211, |
|
"learning_rate": 8.6996996996997e-06, |
|
"loss": 1.1035, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 13.03, |
|
"grad_norm": 4.124845504760742, |
|
"learning_rate": 8.696696696696696e-06, |
|
"loss": 1.0423, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 13.06, |
|
"grad_norm": 7.9258036613464355, |
|
"learning_rate": 8.693693693693694e-06, |
|
"loss": 1.096, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 13.09, |
|
"grad_norm": 4.796426296234131, |
|
"learning_rate": 8.690690690690691e-06, |
|
"loss": 1.1952, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 13.12, |
|
"grad_norm": 4.917626857757568, |
|
"learning_rate": 8.687687687687689e-06, |
|
"loss": 1.1613, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 13.15, |
|
"grad_norm": 3.8574671745300293, |
|
"learning_rate": 8.684684684684686e-06, |
|
"loss": 1.1469, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 13.18, |
|
"grad_norm": 3.908940315246582, |
|
"learning_rate": 8.681681681681682e-06, |
|
"loss": 1.1275, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 13.21, |
|
"grad_norm": 3.9402410984039307, |
|
"learning_rate": 8.67867867867868e-06, |
|
"loss": 1.141, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 13.24, |
|
"grad_norm": 2.884037971496582, |
|
"learning_rate": 8.675675675675677e-06, |
|
"loss": 1.0775, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 13.27, |
|
"grad_norm": 5.2757439613342285, |
|
"learning_rate": 8.672672672672673e-06, |
|
"loss": 1.1441, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 13.3, |
|
"grad_norm": 5.043304920196533, |
|
"learning_rate": 8.66966966966967e-06, |
|
"loss": 1.1053, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 13.33, |
|
"grad_norm": 3.6216320991516113, |
|
"learning_rate": 8.666666666666668e-06, |
|
"loss": 1.0889, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 13.36, |
|
"grad_norm": 3.5936968326568604, |
|
"learning_rate": 8.663663663663664e-06, |
|
"loss": 1.0993, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 13.39, |
|
"grad_norm": 4.1380743980407715, |
|
"learning_rate": 8.660660660660661e-06, |
|
"loss": 1.1261, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 13.42, |
|
"grad_norm": 3.888875722885132, |
|
"learning_rate": 8.657657657657659e-06, |
|
"loss": 1.1264, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 13.45, |
|
"grad_norm": 4.108713150024414, |
|
"learning_rate": 8.654654654654654e-06, |
|
"loss": 1.1277, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 13.48, |
|
"grad_norm": 4.775360107421875, |
|
"learning_rate": 8.651651651651652e-06, |
|
"loss": 1.0757, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 13.51, |
|
"grad_norm": 4.780664920806885, |
|
"learning_rate": 8.64864864864865e-06, |
|
"loss": 1.0774, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 13.54, |
|
"grad_norm": 4.424095630645752, |
|
"learning_rate": 8.645645645645647e-06, |
|
"loss": 1.1321, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 13.57, |
|
"grad_norm": 5.375193119049072, |
|
"learning_rate": 8.642642642642644e-06, |
|
"loss": 1.0712, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 13.6, |
|
"grad_norm": 4.035747051239014, |
|
"learning_rate": 8.63963963963964e-06, |
|
"loss": 1.1009, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 13.63, |
|
"grad_norm": 5.080639362335205, |
|
"learning_rate": 8.636636636636638e-06, |
|
"loss": 1.1534, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 13.66, |
|
"grad_norm": 3.3346140384674072, |
|
"learning_rate": 8.633633633633635e-06, |
|
"loss": 1.0454, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 13.69, |
|
"grad_norm": 3.6367032527923584, |
|
"learning_rate": 8.63063063063063e-06, |
|
"loss": 1.1244, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 13.72, |
|
"grad_norm": 6.010049819946289, |
|
"learning_rate": 8.627627627627628e-06, |
|
"loss": 1.0784, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 13.75, |
|
"grad_norm": 3.2728335857391357, |
|
"learning_rate": 8.624624624624626e-06, |
|
"loss": 1.0053, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 13.78, |
|
"grad_norm": 4.756971836090088, |
|
"learning_rate": 8.621621621621622e-06, |
|
"loss": 1.0427, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 13.81, |
|
"grad_norm": 3.9494152069091797, |
|
"learning_rate": 8.618618618618619e-06, |
|
"loss": 1.0729, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 13.84, |
|
"grad_norm": 4.257194995880127, |
|
"learning_rate": 8.615615615615616e-06, |
|
"loss": 1.0893, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 13.87, |
|
"grad_norm": 4.306645393371582, |
|
"learning_rate": 8.612612612612612e-06, |
|
"loss": 1.0025, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 13.9, |
|
"grad_norm": 4.929202556610107, |
|
"learning_rate": 8.60960960960961e-06, |
|
"loss": 1.1036, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 13.93, |
|
"grad_norm": 4.744569301605225, |
|
"learning_rate": 8.606606606606607e-06, |
|
"loss": 1.0783, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 13.96, |
|
"grad_norm": 5.201243877410889, |
|
"learning_rate": 8.603603603603605e-06, |
|
"loss": 1.0203, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 13.99, |
|
"grad_norm": 2.8255743980407715, |
|
"learning_rate": 8.600600600600602e-06, |
|
"loss": 1.0548, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_accuracy": 0.9024, |
|
"eval_loss": 0.7641175389289856, |
|
"eval_runtime": 26.4626, |
|
"eval_samples_per_second": 377.892, |
|
"eval_steps_per_second": 1.512, |
|
"step": 4662 |
|
}, |
|
{ |
|
"epoch": 14.02, |
|
"grad_norm": 4.445658206939697, |
|
"learning_rate": 8.597597597597598e-06, |
|
"loss": 1.0731, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 14.05, |
|
"grad_norm": 4.949143409729004, |
|
"learning_rate": 8.594594594594595e-06, |
|
"loss": 1.0116, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 14.08, |
|
"grad_norm": 3.9592392444610596, |
|
"learning_rate": 8.591591591591593e-06, |
|
"loss": 1.0098, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 14.11, |
|
"grad_norm": 4.729857444763184, |
|
"learning_rate": 8.588588588588589e-06, |
|
"loss": 1.0176, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 14.14, |
|
"grad_norm": 4.354032039642334, |
|
"learning_rate": 8.585585585585586e-06, |
|
"loss": 1.0034, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 14.17, |
|
"grad_norm": 5.001422882080078, |
|
"learning_rate": 8.582582582582584e-06, |
|
"loss": 1.0534, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 14.2, |
|
"grad_norm": 4.593340873718262, |
|
"learning_rate": 8.57957957957958e-06, |
|
"loss": 1.1093, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 14.23, |
|
"grad_norm": 4.776356220245361, |
|
"learning_rate": 8.576576576576577e-06, |
|
"loss": 1.1008, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 14.26, |
|
"grad_norm": 4.727325439453125, |
|
"learning_rate": 8.573573573573574e-06, |
|
"loss": 1.0395, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 14.29, |
|
"grad_norm": 5.222812652587891, |
|
"learning_rate": 8.570570570570572e-06, |
|
"loss": 1.0424, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 14.32, |
|
"grad_norm": 4.294810771942139, |
|
"learning_rate": 8.567567567567568e-06, |
|
"loss": 0.9906, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 14.35, |
|
"grad_norm": 3.130147933959961, |
|
"learning_rate": 8.564564564564565e-06, |
|
"loss": 1.0348, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 14.38, |
|
"grad_norm": 4.623157501220703, |
|
"learning_rate": 8.561561561561563e-06, |
|
"loss": 1.0151, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 14.41, |
|
"grad_norm": 7.708536148071289, |
|
"learning_rate": 8.55855855855856e-06, |
|
"loss": 1.0329, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 14.44, |
|
"grad_norm": 2.8359479904174805, |
|
"learning_rate": 8.555555555555556e-06, |
|
"loss": 1.0228, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 14.47, |
|
"grad_norm": 4.290976524353027, |
|
"learning_rate": 8.552552552552553e-06, |
|
"loss": 1.0335, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 14.5, |
|
"grad_norm": 4.422244548797607, |
|
"learning_rate": 8.549549549549551e-06, |
|
"loss": 1.0498, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 14.53, |
|
"grad_norm": 4.271435260772705, |
|
"learning_rate": 8.546546546546547e-06, |
|
"loss": 1.0276, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 14.56, |
|
"grad_norm": 3.2531402111053467, |
|
"learning_rate": 8.543543543543544e-06, |
|
"loss": 1.0197, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 14.59, |
|
"grad_norm": 5.6989946365356445, |
|
"learning_rate": 8.540540540540542e-06, |
|
"loss": 0.9598, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 14.62, |
|
"grad_norm": 5.341368198394775, |
|
"learning_rate": 8.537537537537537e-06, |
|
"loss": 1.021, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 14.65, |
|
"grad_norm": 2.799990653991699, |
|
"learning_rate": 8.534534534534535e-06, |
|
"loss": 0.9969, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 14.68, |
|
"grad_norm": 5.680851936340332, |
|
"learning_rate": 8.531531531531532e-06, |
|
"loss": 1.0417, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 14.71, |
|
"grad_norm": 4.588523864746094, |
|
"learning_rate": 8.52852852852853e-06, |
|
"loss": 0.9967, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 14.74, |
|
"grad_norm": 3.542405843734741, |
|
"learning_rate": 8.525525525525527e-06, |
|
"loss": 1.006, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 14.77, |
|
"grad_norm": 5.6309123039245605, |
|
"learning_rate": 8.522522522522523e-06, |
|
"loss": 0.981, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 14.8, |
|
"grad_norm": 4.7694196701049805, |
|
"learning_rate": 8.51951951951952e-06, |
|
"loss": 1.0281, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 14.83, |
|
"grad_norm": 4.105522632598877, |
|
"learning_rate": 8.516516516516518e-06, |
|
"loss": 1.0287, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 14.86, |
|
"grad_norm": 6.505147933959961, |
|
"learning_rate": 8.513513513513514e-06, |
|
"loss": 1.0281, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 14.89, |
|
"grad_norm": 4.380159854888916, |
|
"learning_rate": 8.510510510510511e-06, |
|
"loss": 0.9798, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 14.92, |
|
"grad_norm": 4.89952278137207, |
|
"learning_rate": 8.507507507507509e-06, |
|
"loss": 1.0214, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 14.95, |
|
"grad_norm": 4.74617862701416, |
|
"learning_rate": 8.504504504504505e-06, |
|
"loss": 1.047, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 14.98, |
|
"grad_norm": 3.973592519760132, |
|
"learning_rate": 8.501501501501502e-06, |
|
"loss": 0.9714, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 15.0, |
|
"eval_accuracy": 0.9069, |
|
"eval_loss": 0.6897292137145996, |
|
"eval_runtime": 26.404, |
|
"eval_samples_per_second": 378.73, |
|
"eval_steps_per_second": 1.515, |
|
"step": 4995 |
|
}, |
|
{ |
|
"epoch": 15.02, |
|
"grad_norm": 3.2270395755767822, |
|
"learning_rate": 8.4984984984985e-06, |
|
"loss": 0.8831, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 15.05, |
|
"grad_norm": 3.8310062885284424, |
|
"learning_rate": 8.495495495495495e-06, |
|
"loss": 0.944, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 15.08, |
|
"grad_norm": 4.674674987792969, |
|
"learning_rate": 8.492492492492493e-06, |
|
"loss": 0.9558, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 15.11, |
|
"grad_norm": 5.555057048797607, |
|
"learning_rate": 8.48948948948949e-06, |
|
"loss": 1.0067, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 15.14, |
|
"grad_norm": 4.9669389724731445, |
|
"learning_rate": 8.486486486486488e-06, |
|
"loss": 0.9521, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 15.17, |
|
"grad_norm": 5.623544692993164, |
|
"learning_rate": 8.483483483483485e-06, |
|
"loss": 0.967, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 15.2, |
|
"grad_norm": 3.8444600105285645, |
|
"learning_rate": 8.480480480480481e-06, |
|
"loss": 0.9233, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 15.23, |
|
"grad_norm": 2.460385322570801, |
|
"learning_rate": 8.477477477477478e-06, |
|
"loss": 1.038, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 15.26, |
|
"grad_norm": 3.19730806350708, |
|
"learning_rate": 8.474474474474476e-06, |
|
"loss": 0.982, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 15.29, |
|
"grad_norm": 4.365762233734131, |
|
"learning_rate": 8.471471471471472e-06, |
|
"loss": 0.9707, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 15.32, |
|
"grad_norm": 3.1159462928771973, |
|
"learning_rate": 8.46846846846847e-06, |
|
"loss": 0.9719, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 15.35, |
|
"grad_norm": 4.497450828552246, |
|
"learning_rate": 8.465465465465467e-06, |
|
"loss": 0.905, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 15.38, |
|
"grad_norm": 4.356529712677002, |
|
"learning_rate": 8.462462462462462e-06, |
|
"loss": 0.9398, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 15.41, |
|
"grad_norm": 5.023562431335449, |
|
"learning_rate": 8.45945945945946e-06, |
|
"loss": 0.96, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 15.44, |
|
"grad_norm": 4.207388877868652, |
|
"learning_rate": 8.456456456456457e-06, |
|
"loss": 0.9279, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 15.47, |
|
"grad_norm": 5.606916904449463, |
|
"learning_rate": 8.453453453453453e-06, |
|
"loss": 0.9059, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 15.5, |
|
"grad_norm": 3.986994743347168, |
|
"learning_rate": 8.45045045045045e-06, |
|
"loss": 0.9314, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 15.53, |
|
"grad_norm": 3.543445587158203, |
|
"learning_rate": 8.447447447447448e-06, |
|
"loss": 0.9505, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 15.56, |
|
"grad_norm": 4.311727046966553, |
|
"learning_rate": 8.444444444444446e-06, |
|
"loss": 0.9104, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 15.59, |
|
"grad_norm": 4.540422439575195, |
|
"learning_rate": 8.441441441441443e-06, |
|
"loss": 0.9093, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 15.62, |
|
"grad_norm": 4.569993495941162, |
|
"learning_rate": 8.438438438438439e-06, |
|
"loss": 0.919, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 15.65, |
|
"grad_norm": 4.752470016479492, |
|
"learning_rate": 8.435435435435436e-06, |
|
"loss": 0.8959, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 15.68, |
|
"grad_norm": 3.8412997722625732, |
|
"learning_rate": 8.432432432432434e-06, |
|
"loss": 0.91, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 15.71, |
|
"grad_norm": 4.719942092895508, |
|
"learning_rate": 8.42942942942943e-06, |
|
"loss": 0.9158, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 15.74, |
|
"grad_norm": 4.649778366088867, |
|
"learning_rate": 8.426426426426427e-06, |
|
"loss": 0.8962, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 15.77, |
|
"grad_norm": 6.160231113433838, |
|
"learning_rate": 8.423423423423423e-06, |
|
"loss": 0.9212, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 15.8, |
|
"grad_norm": 4.66862678527832, |
|
"learning_rate": 8.42042042042042e-06, |
|
"loss": 0.9635, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 15.83, |
|
"grad_norm": 8.437322616577148, |
|
"learning_rate": 8.417417417417418e-06, |
|
"loss": 0.8503, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 15.86, |
|
"grad_norm": 6.350149631500244, |
|
"learning_rate": 8.414414414414415e-06, |
|
"loss": 0.8894, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 15.89, |
|
"grad_norm": 4.726019382476807, |
|
"learning_rate": 8.411411411411413e-06, |
|
"loss": 0.9115, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 15.92, |
|
"grad_norm": 3.8601720333099365, |
|
"learning_rate": 8.408408408408409e-06, |
|
"loss": 0.9795, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 15.95, |
|
"grad_norm": 3.991065502166748, |
|
"learning_rate": 8.405405405405406e-06, |
|
"loss": 0.9379, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 15.98, |
|
"grad_norm": 4.6855316162109375, |
|
"learning_rate": 8.402402402402404e-06, |
|
"loss": 0.9141, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 16.0, |
|
"eval_accuracy": 0.9047, |
|
"eval_loss": 0.6327019333839417, |
|
"eval_runtime": 26.6918, |
|
"eval_samples_per_second": 374.647, |
|
"eval_steps_per_second": 1.499, |
|
"step": 5328 |
|
}, |
|
{ |
|
"epoch": 16.01, |
|
"grad_norm": 6.888444423675537, |
|
"learning_rate": 8.399399399399401e-06, |
|
"loss": 0.9849, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 16.04, |
|
"grad_norm": 4.066012382507324, |
|
"learning_rate": 8.396396396396397e-06, |
|
"loss": 0.9008, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 16.07, |
|
"grad_norm": 3.7079455852508545, |
|
"learning_rate": 8.393393393393394e-06, |
|
"loss": 0.9026, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 16.1, |
|
"grad_norm": 4.255161285400391, |
|
"learning_rate": 8.390390390390392e-06, |
|
"loss": 0.9208, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 16.13, |
|
"grad_norm": 4.1778645515441895, |
|
"learning_rate": 8.387387387387388e-06, |
|
"loss": 0.9302, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 16.16, |
|
"grad_norm": 3.891451835632324, |
|
"learning_rate": 8.384384384384385e-06, |
|
"loss": 0.932, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 16.19, |
|
"grad_norm": 5.1825947761535645, |
|
"learning_rate": 8.381381381381381e-06, |
|
"loss": 0.8951, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 16.22, |
|
"grad_norm": 5.37294864654541, |
|
"learning_rate": 8.378378378378378e-06, |
|
"loss": 0.8875, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 16.25, |
|
"grad_norm": 6.806169509887695, |
|
"learning_rate": 8.375375375375376e-06, |
|
"loss": 0.9221, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 16.28, |
|
"grad_norm": 5.307746887207031, |
|
"learning_rate": 8.372372372372373e-06, |
|
"loss": 0.9109, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 16.31, |
|
"grad_norm": 5.282186985015869, |
|
"learning_rate": 8.36936936936937e-06, |
|
"loss": 0.8565, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 16.34, |
|
"grad_norm": 4.689208030700684, |
|
"learning_rate": 8.366366366366368e-06, |
|
"loss": 0.9608, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 16.37, |
|
"grad_norm": 6.57575798034668, |
|
"learning_rate": 8.363363363363364e-06, |
|
"loss": 0.8458, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 16.4, |
|
"grad_norm": 7.146213531494141, |
|
"learning_rate": 8.360360360360362e-06, |
|
"loss": 0.9741, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 16.43, |
|
"grad_norm": 5.553043842315674, |
|
"learning_rate": 8.357357357357359e-06, |
|
"loss": 0.9529, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 16.46, |
|
"grad_norm": 5.228372097015381, |
|
"learning_rate": 8.354354354354355e-06, |
|
"loss": 0.8963, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 16.49, |
|
"grad_norm": 3.6776034832000732, |
|
"learning_rate": 8.351351351351352e-06, |
|
"loss": 0.9088, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 16.52, |
|
"grad_norm": 5.969200134277344, |
|
"learning_rate": 8.348348348348348e-06, |
|
"loss": 0.8746, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 16.55, |
|
"grad_norm": 4.356595039367676, |
|
"learning_rate": 8.345345345345346e-06, |
|
"loss": 0.882, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 16.58, |
|
"grad_norm": 4.669258117675781, |
|
"learning_rate": 8.342342342342343e-06, |
|
"loss": 0.8578, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 16.61, |
|
"grad_norm": 5.297147750854492, |
|
"learning_rate": 8.339339339339339e-06, |
|
"loss": 0.8958, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 16.64, |
|
"grad_norm": 4.6578216552734375, |
|
"learning_rate": 8.336336336336336e-06, |
|
"loss": 0.8829, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 16.67, |
|
"grad_norm": 2.4359843730926514, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 0.8524, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 16.7, |
|
"grad_norm": 5.646344184875488, |
|
"learning_rate": 8.330330330330331e-06, |
|
"loss": 0.8514, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 16.73, |
|
"grad_norm": 3.243112564086914, |
|
"learning_rate": 8.327327327327329e-06, |
|
"loss": 0.8425, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 16.76, |
|
"grad_norm": 4.088650703430176, |
|
"learning_rate": 8.324324324324326e-06, |
|
"loss": 0.8675, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 16.79, |
|
"grad_norm": 4.98700475692749, |
|
"learning_rate": 8.321321321321322e-06, |
|
"loss": 0.8815, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 16.82, |
|
"grad_norm": 4.231012344360352, |
|
"learning_rate": 8.31831831831832e-06, |
|
"loss": 0.8928, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 16.85, |
|
"grad_norm": 5.899864196777344, |
|
"learning_rate": 8.315315315315317e-06, |
|
"loss": 0.8743, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 16.88, |
|
"grad_norm": 6.303285598754883, |
|
"learning_rate": 8.312312312312313e-06, |
|
"loss": 0.8809, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 16.91, |
|
"grad_norm": 6.3348164558410645, |
|
"learning_rate": 8.30930930930931e-06, |
|
"loss": 0.9054, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 16.94, |
|
"grad_norm": 5.820091724395752, |
|
"learning_rate": 8.306306306306306e-06, |
|
"loss": 0.8742, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 16.97, |
|
"grad_norm": 4.7649383544921875, |
|
"learning_rate": 8.303303303303303e-06, |
|
"loss": 0.9408, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"grad_norm": 5.467824935913086, |
|
"learning_rate": 8.300300300300301e-06, |
|
"loss": 0.8937, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 17.0, |
|
"eval_accuracy": 0.9065, |
|
"eval_loss": 0.5862383842468262, |
|
"eval_runtime": 26.5146, |
|
"eval_samples_per_second": 377.151, |
|
"eval_steps_per_second": 1.509, |
|
"step": 5661 |
|
}, |
|
{ |
|
"epoch": 17.03, |
|
"grad_norm": 34.90660858154297, |
|
"learning_rate": 8.297297297297298e-06, |
|
"loss": 0.9659, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 17.06, |
|
"grad_norm": 4.263108730316162, |
|
"learning_rate": 8.294294294294294e-06, |
|
"loss": 0.8664, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 17.09, |
|
"grad_norm": 5.2203803062438965, |
|
"learning_rate": 8.291291291291292e-06, |
|
"loss": 0.8954, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 17.12, |
|
"grad_norm": 5.210129261016846, |
|
"learning_rate": 8.288288288288289e-06, |
|
"loss": 0.843, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 17.15, |
|
"grad_norm": 5.5564446449279785, |
|
"learning_rate": 8.285285285285287e-06, |
|
"loss": 0.8385, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 17.18, |
|
"grad_norm": 4.366509437561035, |
|
"learning_rate": 8.282282282282284e-06, |
|
"loss": 0.8602, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 17.21, |
|
"grad_norm": 4.130090236663818, |
|
"learning_rate": 8.27927927927928e-06, |
|
"loss": 0.8346, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 17.24, |
|
"grad_norm": 5.142440319061279, |
|
"learning_rate": 8.276276276276277e-06, |
|
"loss": 0.8611, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 17.27, |
|
"grad_norm": 4.728271961212158, |
|
"learning_rate": 8.273273273273273e-06, |
|
"loss": 0.8361, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 17.3, |
|
"grad_norm": 5.093695640563965, |
|
"learning_rate": 8.27027027027027e-06, |
|
"loss": 0.8924, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 17.33, |
|
"grad_norm": 3.6658284664154053, |
|
"learning_rate": 8.267267267267268e-06, |
|
"loss": 0.9114, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 17.36, |
|
"grad_norm": 5.7932891845703125, |
|
"learning_rate": 8.264264264264264e-06, |
|
"loss": 0.8824, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 17.39, |
|
"grad_norm": 4.216789722442627, |
|
"learning_rate": 8.261261261261261e-06, |
|
"loss": 0.8199, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 17.42, |
|
"grad_norm": 6.029932022094727, |
|
"learning_rate": 8.258258258258259e-06, |
|
"loss": 0.9022, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 17.45, |
|
"grad_norm": 5.9107985496521, |
|
"learning_rate": 8.255255255255256e-06, |
|
"loss": 0.8622, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 17.48, |
|
"grad_norm": 3.0279664993286133, |
|
"learning_rate": 8.252252252252254e-06, |
|
"loss": 0.7735, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 17.51, |
|
"grad_norm": 4.147484302520752, |
|
"learning_rate": 8.24924924924925e-06, |
|
"loss": 0.8706, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 17.54, |
|
"grad_norm": 4.788569450378418, |
|
"learning_rate": 8.246246246246247e-06, |
|
"loss": 0.8346, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 17.57, |
|
"grad_norm": 5.4136738777160645, |
|
"learning_rate": 8.243243243243245e-06, |
|
"loss": 0.8235, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 17.6, |
|
"grad_norm": 5.190497875213623, |
|
"learning_rate": 8.24024024024024e-06, |
|
"loss": 0.8227, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 17.63, |
|
"grad_norm": 3.4695847034454346, |
|
"learning_rate": 8.237237237237238e-06, |
|
"loss": 0.8652, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 17.66, |
|
"grad_norm": 4.851005554199219, |
|
"learning_rate": 8.234234234234235e-06, |
|
"loss": 0.8731, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 17.69, |
|
"grad_norm": 4.962414264678955, |
|
"learning_rate": 8.231231231231231e-06, |
|
"loss": 0.842, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 17.72, |
|
"grad_norm": 5.408501148223877, |
|
"learning_rate": 8.228228228228229e-06, |
|
"loss": 0.8513, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 17.75, |
|
"grad_norm": 4.542473316192627, |
|
"learning_rate": 8.225225225225226e-06, |
|
"loss": 0.8533, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 17.78, |
|
"grad_norm": 4.716324806213379, |
|
"learning_rate": 8.222222222222222e-06, |
|
"loss": 0.832, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 17.81, |
|
"grad_norm": 2.916837692260742, |
|
"learning_rate": 8.21921921921922e-06, |
|
"loss": 0.8694, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 17.84, |
|
"grad_norm": 5.19083309173584, |
|
"learning_rate": 8.216216216216217e-06, |
|
"loss": 0.8215, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 17.87, |
|
"grad_norm": 4.947284698486328, |
|
"learning_rate": 8.213213213213214e-06, |
|
"loss": 0.7846, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 17.9, |
|
"grad_norm": 4.968457221984863, |
|
"learning_rate": 8.210210210210212e-06, |
|
"loss": 0.7975, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 17.93, |
|
"grad_norm": 3.813506841659546, |
|
"learning_rate": 8.20720720720721e-06, |
|
"loss": 0.8211, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 17.96, |
|
"grad_norm": 4.17324161529541, |
|
"learning_rate": 8.204204204204205e-06, |
|
"loss": 0.8244, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 17.99, |
|
"grad_norm": 7.182973861694336, |
|
"learning_rate": 8.201201201201202e-06, |
|
"loss": 0.79, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 18.0, |
|
"eval_accuracy": 0.9104, |
|
"eval_loss": 0.5389306545257568, |
|
"eval_runtime": 26.2611, |
|
"eval_samples_per_second": 380.792, |
|
"eval_steps_per_second": 1.523, |
|
"step": 5994 |
|
}, |
|
{ |
|
"epoch": 18.02, |
|
"grad_norm": 4.469026565551758, |
|
"learning_rate": 8.198198198198198e-06, |
|
"loss": 0.7846, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 18.05, |
|
"grad_norm": 5.259685039520264, |
|
"learning_rate": 8.195195195195196e-06, |
|
"loss": 0.7855, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 18.08, |
|
"grad_norm": 4.313201904296875, |
|
"learning_rate": 8.192192192192193e-06, |
|
"loss": 0.829, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 18.11, |
|
"grad_norm": 4.317193508148193, |
|
"learning_rate": 8.189189189189189e-06, |
|
"loss": 0.7573, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 18.14, |
|
"grad_norm": 5.022390842437744, |
|
"learning_rate": 8.186186186186186e-06, |
|
"loss": 0.7977, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 18.17, |
|
"grad_norm": 4.285593032836914, |
|
"learning_rate": 8.183183183183184e-06, |
|
"loss": 0.8062, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 18.2, |
|
"grad_norm": 5.628978729248047, |
|
"learning_rate": 8.18018018018018e-06, |
|
"loss": 0.7889, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 18.23, |
|
"grad_norm": 3.628568172454834, |
|
"learning_rate": 8.177177177177177e-06, |
|
"loss": 0.827, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 18.26, |
|
"grad_norm": 5.467437744140625, |
|
"learning_rate": 8.174174174174175e-06, |
|
"loss": 0.8763, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 18.29, |
|
"grad_norm": 4.808385372161865, |
|
"learning_rate": 8.171171171171172e-06, |
|
"loss": 0.8299, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 18.32, |
|
"grad_norm": 7.098764896392822, |
|
"learning_rate": 8.16816816816817e-06, |
|
"loss": 0.8324, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 18.35, |
|
"grad_norm": 4.591148853302002, |
|
"learning_rate": 8.165165165165165e-06, |
|
"loss": 0.7832, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 18.38, |
|
"grad_norm": 4.7516913414001465, |
|
"learning_rate": 8.162162162162163e-06, |
|
"loss": 0.7704, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 18.41, |
|
"grad_norm": 4.104935169219971, |
|
"learning_rate": 8.15915915915916e-06, |
|
"loss": 0.8418, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 18.44, |
|
"grad_norm": 4.477002143859863, |
|
"learning_rate": 8.156156156156156e-06, |
|
"loss": 0.7522, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 18.47, |
|
"grad_norm": 4.729291915893555, |
|
"learning_rate": 8.153153153153154e-06, |
|
"loss": 0.7396, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 18.5, |
|
"grad_norm": 6.148501396179199, |
|
"learning_rate": 8.150150150150151e-06, |
|
"loss": 0.7802, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 18.53, |
|
"grad_norm": 4.43754768371582, |
|
"learning_rate": 8.147147147147147e-06, |
|
"loss": 0.7856, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 18.56, |
|
"grad_norm": 5.1505231857299805, |
|
"learning_rate": 8.144144144144144e-06, |
|
"loss": 0.7998, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 18.59, |
|
"grad_norm": 4.7718305587768555, |
|
"learning_rate": 8.141141141141142e-06, |
|
"loss": 0.835, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 18.62, |
|
"grad_norm": 3.4573163986206055, |
|
"learning_rate": 8.13813813813814e-06, |
|
"loss": 0.7804, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 18.65, |
|
"grad_norm": 4.4927802085876465, |
|
"learning_rate": 8.135135135135137e-06, |
|
"loss": 0.7563, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 18.68, |
|
"grad_norm": 5.646451473236084, |
|
"learning_rate": 8.132132132132133e-06, |
|
"loss": 0.8245, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 18.71, |
|
"grad_norm": 4.9558892250061035, |
|
"learning_rate": 8.12912912912913e-06, |
|
"loss": 0.7667, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 18.74, |
|
"grad_norm": 3.646571159362793, |
|
"learning_rate": 8.126126126126128e-06, |
|
"loss": 0.8132, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 18.77, |
|
"grad_norm": 3.691779851913452, |
|
"learning_rate": 8.123123123123123e-06, |
|
"loss": 0.8171, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 18.8, |
|
"grad_norm": 4.477748394012451, |
|
"learning_rate": 8.12012012012012e-06, |
|
"loss": 0.8198, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 18.83, |
|
"grad_norm": 4.975278377532959, |
|
"learning_rate": 8.117117117117118e-06, |
|
"loss": 0.8123, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 18.86, |
|
"grad_norm": 4.925590515136719, |
|
"learning_rate": 8.114114114114114e-06, |
|
"loss": 0.8034, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 18.89, |
|
"grad_norm": 3.9182472229003906, |
|
"learning_rate": 8.111111111111112e-06, |
|
"loss": 0.7115, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 18.92, |
|
"grad_norm": 10.641616821289062, |
|
"learning_rate": 8.108108108108109e-06, |
|
"loss": 0.794, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 18.95, |
|
"grad_norm": 4.6885786056518555, |
|
"learning_rate": 8.105105105105105e-06, |
|
"loss": 0.8127, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 18.98, |
|
"grad_norm": 5.1985578536987305, |
|
"learning_rate": 8.102102102102102e-06, |
|
"loss": 0.6761, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 19.0, |
|
"eval_accuracy": 0.9075, |
|
"eval_loss": 0.5091767907142639, |
|
"eval_runtime": 26.335, |
|
"eval_samples_per_second": 379.723, |
|
"eval_steps_per_second": 1.519, |
|
"step": 6327 |
|
}, |
|
{ |
|
"epoch": 19.01, |
|
"grad_norm": 3.5245776176452637, |
|
"learning_rate": 8.0990990990991e-06, |
|
"loss": 0.8029, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 19.04, |
|
"grad_norm": 4.780681133270264, |
|
"learning_rate": 8.096096096096097e-06, |
|
"loss": 0.8413, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 19.07, |
|
"grad_norm": 6.333914279937744, |
|
"learning_rate": 8.093093093093095e-06, |
|
"loss": 0.7798, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 19.1, |
|
"grad_norm": 6.965935707092285, |
|
"learning_rate": 8.09009009009009e-06, |
|
"loss": 0.7757, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 19.13, |
|
"grad_norm": 6.49807071685791, |
|
"learning_rate": 8.087087087087088e-06, |
|
"loss": 0.7936, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 19.16, |
|
"grad_norm": 3.88280987739563, |
|
"learning_rate": 8.084084084084085e-06, |
|
"loss": 0.6997, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 19.19, |
|
"grad_norm": 5.297958850860596, |
|
"learning_rate": 8.081081081081081e-06, |
|
"loss": 0.7622, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 19.22, |
|
"grad_norm": 4.647511959075928, |
|
"learning_rate": 8.078078078078079e-06, |
|
"loss": 0.7805, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 19.25, |
|
"grad_norm": 4.701191425323486, |
|
"learning_rate": 8.075075075075076e-06, |
|
"loss": 0.7882, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 19.28, |
|
"grad_norm": 5.120426177978516, |
|
"learning_rate": 8.072072072072072e-06, |
|
"loss": 0.767, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 19.31, |
|
"grad_norm": 4.553008079528809, |
|
"learning_rate": 8.06906906906907e-06, |
|
"loss": 0.7741, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 19.34, |
|
"grad_norm": 5.475098133087158, |
|
"learning_rate": 8.066066066066067e-06, |
|
"loss": 0.7638, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 19.37, |
|
"grad_norm": 3.622540235519409, |
|
"learning_rate": 8.063063063063063e-06, |
|
"loss": 0.7792, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 19.4, |
|
"grad_norm": 5.9133076667785645, |
|
"learning_rate": 8.06006006006006e-06, |
|
"loss": 0.7559, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 19.43, |
|
"grad_norm": 4.649819850921631, |
|
"learning_rate": 8.057057057057058e-06, |
|
"loss": 0.7506, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 19.46, |
|
"grad_norm": 4.202900409698486, |
|
"learning_rate": 8.054054054054055e-06, |
|
"loss": 0.714, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 19.49, |
|
"grad_norm": 6.11083459854126, |
|
"learning_rate": 8.051051051051053e-06, |
|
"loss": 0.8234, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 19.52, |
|
"grad_norm": 5.627078533172607, |
|
"learning_rate": 8.048048048048048e-06, |
|
"loss": 0.7255, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 19.55, |
|
"grad_norm": 3.8992910385131836, |
|
"learning_rate": 8.045045045045046e-06, |
|
"loss": 0.8122, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 19.58, |
|
"grad_norm": 5.471251010894775, |
|
"learning_rate": 8.042042042042043e-06, |
|
"loss": 0.7491, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 19.61, |
|
"grad_norm": 4.810721397399902, |
|
"learning_rate": 8.03903903903904e-06, |
|
"loss": 0.7327, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 19.64, |
|
"grad_norm": 4.824802875518799, |
|
"learning_rate": 8.036036036036037e-06, |
|
"loss": 0.7356, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 19.67, |
|
"grad_norm": 4.189336776733398, |
|
"learning_rate": 8.033033033033034e-06, |
|
"loss": 0.7634, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 19.7, |
|
"grad_norm": 6.21077299118042, |
|
"learning_rate": 8.03003003003003e-06, |
|
"loss": 0.7516, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 19.73, |
|
"grad_norm": 4.769148349761963, |
|
"learning_rate": 8.027027027027027e-06, |
|
"loss": 0.7596, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 19.76, |
|
"grad_norm": 4.551455020904541, |
|
"learning_rate": 8.024024024024025e-06, |
|
"loss": 0.7268, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 19.79, |
|
"grad_norm": 5.299874782562256, |
|
"learning_rate": 8.021021021021022e-06, |
|
"loss": 0.699, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 19.82, |
|
"grad_norm": 4.511897563934326, |
|
"learning_rate": 8.018018018018018e-06, |
|
"loss": 0.8107, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 19.85, |
|
"grad_norm": 3.8506810665130615, |
|
"learning_rate": 8.015015015015016e-06, |
|
"loss": 0.8022, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 19.88, |
|
"grad_norm": 4.714992523193359, |
|
"learning_rate": 8.012012012012013e-06, |
|
"loss": 0.7327, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 19.91, |
|
"grad_norm": 5.256269931793213, |
|
"learning_rate": 8.00900900900901e-06, |
|
"loss": 0.7623, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 19.94, |
|
"grad_norm": 4.982200622558594, |
|
"learning_rate": 8.006006006006006e-06, |
|
"loss": 0.7532, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 19.97, |
|
"grad_norm": 4.6494574546813965, |
|
"learning_rate": 8.003003003003004e-06, |
|
"loss": 0.7429, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"grad_norm": 21.53489875793457, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.7064, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 20.0, |
|
"eval_accuracy": 0.9162, |
|
"eval_loss": 0.4759812355041504, |
|
"eval_runtime": 26.709, |
|
"eval_samples_per_second": 374.405, |
|
"eval_steps_per_second": 1.498, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 20.03, |
|
"grad_norm": 4.216804504394531, |
|
"learning_rate": 7.996996996996997e-06, |
|
"loss": 0.75, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 20.06, |
|
"grad_norm": 3.8654181957244873, |
|
"learning_rate": 7.993993993993995e-06, |
|
"loss": 0.7137, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 20.09, |
|
"grad_norm": 6.066682815551758, |
|
"learning_rate": 7.990990990990992e-06, |
|
"loss": 0.7759, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 20.12, |
|
"grad_norm": 3.9181618690490723, |
|
"learning_rate": 7.987987987987988e-06, |
|
"loss": 0.7022, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 20.15, |
|
"grad_norm": 4.61527156829834, |
|
"learning_rate": 7.984984984984985e-06, |
|
"loss": 0.6314, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 20.18, |
|
"grad_norm": 5.603513717651367, |
|
"learning_rate": 7.981981981981983e-06, |
|
"loss": 0.7045, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 20.21, |
|
"grad_norm": 3.54504132270813, |
|
"learning_rate": 7.97897897897898e-06, |
|
"loss": 0.7426, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 20.24, |
|
"grad_norm": 6.494993686676025, |
|
"learning_rate": 7.975975975975978e-06, |
|
"loss": 0.7779, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 20.27, |
|
"grad_norm": 4.856626510620117, |
|
"learning_rate": 7.972972972972974e-06, |
|
"loss": 0.7332, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 20.3, |
|
"grad_norm": 4.99015474319458, |
|
"learning_rate": 7.969969969969971e-06, |
|
"loss": 0.7016, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 20.33, |
|
"grad_norm": 4.442424297332764, |
|
"learning_rate": 7.966966966966969e-06, |
|
"loss": 0.7601, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 20.36, |
|
"grad_norm": 4.10810661315918, |
|
"learning_rate": 7.963963963963964e-06, |
|
"loss": 0.8095, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 20.39, |
|
"grad_norm": 5.386140823364258, |
|
"learning_rate": 7.960960960960962e-06, |
|
"loss": 0.7368, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 20.42, |
|
"grad_norm": 3.9678328037261963, |
|
"learning_rate": 7.95795795795796e-06, |
|
"loss": 0.7755, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 20.45, |
|
"grad_norm": 4.667354583740234, |
|
"learning_rate": 7.954954954954955e-06, |
|
"loss": 0.754, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 20.48, |
|
"grad_norm": 4.3042683601379395, |
|
"learning_rate": 7.951951951951953e-06, |
|
"loss": 0.7509, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 20.51, |
|
"grad_norm": 4.8756585121154785, |
|
"learning_rate": 7.948948948948948e-06, |
|
"loss": 0.7243, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 20.54, |
|
"grad_norm": 4.626062870025635, |
|
"learning_rate": 7.945945945945946e-06, |
|
"loss": 0.7072, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 20.57, |
|
"grad_norm": 7.135468482971191, |
|
"learning_rate": 7.942942942942943e-06, |
|
"loss": 0.6947, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 20.6, |
|
"grad_norm": 6.38662576675415, |
|
"learning_rate": 7.93993993993994e-06, |
|
"loss": 0.7602, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 20.63, |
|
"grad_norm": 5.721635818481445, |
|
"learning_rate": 7.936936936936938e-06, |
|
"loss": 0.668, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 20.66, |
|
"grad_norm": 4.916971683502197, |
|
"learning_rate": 7.933933933933936e-06, |
|
"loss": 0.6956, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 20.69, |
|
"grad_norm": 3.4424617290496826, |
|
"learning_rate": 7.930930930930931e-06, |
|
"loss": 0.7335, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 20.72, |
|
"grad_norm": 4.152014255523682, |
|
"learning_rate": 7.927927927927929e-06, |
|
"loss": 0.6986, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 20.75, |
|
"grad_norm": 5.503407001495361, |
|
"learning_rate": 7.924924924924926e-06, |
|
"loss": 0.7834, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 20.78, |
|
"grad_norm": 4.274868011474609, |
|
"learning_rate": 7.921921921921922e-06, |
|
"loss": 0.6885, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 20.81, |
|
"grad_norm": 4.806682109832764, |
|
"learning_rate": 7.91891891891892e-06, |
|
"loss": 0.6944, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 20.84, |
|
"grad_norm": 10.31600284576416, |
|
"learning_rate": 7.915915915915915e-06, |
|
"loss": 0.7379, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 20.87, |
|
"grad_norm": 5.682564735412598, |
|
"learning_rate": 7.912912912912913e-06, |
|
"loss": 0.7691, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 20.9, |
|
"grad_norm": 6.624948978424072, |
|
"learning_rate": 7.90990990990991e-06, |
|
"loss": 0.7127, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 20.93, |
|
"grad_norm": 4.5933146476745605, |
|
"learning_rate": 7.906906906906908e-06, |
|
"loss": 0.7696, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 20.96, |
|
"grad_norm": 5.328336238861084, |
|
"learning_rate": 7.903903903903904e-06, |
|
"loss": 0.7376, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 20.99, |
|
"grad_norm": 5.580850601196289, |
|
"learning_rate": 7.900900900900901e-06, |
|
"loss": 0.7224, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 21.0, |
|
"eval_accuracy": 0.9127, |
|
"eval_loss": 0.45015576481819153, |
|
"eval_runtime": 26.4818, |
|
"eval_samples_per_second": 377.619, |
|
"eval_steps_per_second": 1.51, |
|
"step": 6993 |
|
}, |
|
{ |
|
"epoch": 21.02, |
|
"grad_norm": 4.94426965713501, |
|
"learning_rate": 7.897897897897899e-06, |
|
"loss": 0.89, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 21.05, |
|
"grad_norm": 4.805978298187256, |
|
"learning_rate": 7.894894894894896e-06, |
|
"loss": 0.7465, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 21.08, |
|
"grad_norm": 2.917875051498413, |
|
"learning_rate": 7.891891891891894e-06, |
|
"loss": 0.6913, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 21.11, |
|
"grad_norm": 5.422092914581299, |
|
"learning_rate": 7.88888888888889e-06, |
|
"loss": 0.6617, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 21.14, |
|
"grad_norm": 4.472092628479004, |
|
"learning_rate": 7.885885885885887e-06, |
|
"loss": 0.6749, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 21.17, |
|
"grad_norm": 4.079427242279053, |
|
"learning_rate": 7.882882882882884e-06, |
|
"loss": 0.6885, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 21.2, |
|
"grad_norm": 5.0683746337890625, |
|
"learning_rate": 7.87987987987988e-06, |
|
"loss": 0.7106, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 21.23, |
|
"grad_norm": 3.5775668621063232, |
|
"learning_rate": 7.876876876876878e-06, |
|
"loss": 0.7099, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 21.26, |
|
"grad_norm": 4.4518609046936035, |
|
"learning_rate": 7.873873873873873e-06, |
|
"loss": 0.6964, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 21.29, |
|
"grad_norm": 5.447687149047852, |
|
"learning_rate": 7.870870870870871e-06, |
|
"loss": 0.7655, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 21.32, |
|
"grad_norm": 5.607600212097168, |
|
"learning_rate": 7.867867867867868e-06, |
|
"loss": 0.6468, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 21.35, |
|
"grad_norm": 4.0597405433654785, |
|
"learning_rate": 7.864864864864866e-06, |
|
"loss": 0.7164, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 21.38, |
|
"grad_norm": 3.5956780910491943, |
|
"learning_rate": 7.861861861861863e-06, |
|
"loss": 0.6696, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 21.41, |
|
"grad_norm": 5.072645664215088, |
|
"learning_rate": 7.858858858858859e-06, |
|
"loss": 0.7251, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 21.44, |
|
"grad_norm": 5.149384021759033, |
|
"learning_rate": 7.855855855855857e-06, |
|
"loss": 0.7253, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 21.47, |
|
"grad_norm": 4.9628729820251465, |
|
"learning_rate": 7.852852852852854e-06, |
|
"loss": 0.6974, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 21.5, |
|
"grad_norm": 5.860617637634277, |
|
"learning_rate": 7.849849849849852e-06, |
|
"loss": 0.6898, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 21.53, |
|
"grad_norm": 3.291947364807129, |
|
"learning_rate": 7.846846846846847e-06, |
|
"loss": 0.7617, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 21.56, |
|
"grad_norm": 4.9691643714904785, |
|
"learning_rate": 7.843843843843845e-06, |
|
"loss": 0.6942, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 21.59, |
|
"grad_norm": 3.975054979324341, |
|
"learning_rate": 7.84084084084084e-06, |
|
"loss": 0.7012, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 21.62, |
|
"grad_norm": 5.745656967163086, |
|
"learning_rate": 7.837837837837838e-06, |
|
"loss": 0.6321, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 21.65, |
|
"grad_norm": 4.987421989440918, |
|
"learning_rate": 7.834834834834836e-06, |
|
"loss": 0.7249, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 21.68, |
|
"grad_norm": 3.740715980529785, |
|
"learning_rate": 7.831831831831831e-06, |
|
"loss": 0.7071, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 21.71, |
|
"grad_norm": 3.934730052947998, |
|
"learning_rate": 7.828828828828829e-06, |
|
"loss": 0.6625, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 21.74, |
|
"grad_norm": 4.433104991912842, |
|
"learning_rate": 7.825825825825826e-06, |
|
"loss": 0.7322, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 21.77, |
|
"grad_norm": 5.523029327392578, |
|
"learning_rate": 7.822822822822824e-06, |
|
"loss": 0.7235, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 21.8, |
|
"grad_norm": 5.185804843902588, |
|
"learning_rate": 7.819819819819821e-06, |
|
"loss": 0.7276, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 21.83, |
|
"grad_norm": 3.194908857345581, |
|
"learning_rate": 7.816816816816819e-06, |
|
"loss": 0.6634, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 21.86, |
|
"grad_norm": 4.772044658660889, |
|
"learning_rate": 7.813813813813815e-06, |
|
"loss": 0.694, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 21.89, |
|
"grad_norm": 8.646495819091797, |
|
"learning_rate": 7.810810810810812e-06, |
|
"loss": 0.6685, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 21.92, |
|
"grad_norm": 5.639026165008545, |
|
"learning_rate": 7.807807807807808e-06, |
|
"loss": 0.6577, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 21.95, |
|
"grad_norm": 3.9437367916107178, |
|
"learning_rate": 7.804804804804805e-06, |
|
"loss": 0.7355, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 21.98, |
|
"grad_norm": 7.671295166015625, |
|
"learning_rate": 7.801801801801803e-06, |
|
"loss": 0.712, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 22.0, |
|
"eval_accuracy": 0.913, |
|
"eval_loss": 0.42887207865715027, |
|
"eval_runtime": 25.909, |
|
"eval_samples_per_second": 385.966, |
|
"eval_steps_per_second": 1.544, |
|
"step": 7326 |
|
}, |
|
{ |
|
"epoch": 22.01, |
|
"grad_norm": 5.855823993682861, |
|
"learning_rate": 7.798798798798799e-06, |
|
"loss": 0.7903, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 22.04, |
|
"grad_norm": 4.0135931968688965, |
|
"learning_rate": 7.795795795795796e-06, |
|
"loss": 0.6977, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 22.07, |
|
"grad_norm": 5.877246856689453, |
|
"learning_rate": 7.792792792792793e-06, |
|
"loss": 0.698, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 22.1, |
|
"grad_norm": 4.86218786239624, |
|
"learning_rate": 7.78978978978979e-06, |
|
"loss": 0.6627, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 22.13, |
|
"grad_norm": 4.43895149230957, |
|
"learning_rate": 7.786786786786787e-06, |
|
"loss": 0.6337, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 22.16, |
|
"grad_norm": 6.09828519821167, |
|
"learning_rate": 7.783783783783784e-06, |
|
"loss": 0.6661, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 22.19, |
|
"grad_norm": 3.708390235900879, |
|
"learning_rate": 7.780780780780782e-06, |
|
"loss": 0.6576, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 22.22, |
|
"grad_norm": 3.6503195762634277, |
|
"learning_rate": 7.77777777777778e-06, |
|
"loss": 0.665, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 22.25, |
|
"grad_norm": 5.96017599105835, |
|
"learning_rate": 7.774774774774777e-06, |
|
"loss": 0.6926, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 22.28, |
|
"grad_norm": 4.6361823081970215, |
|
"learning_rate": 7.771771771771772e-06, |
|
"loss": 0.6541, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 22.31, |
|
"grad_norm": 6.2728657722473145, |
|
"learning_rate": 7.76876876876877e-06, |
|
"loss": 0.6683, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 22.34, |
|
"grad_norm": 4.679554462432861, |
|
"learning_rate": 7.765765765765766e-06, |
|
"loss": 0.6363, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 22.37, |
|
"grad_norm": 5.297094821929932, |
|
"learning_rate": 7.762762762762763e-06, |
|
"loss": 0.6763, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 22.4, |
|
"grad_norm": 5.149691581726074, |
|
"learning_rate": 7.75975975975976e-06, |
|
"loss": 0.6713, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 22.43, |
|
"grad_norm": 4.853106498718262, |
|
"learning_rate": 7.756756756756756e-06, |
|
"loss": 0.6743, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 22.46, |
|
"grad_norm": 4.042623996734619, |
|
"learning_rate": 7.753753753753754e-06, |
|
"loss": 0.7152, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 22.49, |
|
"grad_norm": 4.078772068023682, |
|
"learning_rate": 7.750750750750751e-06, |
|
"loss": 0.705, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 22.52, |
|
"grad_norm": 6.992520332336426, |
|
"learning_rate": 7.747747747747749e-06, |
|
"loss": 0.6737, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 22.55, |
|
"grad_norm": 4.892293930053711, |
|
"learning_rate": 7.744744744744745e-06, |
|
"loss": 0.6685, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 22.58, |
|
"grad_norm": 3.3463852405548096, |
|
"learning_rate": 7.741741741741742e-06, |
|
"loss": 0.6417, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 22.61, |
|
"grad_norm": 4.571963310241699, |
|
"learning_rate": 7.73873873873874e-06, |
|
"loss": 0.678, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 22.64, |
|
"grad_norm": 4.723329544067383, |
|
"learning_rate": 7.735735735735737e-06, |
|
"loss": 0.6616, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 22.67, |
|
"grad_norm": 5.058360576629639, |
|
"learning_rate": 7.732732732732733e-06, |
|
"loss": 0.6625, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 22.7, |
|
"grad_norm": 4.79093599319458, |
|
"learning_rate": 7.72972972972973e-06, |
|
"loss": 0.6757, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 22.73, |
|
"grad_norm": 4.190638065338135, |
|
"learning_rate": 7.726726726726728e-06, |
|
"loss": 0.6625, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 22.76, |
|
"grad_norm": 4.085198879241943, |
|
"learning_rate": 7.723723723723724e-06, |
|
"loss": 0.7429, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 22.79, |
|
"grad_norm": 5.105234146118164, |
|
"learning_rate": 7.720720720720721e-06, |
|
"loss": 0.6598, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 22.82, |
|
"grad_norm": 5.374044895172119, |
|
"learning_rate": 7.717717717717719e-06, |
|
"loss": 0.7003, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 22.85, |
|
"grad_norm": 6.302853107452393, |
|
"learning_rate": 7.714714714714714e-06, |
|
"loss": 0.6475, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 22.88, |
|
"grad_norm": 4.560659885406494, |
|
"learning_rate": 7.711711711711712e-06, |
|
"loss": 0.695, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 22.91, |
|
"grad_norm": 4.321108818054199, |
|
"learning_rate": 7.70870870870871e-06, |
|
"loss": 0.6393, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 22.94, |
|
"grad_norm": 4.939061164855957, |
|
"learning_rate": 7.705705705705707e-06, |
|
"loss": 0.6672, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 22.97, |
|
"grad_norm": 3.5717639923095703, |
|
"learning_rate": 7.702702702702704e-06, |
|
"loss": 0.6541, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 23.0, |
|
"eval_accuracy": 0.913, |
|
"eval_loss": 0.4087793827056885, |
|
"eval_runtime": 26.1621, |
|
"eval_samples_per_second": 382.232, |
|
"eval_steps_per_second": 1.529, |
|
"step": 7659 |
|
}, |
|
{ |
|
"epoch": 23.0, |
|
"grad_norm": 6.035078048706055, |
|
"learning_rate": 7.6996996996997e-06, |
|
"loss": 0.6338, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 23.03, |
|
"grad_norm": 6.63695764541626, |
|
"learning_rate": 7.696696696696698e-06, |
|
"loss": 0.6343, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 23.06, |
|
"grad_norm": 4.683477878570557, |
|
"learning_rate": 7.693693693693695e-06, |
|
"loss": 0.6791, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 23.09, |
|
"grad_norm": 4.074741363525391, |
|
"learning_rate": 7.69069069069069e-06, |
|
"loss": 0.6373, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 23.12, |
|
"grad_norm": 3.735003709793091, |
|
"learning_rate": 7.687687687687688e-06, |
|
"loss": 0.5999, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 23.15, |
|
"grad_norm": 4.406135082244873, |
|
"learning_rate": 7.684684684684686e-06, |
|
"loss": 0.6841, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 23.18, |
|
"grad_norm": 5.173962116241455, |
|
"learning_rate": 7.681681681681682e-06, |
|
"loss": 0.6907, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 23.21, |
|
"grad_norm": 5.311886310577393, |
|
"learning_rate": 7.678678678678679e-06, |
|
"loss": 0.6303, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 23.24, |
|
"grad_norm": 8.055031776428223, |
|
"learning_rate": 7.675675675675676e-06, |
|
"loss": 0.6364, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 23.27, |
|
"grad_norm": 4.631924152374268, |
|
"learning_rate": 7.672672672672672e-06, |
|
"loss": 0.6847, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 23.3, |
|
"grad_norm": 3.524216413497925, |
|
"learning_rate": 7.66966966966967e-06, |
|
"loss": 0.6307, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 23.33, |
|
"grad_norm": 6.029527187347412, |
|
"learning_rate": 7.666666666666667e-06, |
|
"loss": 0.6106, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 23.36, |
|
"grad_norm": 4.7900543212890625, |
|
"learning_rate": 7.663663663663665e-06, |
|
"loss": 0.6498, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 23.39, |
|
"grad_norm": 5.736597537994385, |
|
"learning_rate": 7.660660660660662e-06, |
|
"loss": 0.6356, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 23.42, |
|
"grad_norm": 4.166688919067383, |
|
"learning_rate": 7.657657657657658e-06, |
|
"loss": 0.657, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 23.45, |
|
"grad_norm": 5.274576187133789, |
|
"learning_rate": 7.654654654654655e-06, |
|
"loss": 0.6014, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 23.48, |
|
"grad_norm": 6.013502597808838, |
|
"learning_rate": 7.651651651651653e-06, |
|
"loss": 0.6372, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 23.51, |
|
"grad_norm": 5.510165214538574, |
|
"learning_rate": 7.648648648648649e-06, |
|
"loss": 0.6537, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 23.54, |
|
"grad_norm": 4.288418769836426, |
|
"learning_rate": 7.645645645645646e-06, |
|
"loss": 0.6389, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 23.57, |
|
"grad_norm": 4.00223970413208, |
|
"learning_rate": 7.642642642642644e-06, |
|
"loss": 0.7018, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 23.6, |
|
"grad_norm": 3.205495595932007, |
|
"learning_rate": 7.63963963963964e-06, |
|
"loss": 0.5617, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 23.63, |
|
"grad_norm": 5.546922206878662, |
|
"learning_rate": 7.636636636636637e-06, |
|
"loss": 0.6275, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 23.66, |
|
"grad_norm": 4.7760114669799805, |
|
"learning_rate": 7.633633633633634e-06, |
|
"loss": 0.6531, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 23.69, |
|
"grad_norm": 4.954517841339111, |
|
"learning_rate": 7.63063063063063e-06, |
|
"loss": 0.6735, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 23.72, |
|
"grad_norm": 3.2795095443725586, |
|
"learning_rate": 7.6276276276276285e-06, |
|
"loss": 0.639, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 23.75, |
|
"grad_norm": 4.749567031860352, |
|
"learning_rate": 7.624624624624624e-06, |
|
"loss": 0.6141, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 23.78, |
|
"grad_norm": 4.061180114746094, |
|
"learning_rate": 7.621621621621622e-06, |
|
"loss": 0.6722, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 23.81, |
|
"grad_norm": 5.958366870880127, |
|
"learning_rate": 7.618618618618619e-06, |
|
"loss": 0.6463, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 23.84, |
|
"grad_norm": 4.867851257324219, |
|
"learning_rate": 7.615615615615616e-06, |
|
"loss": 0.6448, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 23.87, |
|
"grad_norm": 6.845958232879639, |
|
"learning_rate": 7.612612612612613e-06, |
|
"loss": 0.6182, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 23.9, |
|
"grad_norm": 4.476239204406738, |
|
"learning_rate": 7.609609609609611e-06, |
|
"loss": 0.5616, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 23.93, |
|
"grad_norm": 3.8598573207855225, |
|
"learning_rate": 7.606606606606607e-06, |
|
"loss": 0.6865, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 23.96, |
|
"grad_norm": 4.300548076629639, |
|
"learning_rate": 7.603603603603604e-06, |
|
"loss": 0.6685, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 23.99, |
|
"grad_norm": 5.2045512199401855, |
|
"learning_rate": 7.600600600600602e-06, |
|
"loss": 0.6338, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 24.0, |
|
"eval_accuracy": 0.9172, |
|
"eval_loss": 0.3914088010787964, |
|
"eval_runtime": 26.5071, |
|
"eval_samples_per_second": 377.258, |
|
"eval_steps_per_second": 1.509, |
|
"step": 7992 |
|
}, |
|
{ |
|
"epoch": 24.02, |
|
"grad_norm": 4.160776138305664, |
|
"learning_rate": 7.597597597597598e-06, |
|
"loss": 0.6086, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 24.05, |
|
"grad_norm": 5.0602850914001465, |
|
"learning_rate": 7.594594594594596e-06, |
|
"loss": 0.6292, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 24.08, |
|
"grad_norm": 5.049746990203857, |
|
"learning_rate": 7.591591591591592e-06, |
|
"loss": 0.6355, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 24.11, |
|
"grad_norm": 2.8283441066741943, |
|
"learning_rate": 7.588588588588589e-06, |
|
"loss": 0.5961, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 24.14, |
|
"grad_norm": 4.665635585784912, |
|
"learning_rate": 7.5855855855855865e-06, |
|
"loss": 0.6135, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 24.17, |
|
"grad_norm": 5.210675239562988, |
|
"learning_rate": 7.582582582582583e-06, |
|
"loss": 0.6286, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 24.2, |
|
"grad_norm": 5.123960971832275, |
|
"learning_rate": 7.57957957957958e-06, |
|
"loss": 0.6597, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 24.23, |
|
"grad_norm": 4.584262847900391, |
|
"learning_rate": 7.576576576576577e-06, |
|
"loss": 0.6415, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 24.26, |
|
"grad_norm": 6.948488712310791, |
|
"learning_rate": 7.573573573573574e-06, |
|
"loss": 0.602, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 24.29, |
|
"grad_norm": 5.196147441864014, |
|
"learning_rate": 7.570570570570571e-06, |
|
"loss": 0.6441, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 24.32, |
|
"grad_norm": 4.633663177490234, |
|
"learning_rate": 7.567567567567569e-06, |
|
"loss": 0.6568, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 24.35, |
|
"grad_norm": 4.8977484703063965, |
|
"learning_rate": 7.5645645645645646e-06, |
|
"loss": 0.6384, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 24.38, |
|
"grad_norm": 4.151755332946777, |
|
"learning_rate": 7.561561561561562e-06, |
|
"loss": 0.6328, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 24.41, |
|
"grad_norm": 4.1300883293151855, |
|
"learning_rate": 7.5585585585585595e-06, |
|
"loss": 0.6744, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 24.44, |
|
"grad_norm": 5.872833728790283, |
|
"learning_rate": 7.555555555555556e-06, |
|
"loss": 0.6554, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 24.47, |
|
"grad_norm": 4.006340503692627, |
|
"learning_rate": 7.552552552552554e-06, |
|
"loss": 0.63, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 24.5, |
|
"grad_norm": 5.045310974121094, |
|
"learning_rate": 7.549549549549549e-06, |
|
"loss": 0.6258, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 24.53, |
|
"grad_norm": 4.250802040100098, |
|
"learning_rate": 7.546546546546547e-06, |
|
"loss": 0.6523, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 24.56, |
|
"grad_norm": 4.446349620819092, |
|
"learning_rate": 7.543543543543544e-06, |
|
"loss": 0.61, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 24.59, |
|
"grad_norm": 5.197299957275391, |
|
"learning_rate": 7.540540540540541e-06, |
|
"loss": 0.619, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 24.62, |
|
"grad_norm": 4.9567484855651855, |
|
"learning_rate": 7.5375375375375385e-06, |
|
"loss": 0.5888, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 24.65, |
|
"grad_norm": 4.429915428161621, |
|
"learning_rate": 7.534534534534535e-06, |
|
"loss": 0.6331, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 24.68, |
|
"grad_norm": 5.928262233734131, |
|
"learning_rate": 7.531531531531532e-06, |
|
"loss": 0.6002, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 24.71, |
|
"grad_norm": 3.7171638011932373, |
|
"learning_rate": 7.528528528528529e-06, |
|
"loss": 0.6216, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 24.74, |
|
"grad_norm": 3.967848300933838, |
|
"learning_rate": 7.525525525525527e-06, |
|
"loss": 0.5985, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 24.77, |
|
"grad_norm": 4.106054306030273, |
|
"learning_rate": 7.5225225225225225e-06, |
|
"loss": 0.6488, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 24.8, |
|
"grad_norm": 4.115420341491699, |
|
"learning_rate": 7.51951951951952e-06, |
|
"loss": 0.6393, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 24.83, |
|
"grad_norm": 5.088920593261719, |
|
"learning_rate": 7.516516516516517e-06, |
|
"loss": 0.6386, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 24.86, |
|
"grad_norm": 4.073686122894287, |
|
"learning_rate": 7.513513513513514e-06, |
|
"loss": 0.5533, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 24.89, |
|
"grad_norm": 4.537994861602783, |
|
"learning_rate": 7.5105105105105116e-06, |
|
"loss": 0.6044, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 24.92, |
|
"grad_norm": 4.048253059387207, |
|
"learning_rate": 7.507507507507507e-06, |
|
"loss": 0.5597, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 24.95, |
|
"grad_norm": 6.220718860626221, |
|
"learning_rate": 7.504504504504505e-06, |
|
"loss": 0.6354, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 24.98, |
|
"grad_norm": 4.656418800354004, |
|
"learning_rate": 7.501501501501502e-06, |
|
"loss": 0.6097, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 25.0, |
|
"eval_accuracy": 0.9182, |
|
"eval_loss": 0.37756532430648804, |
|
"eval_runtime": 26.2998, |
|
"eval_samples_per_second": 380.232, |
|
"eval_steps_per_second": 1.521, |
|
"step": 8325 |
|
}, |
|
{ |
|
"epoch": 25.02, |
|
"grad_norm": 4.741297721862793, |
|
"learning_rate": 7.498498498498499e-06, |
|
"loss": 0.6371, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 25.05, |
|
"grad_norm": 7.324318885803223, |
|
"learning_rate": 7.495495495495496e-06, |
|
"loss": 0.6157, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 25.08, |
|
"grad_norm": 5.345495700836182, |
|
"learning_rate": 7.492492492492494e-06, |
|
"loss": 0.6325, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 25.11, |
|
"grad_norm": 5.9051690101623535, |
|
"learning_rate": 7.48948948948949e-06, |
|
"loss": 0.5899, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 25.14, |
|
"grad_norm": 6.787704944610596, |
|
"learning_rate": 7.486486486486487e-06, |
|
"loss": 0.6416, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 25.17, |
|
"grad_norm": 4.713385581970215, |
|
"learning_rate": 7.483483483483485e-06, |
|
"loss": 0.6718, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 25.2, |
|
"grad_norm": 5.411506652832031, |
|
"learning_rate": 7.480480480480481e-06, |
|
"loss": 0.6059, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 25.23, |
|
"grad_norm": 4.1856207847595215, |
|
"learning_rate": 7.477477477477479e-06, |
|
"loss": 0.5297, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 25.26, |
|
"grad_norm": 3.9937260150909424, |
|
"learning_rate": 7.4744744744744745e-06, |
|
"loss": 0.607, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 25.29, |
|
"grad_norm": 3.861163854598999, |
|
"learning_rate": 7.471471471471472e-06, |
|
"loss": 0.5885, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 25.32, |
|
"grad_norm": 5.328300952911377, |
|
"learning_rate": 7.4684684684684695e-06, |
|
"loss": 0.695, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 25.35, |
|
"grad_norm": 4.425115585327148, |
|
"learning_rate": 7.465465465465466e-06, |
|
"loss": 0.5812, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 25.38, |
|
"grad_norm": 5.137642860412598, |
|
"learning_rate": 7.462462462462463e-06, |
|
"loss": 0.6438, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 25.41, |
|
"grad_norm": 4.726527690887451, |
|
"learning_rate": 7.45945945945946e-06, |
|
"loss": 0.621, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 25.44, |
|
"grad_norm": 4.208578109741211, |
|
"learning_rate": 7.456456456456457e-06, |
|
"loss": 0.6252, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 25.47, |
|
"grad_norm": 4.630558013916016, |
|
"learning_rate": 7.453453453453454e-06, |
|
"loss": 0.5678, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 25.5, |
|
"grad_norm": 5.314132213592529, |
|
"learning_rate": 7.450450450450452e-06, |
|
"loss": 0.6658, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 25.53, |
|
"grad_norm": 5.784879207611084, |
|
"learning_rate": 7.447447447447448e-06, |
|
"loss": 0.622, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 25.56, |
|
"grad_norm": 5.003414630889893, |
|
"learning_rate": 7.444444444444445e-06, |
|
"loss": 0.6246, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 25.59, |
|
"grad_norm": 3.977597236633301, |
|
"learning_rate": 7.441441441441442e-06, |
|
"loss": 0.6009, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 25.62, |
|
"grad_norm": 5.353960037231445, |
|
"learning_rate": 7.438438438438439e-06, |
|
"loss": 0.6211, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 25.65, |
|
"grad_norm": 5.512233257293701, |
|
"learning_rate": 7.435435435435437e-06, |
|
"loss": 0.6511, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 25.68, |
|
"grad_norm": 3.425570011138916, |
|
"learning_rate": 7.4324324324324324e-06, |
|
"loss": 0.6653, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 25.71, |
|
"grad_norm": 5.093016147613525, |
|
"learning_rate": 7.42942942942943e-06, |
|
"loss": 0.649, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 25.74, |
|
"grad_norm": 4.121246814727783, |
|
"learning_rate": 7.426426426426427e-06, |
|
"loss": 0.6374, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 25.77, |
|
"grad_norm": 4.50221586227417, |
|
"learning_rate": 7.423423423423424e-06, |
|
"loss": 0.5874, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 25.8, |
|
"grad_norm": 5.9485063552856445, |
|
"learning_rate": 7.4204204204204215e-06, |
|
"loss": 0.6417, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 25.83, |
|
"grad_norm": 4.642158508300781, |
|
"learning_rate": 7.417417417417418e-06, |
|
"loss": 0.5855, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 25.86, |
|
"grad_norm": 5.189647674560547, |
|
"learning_rate": 7.414414414414415e-06, |
|
"loss": 0.5878, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 25.89, |
|
"grad_norm": 4.6904191970825195, |
|
"learning_rate": 7.411411411411412e-06, |
|
"loss": 0.637, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 25.92, |
|
"grad_norm": 3.443445920944214, |
|
"learning_rate": 7.408408408408409e-06, |
|
"loss": 0.607, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 25.95, |
|
"grad_norm": 4.997288703918457, |
|
"learning_rate": 7.4054054054054055e-06, |
|
"loss": 0.602, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 25.98, |
|
"grad_norm": 4.938547134399414, |
|
"learning_rate": 7.402402402402403e-06, |
|
"loss": 0.6369, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 26.0, |
|
"eval_accuracy": 0.9155, |
|
"eval_loss": 0.3676045536994934, |
|
"eval_runtime": 26.6726, |
|
"eval_samples_per_second": 374.916, |
|
"eval_steps_per_second": 1.5, |
|
"step": 8658 |
|
}, |
|
{ |
|
"epoch": 26.01, |
|
"grad_norm": 5.075904846191406, |
|
"learning_rate": 7.3993993993994e-06, |
|
"loss": 0.5574, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 26.04, |
|
"grad_norm": 4.672639846801758, |
|
"learning_rate": 7.396396396396397e-06, |
|
"loss": 0.5918, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 26.07, |
|
"grad_norm": 4.947727203369141, |
|
"learning_rate": 7.393393393393395e-06, |
|
"loss": 0.6122, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 26.1, |
|
"grad_norm": 3.5392355918884277, |
|
"learning_rate": 7.39039039039039e-06, |
|
"loss": 0.5744, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 26.13, |
|
"grad_norm": 6.616043567657471, |
|
"learning_rate": 7.387387387387388e-06, |
|
"loss": 0.5518, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 26.16, |
|
"grad_norm": 3.9501121044158936, |
|
"learning_rate": 7.384384384384385e-06, |
|
"loss": 0.5757, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 26.19, |
|
"grad_norm": 6.87493896484375, |
|
"learning_rate": 7.381381381381382e-06, |
|
"loss": 0.6087, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 26.22, |
|
"grad_norm": 3.392703056335449, |
|
"learning_rate": 7.3783783783783794e-06, |
|
"loss": 0.6107, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 26.25, |
|
"grad_norm": 5.120011806488037, |
|
"learning_rate": 7.375375375375377e-06, |
|
"loss": 0.614, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 26.28, |
|
"grad_norm": 4.2964348793029785, |
|
"learning_rate": 7.372372372372373e-06, |
|
"loss": 0.5861, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 26.31, |
|
"grad_norm": 4.34578275680542, |
|
"learning_rate": 7.36936936936937e-06, |
|
"loss": 0.664, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 26.34, |
|
"grad_norm": 4.854133129119873, |
|
"learning_rate": 7.366366366366367e-06, |
|
"loss": 0.5872, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 26.37, |
|
"grad_norm": 5.136003494262695, |
|
"learning_rate": 7.363363363363364e-06, |
|
"loss": 0.6231, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 26.4, |
|
"grad_norm": 6.438120365142822, |
|
"learning_rate": 7.360360360360361e-06, |
|
"loss": 0.64, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 26.43, |
|
"grad_norm": 5.689127445220947, |
|
"learning_rate": 7.3573573573573575e-06, |
|
"loss": 0.7184, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 26.46, |
|
"grad_norm": 4.496994495391846, |
|
"learning_rate": 7.354354354354355e-06, |
|
"loss": 0.633, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 26.49, |
|
"grad_norm": 4.275875568389893, |
|
"learning_rate": 7.3513513513513525e-06, |
|
"loss": 0.6198, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 26.52, |
|
"grad_norm": 5.224747180938721, |
|
"learning_rate": 7.348348348348348e-06, |
|
"loss": 0.5837, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 26.55, |
|
"grad_norm": 5.464904308319092, |
|
"learning_rate": 7.345345345345346e-06, |
|
"loss": 0.5972, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 26.58, |
|
"grad_norm": 15.829516410827637, |
|
"learning_rate": 7.342342342342343e-06, |
|
"loss": 0.6058, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 26.61, |
|
"grad_norm": 5.066461563110352, |
|
"learning_rate": 7.33933933933934e-06, |
|
"loss": 0.6161, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 26.64, |
|
"grad_norm": 6.489962577819824, |
|
"learning_rate": 7.336336336336337e-06, |
|
"loss": 0.6586, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 26.67, |
|
"grad_norm": 4.583408832550049, |
|
"learning_rate": 7.333333333333333e-06, |
|
"loss": 0.6123, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 26.7, |
|
"grad_norm": 4.31550931930542, |
|
"learning_rate": 7.330330330330331e-06, |
|
"loss": 0.5898, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 26.73, |
|
"grad_norm": 3.4311511516571045, |
|
"learning_rate": 7.327327327327328e-06, |
|
"loss": 0.5722, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 26.76, |
|
"grad_norm": 4.0307464599609375, |
|
"learning_rate": 7.324324324324325e-06, |
|
"loss": 0.5315, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 26.79, |
|
"grad_norm": 4.3041911125183105, |
|
"learning_rate": 7.321321321321322e-06, |
|
"loss": 0.5483, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 26.82, |
|
"grad_norm": 5.518500804901123, |
|
"learning_rate": 7.31831831831832e-06, |
|
"loss": 0.6019, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 26.85, |
|
"grad_norm": 4.37266206741333, |
|
"learning_rate": 7.3153153153153155e-06, |
|
"loss": 0.5912, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 26.88, |
|
"grad_norm": 6.165101051330566, |
|
"learning_rate": 7.312312312312313e-06, |
|
"loss": 0.5828, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 26.91, |
|
"grad_norm": 4.071359157562256, |
|
"learning_rate": 7.3093093093093104e-06, |
|
"loss": 0.5642, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 26.94, |
|
"grad_norm": 7.619660377502441, |
|
"learning_rate": 7.306306306306307e-06, |
|
"loss": 0.608, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 26.97, |
|
"grad_norm": 5.814097881317139, |
|
"learning_rate": 7.303303303303304e-06, |
|
"loss": 0.5829, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 27.0, |
|
"grad_norm": 3.6844120025634766, |
|
"learning_rate": 7.3003003003003e-06, |
|
"loss": 0.6007, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 27.0, |
|
"eval_accuracy": 0.9149, |
|
"eval_loss": 0.3631533086299896, |
|
"eval_runtime": 26.1151, |
|
"eval_samples_per_second": 382.92, |
|
"eval_steps_per_second": 1.532, |
|
"step": 8991 |
|
}, |
|
{ |
|
"epoch": 27.03, |
|
"grad_norm": 3.1218578815460205, |
|
"learning_rate": 7.297297297297298e-06, |
|
"loss": 0.5263, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 27.06, |
|
"grad_norm": 31.088542938232422, |
|
"learning_rate": 7.294294294294295e-06, |
|
"loss": 0.5519, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 27.09, |
|
"grad_norm": 5.128907203674316, |
|
"learning_rate": 7.291291291291291e-06, |
|
"loss": 0.5794, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 27.12, |
|
"grad_norm": 5.448819160461426, |
|
"learning_rate": 7.2882882882882885e-06, |
|
"loss": 0.5923, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 27.15, |
|
"grad_norm": 5.304131031036377, |
|
"learning_rate": 7.285285285285286e-06, |
|
"loss": 0.6297, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 27.18, |
|
"grad_norm": 3.740703582763672, |
|
"learning_rate": 7.282282282282283e-06, |
|
"loss": 0.5815, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 27.21, |
|
"grad_norm": 4.914813995361328, |
|
"learning_rate": 7.27927927927928e-06, |
|
"loss": 0.5864, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 27.24, |
|
"grad_norm": 4.869528770446777, |
|
"learning_rate": 7.276276276276278e-06, |
|
"loss": 0.5698, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 27.27, |
|
"grad_norm": 4.342233180999756, |
|
"learning_rate": 7.273273273273273e-06, |
|
"loss": 0.5462, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 27.3, |
|
"grad_norm": 4.266785144805908, |
|
"learning_rate": 7.270270270270271e-06, |
|
"loss": 0.5573, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 27.33, |
|
"grad_norm": 3.1235883235931396, |
|
"learning_rate": 7.267267267267268e-06, |
|
"loss": 0.5371, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 27.36, |
|
"grad_norm": 5.869687080383301, |
|
"learning_rate": 7.264264264264265e-06, |
|
"loss": 0.613, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 27.39, |
|
"grad_norm": 4.293118476867676, |
|
"learning_rate": 7.2612612612612625e-06, |
|
"loss": 0.6284, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 27.42, |
|
"grad_norm": 6.922875881195068, |
|
"learning_rate": 7.258258258258258e-06, |
|
"loss": 0.611, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 27.45, |
|
"grad_norm": 3.4923338890075684, |
|
"learning_rate": 7.255255255255256e-06, |
|
"loss": 0.5713, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 27.48, |
|
"grad_norm": 4.0018696784973145, |
|
"learning_rate": 7.252252252252253e-06, |
|
"loss": 0.6327, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 27.51, |
|
"grad_norm": 6.069986820220947, |
|
"learning_rate": 7.24924924924925e-06, |
|
"loss": 0.6136, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 27.54, |
|
"grad_norm": 4.478414058685303, |
|
"learning_rate": 7.2462462462462465e-06, |
|
"loss": 0.5468, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 27.57, |
|
"grad_norm": 6.923449993133545, |
|
"learning_rate": 7.243243243243244e-06, |
|
"loss": 0.5554, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 27.6, |
|
"grad_norm": 3.9428858757019043, |
|
"learning_rate": 7.240240240240241e-06, |
|
"loss": 0.5201, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 27.63, |
|
"grad_norm": 5.154752254486084, |
|
"learning_rate": 7.237237237237238e-06, |
|
"loss": 0.6153, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 27.66, |
|
"grad_norm": 7.000457286834717, |
|
"learning_rate": 7.2342342342342355e-06, |
|
"loss": 0.5862, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 27.69, |
|
"grad_norm": 4.479945182800293, |
|
"learning_rate": 7.231231231231231e-06, |
|
"loss": 0.5617, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 27.72, |
|
"grad_norm": 3.5248751640319824, |
|
"learning_rate": 7.228228228228229e-06, |
|
"loss": 0.6648, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 27.75, |
|
"grad_norm": 4.968978404998779, |
|
"learning_rate": 7.2252252252252254e-06, |
|
"loss": 0.6252, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 27.78, |
|
"grad_norm": 5.074804306030273, |
|
"learning_rate": 7.222222222222223e-06, |
|
"loss": 0.6298, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 27.81, |
|
"grad_norm": 6.238649368286133, |
|
"learning_rate": 7.21921921921922e-06, |
|
"loss": 0.5815, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 27.84, |
|
"grad_norm": 5.709503650665283, |
|
"learning_rate": 7.216216216216216e-06, |
|
"loss": 0.6067, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 27.87, |
|
"grad_norm": 2.9142568111419678, |
|
"learning_rate": 7.213213213213214e-06, |
|
"loss": 0.5691, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 27.9, |
|
"grad_norm": 4.102430820465088, |
|
"learning_rate": 7.210210210210211e-06, |
|
"loss": 0.5858, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 27.93, |
|
"grad_norm": 5.3256120681762695, |
|
"learning_rate": 7.207207207207208e-06, |
|
"loss": 0.5633, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 27.96, |
|
"grad_norm": 5.945285797119141, |
|
"learning_rate": 7.204204204204205e-06, |
|
"loss": 0.6175, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 27.99, |
|
"grad_norm": 4.57643461227417, |
|
"learning_rate": 7.201201201201202e-06, |
|
"loss": 0.6059, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 28.0, |
|
"eval_accuracy": 0.9187, |
|
"eval_loss": 0.3552107512950897, |
|
"eval_runtime": 26.4619, |
|
"eval_samples_per_second": 377.902, |
|
"eval_steps_per_second": 1.512, |
|
"step": 9324 |
|
}, |
|
{ |
|
"epoch": 28.02, |
|
"grad_norm": 3.537818193435669, |
|
"learning_rate": 7.1981981981981985e-06, |
|
"loss": 0.555, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 28.05, |
|
"grad_norm": 5.589657783508301, |
|
"learning_rate": 7.195195195195196e-06, |
|
"loss": 0.555, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 28.08, |
|
"grad_norm": 5.387350559234619, |
|
"learning_rate": 7.1921921921921935e-06, |
|
"loss": 0.6027, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 28.11, |
|
"grad_norm": 9.464296340942383, |
|
"learning_rate": 7.189189189189189e-06, |
|
"loss": 0.6326, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 28.14, |
|
"grad_norm": 5.724453926086426, |
|
"learning_rate": 7.186186186186187e-06, |
|
"loss": 0.567, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 28.17, |
|
"grad_norm": 4.872731685638428, |
|
"learning_rate": 7.183183183183183e-06, |
|
"loss": 0.5613, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 28.2, |
|
"grad_norm": 8.261558532714844, |
|
"learning_rate": 7.180180180180181e-06, |
|
"loss": 0.5409, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 28.23, |
|
"grad_norm": 4.817095756530762, |
|
"learning_rate": 7.177177177177178e-06, |
|
"loss": 0.5777, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 28.26, |
|
"grad_norm": 8.762271881103516, |
|
"learning_rate": 7.174174174174174e-06, |
|
"loss": 0.5176, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 28.29, |
|
"grad_norm": 4.3167405128479, |
|
"learning_rate": 7.1711711711711716e-06, |
|
"loss": 0.5607, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 28.32, |
|
"grad_norm": 4.858583450317383, |
|
"learning_rate": 7.168168168168169e-06, |
|
"loss": 0.5385, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 28.35, |
|
"grad_norm": 3.918891429901123, |
|
"learning_rate": 7.165165165165166e-06, |
|
"loss": 0.6074, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 28.38, |
|
"grad_norm": 6.432498455047607, |
|
"learning_rate": 7.162162162162163e-06, |
|
"loss": 0.5603, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 28.41, |
|
"grad_norm": 3.913588762283325, |
|
"learning_rate": 7.159159159159161e-06, |
|
"loss": 0.5224, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 28.44, |
|
"grad_norm": 5.474986553192139, |
|
"learning_rate": 7.156156156156156e-06, |
|
"loss": 0.57, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 28.47, |
|
"grad_norm": 3.7697129249572754, |
|
"learning_rate": 7.153153153153154e-06, |
|
"loss": 0.5842, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 28.5, |
|
"grad_norm": 3.3923611640930176, |
|
"learning_rate": 7.1501501501501505e-06, |
|
"loss": 0.5817, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 28.53, |
|
"grad_norm": 4.007638931274414, |
|
"learning_rate": 7.147147147147148e-06, |
|
"loss": 0.5397, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 28.56, |
|
"grad_norm": 5.719387531280518, |
|
"learning_rate": 7.144144144144145e-06, |
|
"loss": 0.578, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 28.59, |
|
"grad_norm": 3.7497189044952393, |
|
"learning_rate": 7.141141141141141e-06, |
|
"loss": 0.561, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 28.62, |
|
"grad_norm": 6.521295070648193, |
|
"learning_rate": 7.138138138138139e-06, |
|
"loss": 0.6263, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 28.65, |
|
"grad_norm": 6.38608980178833, |
|
"learning_rate": 7.135135135135136e-06, |
|
"loss": 0.5146, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 28.68, |
|
"grad_norm": 4.536646366119385, |
|
"learning_rate": 7.132132132132132e-06, |
|
"loss": 0.5913, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 28.71, |
|
"grad_norm": 5.932296276092529, |
|
"learning_rate": 7.1291291291291295e-06, |
|
"loss": 0.6529, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 28.74, |
|
"grad_norm": 4.608974456787109, |
|
"learning_rate": 7.126126126126127e-06, |
|
"loss": 0.5663, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 28.77, |
|
"grad_norm": 5.467312335968018, |
|
"learning_rate": 7.123123123123124e-06, |
|
"loss": 0.5624, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 28.8, |
|
"grad_norm": 4.99942684173584, |
|
"learning_rate": 7.120120120120121e-06, |
|
"loss": 0.6335, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 28.83, |
|
"grad_norm": 5.651036262512207, |
|
"learning_rate": 7.117117117117117e-06, |
|
"loss": 0.5798, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 28.86, |
|
"grad_norm": 3.7196743488311768, |
|
"learning_rate": 7.114114114114114e-06, |
|
"loss": 0.5707, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 28.89, |
|
"grad_norm": 4.347270965576172, |
|
"learning_rate": 7.111111111111112e-06, |
|
"loss": 0.5273, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 28.92, |
|
"grad_norm": 4.918589115142822, |
|
"learning_rate": 7.1081081081081085e-06, |
|
"loss": 0.5301, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 28.95, |
|
"grad_norm": 5.1846923828125, |
|
"learning_rate": 7.105105105105106e-06, |
|
"loss": 0.6091, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 28.98, |
|
"grad_norm": 5.065564155578613, |
|
"learning_rate": 7.102102102102103e-06, |
|
"loss": 0.5227, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 29.0, |
|
"eval_accuracy": 0.9178, |
|
"eval_loss": 0.345379114151001, |
|
"eval_runtime": 26.7038, |
|
"eval_samples_per_second": 374.479, |
|
"eval_steps_per_second": 1.498, |
|
"step": 9657 |
|
}, |
|
{ |
|
"epoch": 29.01, |
|
"grad_norm": 4.072543621063232, |
|
"learning_rate": 7.099099099099099e-06, |
|
"loss": 0.581, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 29.04, |
|
"grad_norm": 5.343716621398926, |
|
"learning_rate": 7.096096096096097e-06, |
|
"loss": 0.6008, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 29.07, |
|
"grad_norm": 4.13471794128418, |
|
"learning_rate": 7.093093093093094e-06, |
|
"loss": 0.5271, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 29.1, |
|
"grad_norm": 4.203214168548584, |
|
"learning_rate": 7.090090090090091e-06, |
|
"loss": 0.5814, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 29.13, |
|
"grad_norm": 5.564302444458008, |
|
"learning_rate": 7.087087087087087e-06, |
|
"loss": 0.5533, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 29.16, |
|
"grad_norm": 4.4252519607543945, |
|
"learning_rate": 7.084084084084085e-06, |
|
"loss": 0.5629, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 29.19, |
|
"grad_norm": 5.020333290100098, |
|
"learning_rate": 7.0810810810810815e-06, |
|
"loss": 0.5193, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 29.22, |
|
"grad_norm": 4.683363914489746, |
|
"learning_rate": 7.078078078078079e-06, |
|
"loss": 0.578, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 29.25, |
|
"grad_norm": 4.328330993652344, |
|
"learning_rate": 7.075075075075075e-06, |
|
"loss": 0.5853, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 29.28, |
|
"grad_norm": 4.674912452697754, |
|
"learning_rate": 7.072072072072072e-06, |
|
"loss": 0.5226, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 29.31, |
|
"grad_norm": 5.221179008483887, |
|
"learning_rate": 7.06906906906907e-06, |
|
"loss": 0.5026, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 29.34, |
|
"grad_norm": 5.488534450531006, |
|
"learning_rate": 7.066066066066066e-06, |
|
"loss": 0.5842, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 29.37, |
|
"grad_norm": 4.482065200805664, |
|
"learning_rate": 7.063063063063064e-06, |
|
"loss": 0.5211, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 29.4, |
|
"grad_norm": 4.155788898468018, |
|
"learning_rate": 7.060060060060061e-06, |
|
"loss": 0.5769, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 29.43, |
|
"grad_norm": 3.558490037918091, |
|
"learning_rate": 7.057057057057057e-06, |
|
"loss": 0.5288, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 29.46, |
|
"grad_norm": 4.873110294342041, |
|
"learning_rate": 7.054054054054055e-06, |
|
"loss": 0.5363, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 29.49, |
|
"grad_norm": 4.669723033905029, |
|
"learning_rate": 7.051051051051052e-06, |
|
"loss": 0.5283, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 29.52, |
|
"grad_norm": 6.239320278167725, |
|
"learning_rate": 7.048048048048049e-06, |
|
"loss": 0.544, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 29.55, |
|
"grad_norm": 5.154757976531982, |
|
"learning_rate": 7.045045045045046e-06, |
|
"loss": 0.6143, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 29.58, |
|
"grad_norm": 5.274998188018799, |
|
"learning_rate": 7.042042042042042e-06, |
|
"loss": 0.5441, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 29.61, |
|
"grad_norm": 3.258530616760254, |
|
"learning_rate": 7.0390390390390395e-06, |
|
"loss": 0.5639, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 29.64, |
|
"grad_norm": 5.662111282348633, |
|
"learning_rate": 7.036036036036037e-06, |
|
"loss": 0.5195, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 29.67, |
|
"grad_norm": 4.919576168060303, |
|
"learning_rate": 7.0330330330330336e-06, |
|
"loss": 0.5178, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 29.7, |
|
"grad_norm": 6.108078479766846, |
|
"learning_rate": 7.03003003003003e-06, |
|
"loss": 0.5967, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 29.73, |
|
"grad_norm": 3.6324610710144043, |
|
"learning_rate": 7.027027027027028e-06, |
|
"loss": 0.5153, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 29.76, |
|
"grad_norm": 5.744423866271973, |
|
"learning_rate": 7.024024024024024e-06, |
|
"loss": 0.5474, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 29.79, |
|
"grad_norm": 4.71481990814209, |
|
"learning_rate": 7.021021021021022e-06, |
|
"loss": 0.5775, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 29.82, |
|
"grad_norm": 6.34856653213501, |
|
"learning_rate": 7.018018018018019e-06, |
|
"loss": 0.5263, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 29.85, |
|
"grad_norm": 4.096996784210205, |
|
"learning_rate": 7.015015015015015e-06, |
|
"loss": 0.6233, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 29.88, |
|
"grad_norm": 7.632816791534424, |
|
"learning_rate": 7.0120120120120125e-06, |
|
"loss": 0.5495, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 29.91, |
|
"grad_norm": 4.823956489562988, |
|
"learning_rate": 7.009009009009009e-06, |
|
"loss": 0.5571, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 29.94, |
|
"grad_norm": 3.5478086471557617, |
|
"learning_rate": 7.006006006006007e-06, |
|
"loss": 0.6026, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 29.97, |
|
"grad_norm": 5.499380111694336, |
|
"learning_rate": 7.003003003003004e-06, |
|
"loss": 0.6102, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 30.0, |
|
"grad_norm": 52.46392822265625, |
|
"learning_rate": 7e-06, |
|
"loss": 0.6712, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 30.0, |
|
"eval_accuracy": 0.9183, |
|
"eval_loss": 0.3375239968299866, |
|
"eval_runtime": 26.3797, |
|
"eval_samples_per_second": 379.079, |
|
"eval_steps_per_second": 1.516, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 30.03, |
|
"grad_norm": 6.403550624847412, |
|
"learning_rate": 6.996996996996997e-06, |
|
"loss": 0.5387, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 30.06, |
|
"grad_norm": 6.658787250518799, |
|
"learning_rate": 6.993993993993995e-06, |
|
"loss": 0.6126, |
|
"step": 10010 |
|
}, |
|
{ |
|
"epoch": 30.09, |
|
"grad_norm": 3.6636009216308594, |
|
"learning_rate": 6.9909909909909915e-06, |
|
"loss": 0.5327, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 30.12, |
|
"grad_norm": 6.220738410949707, |
|
"learning_rate": 6.987987987987989e-06, |
|
"loss": 0.5723, |
|
"step": 10030 |
|
}, |
|
{ |
|
"epoch": 30.15, |
|
"grad_norm": 5.872793674468994, |
|
"learning_rate": 6.984984984984986e-06, |
|
"loss": 0.5094, |
|
"step": 10040 |
|
}, |
|
{ |
|
"epoch": 30.18, |
|
"grad_norm": 5.606639385223389, |
|
"learning_rate": 6.981981981981982e-06, |
|
"loss": 0.5274, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 30.21, |
|
"grad_norm": 4.649966716766357, |
|
"learning_rate": 6.97897897897898e-06, |
|
"loss": 0.5077, |
|
"step": 10060 |
|
}, |
|
{ |
|
"epoch": 30.24, |
|
"grad_norm": 3.147263765335083, |
|
"learning_rate": 6.975975975975977e-06, |
|
"loss": 0.5242, |
|
"step": 10070 |
|
}, |
|
{ |
|
"epoch": 30.27, |
|
"grad_norm": 4.2813310623168945, |
|
"learning_rate": 6.972972972972973e-06, |
|
"loss": 0.6066, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 30.3, |
|
"grad_norm": 4.643487930297852, |
|
"learning_rate": 6.9699699699699704e-06, |
|
"loss": 0.5249, |
|
"step": 10090 |
|
}, |
|
{ |
|
"epoch": 30.33, |
|
"grad_norm": 4.673160552978516, |
|
"learning_rate": 6.966966966966967e-06, |
|
"loss": 0.5169, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 30.36, |
|
"grad_norm": 5.770195007324219, |
|
"learning_rate": 6.9639639639639646e-06, |
|
"loss": 0.5177, |
|
"step": 10110 |
|
}, |
|
{ |
|
"epoch": 30.39, |
|
"grad_norm": 6.050931930541992, |
|
"learning_rate": 6.960960960960962e-06, |
|
"loss": 0.5477, |
|
"step": 10120 |
|
}, |
|
{ |
|
"epoch": 30.42, |
|
"grad_norm": 4.330102443695068, |
|
"learning_rate": 6.957957957957958e-06, |
|
"loss": 0.533, |
|
"step": 10130 |
|
}, |
|
{ |
|
"epoch": 30.45, |
|
"grad_norm": 5.163640022277832, |
|
"learning_rate": 6.954954954954955e-06, |
|
"loss": 0.531, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 30.48, |
|
"grad_norm": 6.337249755859375, |
|
"learning_rate": 6.951951951951953e-06, |
|
"loss": 0.5424, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 30.51, |
|
"grad_norm": 4.544989585876465, |
|
"learning_rate": 6.948948948948949e-06, |
|
"loss": 0.5534, |
|
"step": 10160 |
|
}, |
|
{ |
|
"epoch": 30.54, |
|
"grad_norm": 3.9418797492980957, |
|
"learning_rate": 6.945945945945947e-06, |
|
"loss": 0.5304, |
|
"step": 10170 |
|
}, |
|
{ |
|
"epoch": 30.57, |
|
"grad_norm": 3.7905588150024414, |
|
"learning_rate": 6.942942942942944e-06, |
|
"loss": 0.4708, |
|
"step": 10180 |
|
}, |
|
{ |
|
"epoch": 30.6, |
|
"grad_norm": 4.000282287597656, |
|
"learning_rate": 6.93993993993994e-06, |
|
"loss": 0.4982, |
|
"step": 10190 |
|
}, |
|
{ |
|
"epoch": 30.63, |
|
"grad_norm": 4.103896141052246, |
|
"learning_rate": 6.936936936936938e-06, |
|
"loss": 0.5633, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 30.66, |
|
"grad_norm": 3.971468925476074, |
|
"learning_rate": 6.933933933933934e-06, |
|
"loss": 0.4992, |
|
"step": 10210 |
|
}, |
|
{ |
|
"epoch": 30.69, |
|
"grad_norm": 4.747451305389404, |
|
"learning_rate": 6.930930930930932e-06, |
|
"loss": 0.548, |
|
"step": 10220 |
|
}, |
|
{ |
|
"epoch": 30.72, |
|
"grad_norm": 2.6908531188964844, |
|
"learning_rate": 6.927927927927928e-06, |
|
"loss": 0.5545, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 30.75, |
|
"grad_norm": 4.251894474029541, |
|
"learning_rate": 6.924924924924925e-06, |
|
"loss": 0.4754, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 30.78, |
|
"grad_norm": 4.229438781738281, |
|
"learning_rate": 6.9219219219219225e-06, |
|
"loss": 0.5479, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 30.81, |
|
"grad_norm": 3.511378288269043, |
|
"learning_rate": 6.91891891891892e-06, |
|
"loss": 0.576, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 30.84, |
|
"grad_norm": 4.298564910888672, |
|
"learning_rate": 6.915915915915916e-06, |
|
"loss": 0.5447, |
|
"step": 10270 |
|
}, |
|
{ |
|
"epoch": 30.87, |
|
"grad_norm": 3.6350417137145996, |
|
"learning_rate": 6.912912912912913e-06, |
|
"loss": 0.5073, |
|
"step": 10280 |
|
}, |
|
{ |
|
"epoch": 30.9, |
|
"grad_norm": 4.037917613983154, |
|
"learning_rate": 6.909909909909911e-06, |
|
"loss": 0.5399, |
|
"step": 10290 |
|
}, |
|
{ |
|
"epoch": 30.93, |
|
"grad_norm": 4.715947151184082, |
|
"learning_rate": 6.906906906906907e-06, |
|
"loss": 0.5516, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 30.96, |
|
"grad_norm": 7.880973815917969, |
|
"learning_rate": 6.903903903903905e-06, |
|
"loss": 0.5438, |
|
"step": 10310 |
|
}, |
|
{ |
|
"epoch": 30.99, |
|
"grad_norm": 4.695303440093994, |
|
"learning_rate": 6.900900900900901e-06, |
|
"loss": 0.5053, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 31.0, |
|
"eval_accuracy": 0.9171, |
|
"eval_loss": 0.33546704053878784, |
|
"eval_runtime": 26.5691, |
|
"eval_samples_per_second": 376.376, |
|
"eval_steps_per_second": 1.506, |
|
"step": 10323 |
|
}, |
|
{ |
|
"epoch": 31.02, |
|
"grad_norm": 4.5637712478637695, |
|
"learning_rate": 6.897897897897898e-06, |
|
"loss": 0.5222, |
|
"step": 10330 |
|
}, |
|
{ |
|
"epoch": 31.05, |
|
"grad_norm": 4.128244400024414, |
|
"learning_rate": 6.8948948948948955e-06, |
|
"loss": 0.5947, |
|
"step": 10340 |
|
}, |
|
{ |
|
"epoch": 31.08, |
|
"grad_norm": 4.192866802215576, |
|
"learning_rate": 6.891891891891892e-06, |
|
"loss": 0.5615, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 31.11, |
|
"grad_norm": 3.5042011737823486, |
|
"learning_rate": 6.88888888888889e-06, |
|
"loss": 0.4907, |
|
"step": 10360 |
|
}, |
|
{ |
|
"epoch": 31.14, |
|
"grad_norm": 5.038252830505371, |
|
"learning_rate": 6.885885885885887e-06, |
|
"loss": 0.4821, |
|
"step": 10370 |
|
}, |
|
{ |
|
"epoch": 31.17, |
|
"grad_norm": 4.238375663757324, |
|
"learning_rate": 6.882882882882883e-06, |
|
"loss": 0.4759, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 31.2, |
|
"grad_norm": 3.3415908813476562, |
|
"learning_rate": 6.87987987987988e-06, |
|
"loss": 0.5448, |
|
"step": 10390 |
|
}, |
|
{ |
|
"epoch": 31.23, |
|
"grad_norm": 3.935137987136841, |
|
"learning_rate": 6.876876876876878e-06, |
|
"loss": 0.5556, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 31.26, |
|
"grad_norm": 5.417572021484375, |
|
"learning_rate": 6.8738738738738745e-06, |
|
"loss": 0.5054, |
|
"step": 10410 |
|
}, |
|
{ |
|
"epoch": 31.29, |
|
"grad_norm": 3.5927047729492188, |
|
"learning_rate": 6.870870870870872e-06, |
|
"loss": 0.4932, |
|
"step": 10420 |
|
}, |
|
{ |
|
"epoch": 31.32, |
|
"grad_norm": 4.9424147605896, |
|
"learning_rate": 6.867867867867869e-06, |
|
"loss": 0.5413, |
|
"step": 10430 |
|
}, |
|
{ |
|
"epoch": 31.35, |
|
"grad_norm": 4.6282572746276855, |
|
"learning_rate": 6.864864864864865e-06, |
|
"loss": 0.5611, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 31.38, |
|
"grad_norm": 5.602158546447754, |
|
"learning_rate": 6.861861861861863e-06, |
|
"loss": 0.5624, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 31.41, |
|
"grad_norm": 4.025387763977051, |
|
"learning_rate": 6.8588588588588585e-06, |
|
"loss": 0.5034, |
|
"step": 10460 |
|
}, |
|
{ |
|
"epoch": 31.44, |
|
"grad_norm": 4.116804599761963, |
|
"learning_rate": 6.855855855855856e-06, |
|
"loss": 0.51, |
|
"step": 10470 |
|
}, |
|
{ |
|
"epoch": 31.47, |
|
"grad_norm": 5.079034805297852, |
|
"learning_rate": 6.8528528528528535e-06, |
|
"loss": 0.5951, |
|
"step": 10480 |
|
}, |
|
{ |
|
"epoch": 31.5, |
|
"grad_norm": 5.287024021148682, |
|
"learning_rate": 6.84984984984985e-06, |
|
"loss": 0.5248, |
|
"step": 10490 |
|
}, |
|
{ |
|
"epoch": 31.53, |
|
"grad_norm": 4.518507957458496, |
|
"learning_rate": 6.846846846846848e-06, |
|
"loss": 0.5718, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 31.56, |
|
"grad_norm": 4.743940830230713, |
|
"learning_rate": 6.843843843843845e-06, |
|
"loss": 0.5249, |
|
"step": 10510 |
|
}, |
|
{ |
|
"epoch": 31.59, |
|
"grad_norm": 3.6553006172180176, |
|
"learning_rate": 6.840840840840841e-06, |
|
"loss": 0.4958, |
|
"step": 10520 |
|
}, |
|
{ |
|
"epoch": 31.62, |
|
"grad_norm": 5.176079750061035, |
|
"learning_rate": 6.837837837837838e-06, |
|
"loss": 0.5568, |
|
"step": 10530 |
|
}, |
|
{ |
|
"epoch": 31.65, |
|
"grad_norm": 7.153314590454102, |
|
"learning_rate": 6.834834834834836e-06, |
|
"loss": 0.4945, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 31.68, |
|
"grad_norm": 4.934965133666992, |
|
"learning_rate": 6.8318318318318324e-06, |
|
"loss": 0.5023, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 31.71, |
|
"grad_norm": 5.286891937255859, |
|
"learning_rate": 6.82882882882883e-06, |
|
"loss": 0.5313, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 31.74, |
|
"grad_norm": 6.007424831390381, |
|
"learning_rate": 6.825825825825826e-06, |
|
"loss": 0.5512, |
|
"step": 10570 |
|
}, |
|
{ |
|
"epoch": 31.77, |
|
"grad_norm": 5.364544868469238, |
|
"learning_rate": 6.822822822822823e-06, |
|
"loss": 0.5405, |
|
"step": 10580 |
|
}, |
|
{ |
|
"epoch": 31.8, |
|
"grad_norm": 4.781249046325684, |
|
"learning_rate": 6.819819819819821e-06, |
|
"loss": 0.5878, |
|
"step": 10590 |
|
}, |
|
{ |
|
"epoch": 31.83, |
|
"grad_norm": 5.178708553314209, |
|
"learning_rate": 6.816816816816817e-06, |
|
"loss": 0.4952, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 31.86, |
|
"grad_norm": 4.609602928161621, |
|
"learning_rate": 6.813813813813815e-06, |
|
"loss": 0.5262, |
|
"step": 10610 |
|
}, |
|
{ |
|
"epoch": 31.89, |
|
"grad_norm": 6.407437801361084, |
|
"learning_rate": 6.810810810810811e-06, |
|
"loss": 0.4991, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 31.92, |
|
"grad_norm": 3.346106767654419, |
|
"learning_rate": 6.807807807807808e-06, |
|
"loss": 0.5157, |
|
"step": 10630 |
|
}, |
|
{ |
|
"epoch": 31.95, |
|
"grad_norm": 4.991146087646484, |
|
"learning_rate": 6.8048048048048055e-06, |
|
"loss": 0.4841, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 31.98, |
|
"grad_norm": 4.926365375518799, |
|
"learning_rate": 6.801801801801803e-06, |
|
"loss": 0.5432, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 32.0, |
|
"eval_accuracy": 0.917, |
|
"eval_loss": 0.3327946662902832, |
|
"eval_runtime": 26.3244, |
|
"eval_samples_per_second": 379.875, |
|
"eval_steps_per_second": 1.52, |
|
"step": 10656 |
|
}, |
|
{ |
|
"epoch": 32.01, |
|
"grad_norm": 5.152103900909424, |
|
"learning_rate": 6.798798798798799e-06, |
|
"loss": 0.466, |
|
"step": 10660 |
|
}, |
|
{ |
|
"epoch": 32.04, |
|
"grad_norm": 4.942965984344482, |
|
"learning_rate": 6.795795795795796e-06, |
|
"loss": 0.5277, |
|
"step": 10670 |
|
}, |
|
{ |
|
"epoch": 32.07, |
|
"grad_norm": 3.914383888244629, |
|
"learning_rate": 6.792792792792793e-06, |
|
"loss": 0.5241, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 32.1, |
|
"grad_norm": 4.517918109893799, |
|
"learning_rate": 6.78978978978979e-06, |
|
"loss": 0.5013, |
|
"step": 10690 |
|
}, |
|
{ |
|
"epoch": 32.13, |
|
"grad_norm": 4.828157901763916, |
|
"learning_rate": 6.786786786786788e-06, |
|
"loss": 0.5212, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 32.16, |
|
"grad_norm": 5.400358200073242, |
|
"learning_rate": 6.783783783783784e-06, |
|
"loss": 0.5706, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 32.19, |
|
"grad_norm": 5.0671067237854, |
|
"learning_rate": 6.780780780780781e-06, |
|
"loss": 0.5967, |
|
"step": 10720 |
|
}, |
|
{ |
|
"epoch": 32.22, |
|
"grad_norm": 4.591043949127197, |
|
"learning_rate": 6.777777777777779e-06, |
|
"loss": 0.5163, |
|
"step": 10730 |
|
}, |
|
{ |
|
"epoch": 32.25, |
|
"grad_norm": 5.528631210327148, |
|
"learning_rate": 6.774774774774775e-06, |
|
"loss": 0.5845, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 32.28, |
|
"grad_norm": 5.988685607910156, |
|
"learning_rate": 6.771771771771773e-06, |
|
"loss": 0.532, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 32.31, |
|
"grad_norm": 3.5375077724456787, |
|
"learning_rate": 6.76876876876877e-06, |
|
"loss": 0.4809, |
|
"step": 10760 |
|
}, |
|
{ |
|
"epoch": 32.34, |
|
"grad_norm": 4.084622383117676, |
|
"learning_rate": 6.765765765765766e-06, |
|
"loss": 0.469, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 32.37, |
|
"grad_norm": 5.619451999664307, |
|
"learning_rate": 6.7627627627627634e-06, |
|
"loss": 0.5272, |
|
"step": 10780 |
|
}, |
|
{ |
|
"epoch": 32.4, |
|
"grad_norm": 5.900362968444824, |
|
"learning_rate": 6.759759759759761e-06, |
|
"loss": 0.508, |
|
"step": 10790 |
|
}, |
|
{ |
|
"epoch": 32.43, |
|
"grad_norm": 3.274456739425659, |
|
"learning_rate": 6.7567567567567575e-06, |
|
"loss": 0.5378, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 32.46, |
|
"grad_norm": 5.264253616333008, |
|
"learning_rate": 6.753753753753754e-06, |
|
"loss": 0.5458, |
|
"step": 10810 |
|
}, |
|
{ |
|
"epoch": 32.49, |
|
"grad_norm": 5.250488758087158, |
|
"learning_rate": 6.750750750750751e-06, |
|
"loss": 0.4558, |
|
"step": 10820 |
|
}, |
|
{ |
|
"epoch": 32.52, |
|
"grad_norm": 5.334564208984375, |
|
"learning_rate": 6.747747747747748e-06, |
|
"loss": 0.4916, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 32.55, |
|
"grad_norm": 4.669946193695068, |
|
"learning_rate": 6.744744744744746e-06, |
|
"loss": 0.5769, |
|
"step": 10840 |
|
}, |
|
{ |
|
"epoch": 32.58, |
|
"grad_norm": 5.030605792999268, |
|
"learning_rate": 6.7417417417417415e-06, |
|
"loss": 0.5051, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 32.61, |
|
"grad_norm": 4.161187648773193, |
|
"learning_rate": 6.738738738738739e-06, |
|
"loss": 0.4946, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 32.64, |
|
"grad_norm": 4.491575241088867, |
|
"learning_rate": 6.7357357357357365e-06, |
|
"loss": 0.4936, |
|
"step": 10870 |
|
}, |
|
{ |
|
"epoch": 32.67, |
|
"grad_norm": 4.474544048309326, |
|
"learning_rate": 6.732732732732733e-06, |
|
"loss": 0.4773, |
|
"step": 10880 |
|
}, |
|
{ |
|
"epoch": 32.7, |
|
"grad_norm": 4.078099727630615, |
|
"learning_rate": 6.729729729729731e-06, |
|
"loss": 0.5129, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 32.73, |
|
"grad_norm": 4.027213096618652, |
|
"learning_rate": 6.726726726726728e-06, |
|
"loss": 0.5077, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 32.76, |
|
"grad_norm": 4.637345790863037, |
|
"learning_rate": 6.723723723723724e-06, |
|
"loss": 0.4782, |
|
"step": 10910 |
|
}, |
|
{ |
|
"epoch": 32.79, |
|
"grad_norm": 4.181465148925781, |
|
"learning_rate": 6.720720720720721e-06, |
|
"loss": 0.503, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 32.82, |
|
"grad_norm": 3.633840560913086, |
|
"learning_rate": 6.717717717717718e-06, |
|
"loss": 0.4897, |
|
"step": 10930 |
|
}, |
|
{ |
|
"epoch": 32.85, |
|
"grad_norm": 4.9459147453308105, |
|
"learning_rate": 6.7147147147147155e-06, |
|
"loss": 0.4924, |
|
"step": 10940 |
|
}, |
|
{ |
|
"epoch": 32.88, |
|
"grad_norm": 5.306789398193359, |
|
"learning_rate": 6.711711711711713e-06, |
|
"loss": 0.4854, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 32.91, |
|
"grad_norm": 3.652146339416504, |
|
"learning_rate": 6.708708708708709e-06, |
|
"loss": 0.52, |
|
"step": 10960 |
|
}, |
|
{ |
|
"epoch": 32.94, |
|
"grad_norm": 4.799497604370117, |
|
"learning_rate": 6.705705705705706e-06, |
|
"loss": 0.5273, |
|
"step": 10970 |
|
}, |
|
{ |
|
"epoch": 32.97, |
|
"grad_norm": 4.157444477081299, |
|
"learning_rate": 6.702702702702704e-06, |
|
"loss": 0.4617, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 33.0, |
|
"eval_accuracy": 0.9191, |
|
"eval_loss": 0.32952603697776794, |
|
"eval_runtime": 26.412, |
|
"eval_samples_per_second": 378.616, |
|
"eval_steps_per_second": 1.514, |
|
"step": 10989 |
|
}, |
|
{ |
|
"epoch": 33.0, |
|
"grad_norm": 4.564639568328857, |
|
"learning_rate": 6.6996996996997e-06, |
|
"loss": 0.432, |
|
"step": 10990 |
|
}, |
|
{ |
|
"epoch": 33.03, |
|
"grad_norm": 6.127586841583252, |
|
"learning_rate": 6.696696696696697e-06, |
|
"loss": 0.5657, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 33.06, |
|
"grad_norm": 4.598598003387451, |
|
"learning_rate": 6.693693693693694e-06, |
|
"loss": 0.5202, |
|
"step": 11010 |
|
}, |
|
{ |
|
"epoch": 33.09, |
|
"grad_norm": 4.336296081542969, |
|
"learning_rate": 6.690690690690691e-06, |
|
"loss": 0.5148, |
|
"step": 11020 |
|
}, |
|
{ |
|
"epoch": 33.12, |
|
"grad_norm": 5.0793304443359375, |
|
"learning_rate": 6.6876876876876885e-06, |
|
"loss": 0.5735, |
|
"step": 11030 |
|
}, |
|
{ |
|
"epoch": 33.15, |
|
"grad_norm": 5.527975082397461, |
|
"learning_rate": 6.684684684684684e-06, |
|
"loss": 0.4741, |
|
"step": 11040 |
|
}, |
|
{ |
|
"epoch": 33.18, |
|
"grad_norm": 4.1530375480651855, |
|
"learning_rate": 6.681681681681682e-06, |
|
"loss": 0.4882, |
|
"step": 11050 |
|
}, |
|
{ |
|
"epoch": 33.21, |
|
"grad_norm": 6.072080612182617, |
|
"learning_rate": 6.678678678678679e-06, |
|
"loss": 0.5918, |
|
"step": 11060 |
|
}, |
|
{ |
|
"epoch": 33.24, |
|
"grad_norm": 4.9077863693237305, |
|
"learning_rate": 6.675675675675676e-06, |
|
"loss": 0.5253, |
|
"step": 11070 |
|
}, |
|
{ |
|
"epoch": 33.27, |
|
"grad_norm": 3.628720998764038, |
|
"learning_rate": 6.672672672672673e-06, |
|
"loss": 0.4282, |
|
"step": 11080 |
|
}, |
|
{ |
|
"epoch": 33.3, |
|
"grad_norm": 6.934624671936035, |
|
"learning_rate": 6.669669669669671e-06, |
|
"loss": 0.5032, |
|
"step": 11090 |
|
}, |
|
{ |
|
"epoch": 33.33, |
|
"grad_norm": 5.978390693664551, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.5319, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 33.36, |
|
"grad_norm": 7.497355937957764, |
|
"learning_rate": 6.663663663663664e-06, |
|
"loss": 0.5025, |
|
"step": 11110 |
|
}, |
|
{ |
|
"epoch": 33.39, |
|
"grad_norm": 4.441858291625977, |
|
"learning_rate": 6.660660660660662e-06, |
|
"loss": 0.4608, |
|
"step": 11120 |
|
}, |
|
{ |
|
"epoch": 33.42, |
|
"grad_norm": 4.574911117553711, |
|
"learning_rate": 6.657657657657658e-06, |
|
"loss": 0.5327, |
|
"step": 11130 |
|
}, |
|
{ |
|
"epoch": 33.45, |
|
"grad_norm": 4.213235855102539, |
|
"learning_rate": 6.654654654654656e-06, |
|
"loss": 0.4994, |
|
"step": 11140 |
|
}, |
|
{ |
|
"epoch": 33.48, |
|
"grad_norm": 4.079199314117432, |
|
"learning_rate": 6.651651651651652e-06, |
|
"loss": 0.4945, |
|
"step": 11150 |
|
}, |
|
{ |
|
"epoch": 33.51, |
|
"grad_norm": 4.505585193634033, |
|
"learning_rate": 6.648648648648649e-06, |
|
"loss": 0.5462, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 33.54, |
|
"grad_norm": 7.309465408325195, |
|
"learning_rate": 6.6456456456456465e-06, |
|
"loss": 0.4975, |
|
"step": 11170 |
|
}, |
|
{ |
|
"epoch": 33.57, |
|
"grad_norm": 5.451502323150635, |
|
"learning_rate": 6.642642642642643e-06, |
|
"loss": 0.5328, |
|
"step": 11180 |
|
}, |
|
{ |
|
"epoch": 33.6, |
|
"grad_norm": 5.075081825256348, |
|
"learning_rate": 6.63963963963964e-06, |
|
"loss": 0.4684, |
|
"step": 11190 |
|
}, |
|
{ |
|
"epoch": 33.63, |
|
"grad_norm": 15.912128448486328, |
|
"learning_rate": 6.636636636636637e-06, |
|
"loss": 0.4503, |
|
"step": 11200 |
|
}, |
|
{ |
|
"epoch": 33.66, |
|
"grad_norm": 4.8083295822143555, |
|
"learning_rate": 6.633633633633634e-06, |
|
"loss": 0.5233, |
|
"step": 11210 |
|
}, |
|
{ |
|
"epoch": 33.69, |
|
"grad_norm": 6.54976224899292, |
|
"learning_rate": 6.630630630630631e-06, |
|
"loss": 0.564, |
|
"step": 11220 |
|
}, |
|
{ |
|
"epoch": 33.72, |
|
"grad_norm": 5.533057689666748, |
|
"learning_rate": 6.627627627627629e-06, |
|
"loss": 0.5073, |
|
"step": 11230 |
|
}, |
|
{ |
|
"epoch": 33.75, |
|
"grad_norm": 5.056384563446045, |
|
"learning_rate": 6.6246246246246246e-06, |
|
"loss": 0.5933, |
|
"step": 11240 |
|
}, |
|
{ |
|
"epoch": 33.78, |
|
"grad_norm": 4.461758136749268, |
|
"learning_rate": 6.621621621621622e-06, |
|
"loss": 0.5609, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 33.81, |
|
"grad_norm": 4.772905349731445, |
|
"learning_rate": 6.6186186186186195e-06, |
|
"loss": 0.5211, |
|
"step": 11260 |
|
}, |
|
{ |
|
"epoch": 33.84, |
|
"grad_norm": 4.28291654586792, |
|
"learning_rate": 6.615615615615616e-06, |
|
"loss": 0.5741, |
|
"step": 11270 |
|
}, |
|
{ |
|
"epoch": 33.87, |
|
"grad_norm": 4.813282489776611, |
|
"learning_rate": 6.612612612612614e-06, |
|
"loss": 0.5078, |
|
"step": 11280 |
|
}, |
|
{ |
|
"epoch": 33.9, |
|
"grad_norm": 6.550593852996826, |
|
"learning_rate": 6.609609609609609e-06, |
|
"loss": 0.5039, |
|
"step": 11290 |
|
}, |
|
{ |
|
"epoch": 33.93, |
|
"grad_norm": 3.385647773742676, |
|
"learning_rate": 6.606606606606607e-06, |
|
"loss": 0.498, |
|
"step": 11300 |
|
}, |
|
{ |
|
"epoch": 33.96, |
|
"grad_norm": 6.887855529785156, |
|
"learning_rate": 6.603603603603604e-06, |
|
"loss": 0.5166, |
|
"step": 11310 |
|
}, |
|
{ |
|
"epoch": 33.99, |
|
"grad_norm": 4.921820163726807, |
|
"learning_rate": 6.600600600600601e-06, |
|
"loss": 0.4784, |
|
"step": 11320 |
|
}, |
|
{ |
|
"epoch": 34.0, |
|
"eval_accuracy": 0.918, |
|
"eval_loss": 0.3249960243701935, |
|
"eval_runtime": 27.0964, |
|
"eval_samples_per_second": 369.052, |
|
"eval_steps_per_second": 1.476, |
|
"step": 11322 |
|
}, |
|
{ |
|
"epoch": 34.02, |
|
"grad_norm": 8.019493103027344, |
|
"learning_rate": 6.5975975975975985e-06, |
|
"loss": 0.5349, |
|
"step": 11330 |
|
}, |
|
{ |
|
"epoch": 34.05, |
|
"grad_norm": 5.084954261779785, |
|
"learning_rate": 6.594594594594595e-06, |
|
"loss": 0.4918, |
|
"step": 11340 |
|
}, |
|
{ |
|
"epoch": 34.08, |
|
"grad_norm": 4.809759140014648, |
|
"learning_rate": 6.591591591591592e-06, |
|
"loss": 0.5505, |
|
"step": 11350 |
|
}, |
|
{ |
|
"epoch": 34.11, |
|
"grad_norm": 4.049439430236816, |
|
"learning_rate": 6.588588588588589e-06, |
|
"loss": 0.4728, |
|
"step": 11360 |
|
}, |
|
{ |
|
"epoch": 34.14, |
|
"grad_norm": 4.804196834564209, |
|
"learning_rate": 6.585585585585587e-06, |
|
"loss": 0.4778, |
|
"step": 11370 |
|
}, |
|
{ |
|
"epoch": 34.17, |
|
"grad_norm": 4.499908924102783, |
|
"learning_rate": 6.5825825825825825e-06, |
|
"loss": 0.4996, |
|
"step": 11380 |
|
}, |
|
{ |
|
"epoch": 34.2, |
|
"grad_norm": 5.686038970947266, |
|
"learning_rate": 6.57957957957958e-06, |
|
"loss": 0.5266, |
|
"step": 11390 |
|
}, |
|
{ |
|
"epoch": 34.23, |
|
"grad_norm": 6.619571208953857, |
|
"learning_rate": 6.5765765765765775e-06, |
|
"loss": 0.537, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 34.26, |
|
"grad_norm": 4.216808319091797, |
|
"learning_rate": 6.573573573573574e-06, |
|
"loss": 0.4832, |
|
"step": 11410 |
|
}, |
|
{ |
|
"epoch": 34.29, |
|
"grad_norm": 5.596823692321777, |
|
"learning_rate": 6.5705705705705716e-06, |
|
"loss": 0.5208, |
|
"step": 11420 |
|
}, |
|
{ |
|
"epoch": 34.32, |
|
"grad_norm": 3.564544677734375, |
|
"learning_rate": 6.567567567567567e-06, |
|
"loss": 0.4957, |
|
"step": 11430 |
|
}, |
|
{ |
|
"epoch": 34.35, |
|
"grad_norm": 2.9732589721679688, |
|
"learning_rate": 6.564564564564565e-06, |
|
"loss": 0.4692, |
|
"step": 11440 |
|
}, |
|
{ |
|
"epoch": 34.38, |
|
"grad_norm": 4.947221755981445, |
|
"learning_rate": 6.561561561561562e-06, |
|
"loss": 0.5326, |
|
"step": 11450 |
|
}, |
|
{ |
|
"epoch": 34.41, |
|
"grad_norm": 4.498212814331055, |
|
"learning_rate": 6.558558558558559e-06, |
|
"loss": 0.4989, |
|
"step": 11460 |
|
}, |
|
{ |
|
"epoch": 34.44, |
|
"grad_norm": 6.685206413269043, |
|
"learning_rate": 6.555555555555556e-06, |
|
"loss": 0.5345, |
|
"step": 11470 |
|
}, |
|
{ |
|
"epoch": 34.47, |
|
"grad_norm": 5.036656379699707, |
|
"learning_rate": 6.552552552552554e-06, |
|
"loss": 0.5189, |
|
"step": 11480 |
|
}, |
|
{ |
|
"epoch": 34.5, |
|
"grad_norm": 5.761340141296387, |
|
"learning_rate": 6.54954954954955e-06, |
|
"loss": 0.4814, |
|
"step": 11490 |
|
}, |
|
{ |
|
"epoch": 34.53, |
|
"grad_norm": 8.407565116882324, |
|
"learning_rate": 6.546546546546547e-06, |
|
"loss": 0.4944, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 34.56, |
|
"grad_norm": 4.169130802154541, |
|
"learning_rate": 6.543543543543545e-06, |
|
"loss": 0.5854, |
|
"step": 11510 |
|
}, |
|
{ |
|
"epoch": 34.59, |
|
"grad_norm": 5.203417778015137, |
|
"learning_rate": 6.540540540540541e-06, |
|
"loss": 0.5219, |
|
"step": 11520 |
|
}, |
|
{ |
|
"epoch": 34.62, |
|
"grad_norm": 6.78031063079834, |
|
"learning_rate": 6.537537537537538e-06, |
|
"loss": 0.5629, |
|
"step": 11530 |
|
}, |
|
{ |
|
"epoch": 34.65, |
|
"grad_norm": 5.361577033996582, |
|
"learning_rate": 6.5345345345345345e-06, |
|
"loss": 0.4669, |
|
"step": 11540 |
|
}, |
|
{ |
|
"epoch": 34.68, |
|
"grad_norm": 3.6456704139709473, |
|
"learning_rate": 6.531531531531532e-06, |
|
"loss": 0.5041, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 34.71, |
|
"grad_norm": 4.542550086975098, |
|
"learning_rate": 6.5285285285285295e-06, |
|
"loss": 0.525, |
|
"step": 11560 |
|
}, |
|
{ |
|
"epoch": 34.74, |
|
"grad_norm": 5.005729675292969, |
|
"learning_rate": 6.525525525525525e-06, |
|
"loss": 0.4992, |
|
"step": 11570 |
|
}, |
|
{ |
|
"epoch": 34.77, |
|
"grad_norm": 4.900482654571533, |
|
"learning_rate": 6.522522522522523e-06, |
|
"loss": 0.5136, |
|
"step": 11580 |
|
}, |
|
{ |
|
"epoch": 34.8, |
|
"grad_norm": 5.906069755554199, |
|
"learning_rate": 6.51951951951952e-06, |
|
"loss": 0.5294, |
|
"step": 11590 |
|
}, |
|
{ |
|
"epoch": 34.83, |
|
"grad_norm": 3.6246538162231445, |
|
"learning_rate": 6.516516516516517e-06, |
|
"loss": 0.4772, |
|
"step": 11600 |
|
}, |
|
{ |
|
"epoch": 34.86, |
|
"grad_norm": 4.865238666534424, |
|
"learning_rate": 6.513513513513514e-06, |
|
"loss": 0.5077, |
|
"step": 11610 |
|
}, |
|
{ |
|
"epoch": 34.89, |
|
"grad_norm": 4.1368021965026855, |
|
"learning_rate": 6.510510510510512e-06, |
|
"loss": 0.5087, |
|
"step": 11620 |
|
}, |
|
{ |
|
"epoch": 34.92, |
|
"grad_norm": 4.264941215515137, |
|
"learning_rate": 6.507507507507508e-06, |
|
"loss": 0.5117, |
|
"step": 11630 |
|
}, |
|
{ |
|
"epoch": 34.95, |
|
"grad_norm": 3.78835129737854, |
|
"learning_rate": 6.504504504504505e-06, |
|
"loss": 0.4964, |
|
"step": 11640 |
|
}, |
|
{ |
|
"epoch": 34.98, |
|
"grad_norm": 5.336776256561279, |
|
"learning_rate": 6.501501501501502e-06, |
|
"loss": 0.5088, |
|
"step": 11650 |
|
}, |
|
{ |
|
"epoch": 35.0, |
|
"eval_accuracy": 0.9195, |
|
"eval_loss": 0.31883320212364197, |
|
"eval_runtime": 26.4852, |
|
"eval_samples_per_second": 377.57, |
|
"eval_steps_per_second": 1.51, |
|
"step": 11655 |
|
}, |
|
{ |
|
"epoch": 35.02, |
|
"grad_norm": 5.168522834777832, |
|
"learning_rate": 6.498498498498499e-06, |
|
"loss": 0.493, |
|
"step": 11660 |
|
}, |
|
{ |
|
"epoch": 35.05, |
|
"grad_norm": 5.572307586669922, |
|
"learning_rate": 6.495495495495497e-06, |
|
"loss": 0.4604, |
|
"step": 11670 |
|
}, |
|
{ |
|
"epoch": 35.08, |
|
"grad_norm": 4.209198474884033, |
|
"learning_rate": 6.4924924924924924e-06, |
|
"loss": 0.5438, |
|
"step": 11680 |
|
}, |
|
{ |
|
"epoch": 35.11, |
|
"grad_norm": 4.411211013793945, |
|
"learning_rate": 6.48948948948949e-06, |
|
"loss": 0.4837, |
|
"step": 11690 |
|
}, |
|
{ |
|
"epoch": 35.14, |
|
"grad_norm": 4.3489789962768555, |
|
"learning_rate": 6.486486486486487e-06, |
|
"loss": 0.5236, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 35.17, |
|
"grad_norm": 4.487356662750244, |
|
"learning_rate": 6.483483483483484e-06, |
|
"loss": 0.5342, |
|
"step": 11710 |
|
}, |
|
{ |
|
"epoch": 35.2, |
|
"grad_norm": 4.561159133911133, |
|
"learning_rate": 6.480480480480481e-06, |
|
"loss": 0.4909, |
|
"step": 11720 |
|
}, |
|
{ |
|
"epoch": 35.23, |
|
"grad_norm": 4.875969886779785, |
|
"learning_rate": 6.477477477477478e-06, |
|
"loss": 0.5449, |
|
"step": 11730 |
|
}, |
|
{ |
|
"epoch": 35.26, |
|
"grad_norm": 4.210017204284668, |
|
"learning_rate": 6.474474474474475e-06, |
|
"loss": 0.5407, |
|
"step": 11740 |
|
}, |
|
{ |
|
"epoch": 35.29, |
|
"grad_norm": 4.259859561920166, |
|
"learning_rate": 6.471471471471472e-06, |
|
"loss": 0.4857, |
|
"step": 11750 |
|
}, |
|
{ |
|
"epoch": 35.32, |
|
"grad_norm": 4.824864864349365, |
|
"learning_rate": 6.46846846846847e-06, |
|
"loss": 0.5466, |
|
"step": 11760 |
|
}, |
|
{ |
|
"epoch": 35.35, |
|
"grad_norm": 5.433022499084473, |
|
"learning_rate": 6.4654654654654655e-06, |
|
"loss": 0.4856, |
|
"step": 11770 |
|
}, |
|
{ |
|
"epoch": 35.38, |
|
"grad_norm": 4.4445343017578125, |
|
"learning_rate": 6.462462462462463e-06, |
|
"loss": 0.5254, |
|
"step": 11780 |
|
}, |
|
{ |
|
"epoch": 35.41, |
|
"grad_norm": 5.057773590087891, |
|
"learning_rate": 6.45945945945946e-06, |
|
"loss": 0.5527, |
|
"step": 11790 |
|
}, |
|
{ |
|
"epoch": 35.44, |
|
"grad_norm": 3.377410650253296, |
|
"learning_rate": 6.456456456456457e-06, |
|
"loss": 0.4687, |
|
"step": 11800 |
|
}, |
|
{ |
|
"epoch": 35.47, |
|
"grad_norm": 6.953456878662109, |
|
"learning_rate": 6.453453453453455e-06, |
|
"loss": 0.5279, |
|
"step": 11810 |
|
}, |
|
{ |
|
"epoch": 35.5, |
|
"grad_norm": 4.230295181274414, |
|
"learning_rate": 6.45045045045045e-06, |
|
"loss": 0.5659, |
|
"step": 11820 |
|
}, |
|
{ |
|
"epoch": 35.53, |
|
"grad_norm": 4.3861918449401855, |
|
"learning_rate": 6.447447447447448e-06, |
|
"loss": 0.5215, |
|
"step": 11830 |
|
}, |
|
{ |
|
"epoch": 35.56, |
|
"grad_norm": 5.295562267303467, |
|
"learning_rate": 6.444444444444445e-06, |
|
"loss": 0.5083, |
|
"step": 11840 |
|
}, |
|
{ |
|
"epoch": 35.59, |
|
"grad_norm": 5.285841941833496, |
|
"learning_rate": 6.441441441441442e-06, |
|
"loss": 0.5354, |
|
"step": 11850 |
|
}, |
|
{ |
|
"epoch": 35.62, |
|
"grad_norm": 3.731440305709839, |
|
"learning_rate": 6.4384384384384394e-06, |
|
"loss": 0.4666, |
|
"step": 11860 |
|
}, |
|
{ |
|
"epoch": 35.65, |
|
"grad_norm": 4.5777082443237305, |
|
"learning_rate": 6.435435435435436e-06, |
|
"loss": 0.537, |
|
"step": 11870 |
|
}, |
|
{ |
|
"epoch": 35.68, |
|
"grad_norm": 5.057626724243164, |
|
"learning_rate": 6.432432432432433e-06, |
|
"loss": 0.5012, |
|
"step": 11880 |
|
}, |
|
{ |
|
"epoch": 35.71, |
|
"grad_norm": 3.5204951763153076, |
|
"learning_rate": 6.42942942942943e-06, |
|
"loss": 0.5544, |
|
"step": 11890 |
|
}, |
|
{ |
|
"epoch": 35.74, |
|
"grad_norm": 4.735846996307373, |
|
"learning_rate": 6.426426426426427e-06, |
|
"loss": 0.5025, |
|
"step": 11900 |
|
}, |
|
{ |
|
"epoch": 35.77, |
|
"grad_norm": 4.379212379455566, |
|
"learning_rate": 6.4234234234234234e-06, |
|
"loss": 0.4677, |
|
"step": 11910 |
|
}, |
|
{ |
|
"epoch": 35.8, |
|
"grad_norm": 4.914200782775879, |
|
"learning_rate": 6.420420420420421e-06, |
|
"loss": 0.5281, |
|
"step": 11920 |
|
}, |
|
{ |
|
"epoch": 35.83, |
|
"grad_norm": 5.16389274597168, |
|
"learning_rate": 6.4174174174174176e-06, |
|
"loss": 0.467, |
|
"step": 11930 |
|
}, |
|
{ |
|
"epoch": 35.86, |
|
"grad_norm": 4.597350597381592, |
|
"learning_rate": 6.414414414414415e-06, |
|
"loss": 0.5338, |
|
"step": 11940 |
|
}, |
|
{ |
|
"epoch": 35.89, |
|
"grad_norm": 4.771205902099609, |
|
"learning_rate": 6.4114114114114125e-06, |
|
"loss": 0.4937, |
|
"step": 11950 |
|
}, |
|
{ |
|
"epoch": 35.92, |
|
"grad_norm": 5.844981670379639, |
|
"learning_rate": 6.408408408408408e-06, |
|
"loss": 0.5128, |
|
"step": 11960 |
|
}, |
|
{ |
|
"epoch": 35.95, |
|
"grad_norm": 4.290022373199463, |
|
"learning_rate": 6.405405405405406e-06, |
|
"loss": 0.4717, |
|
"step": 11970 |
|
}, |
|
{ |
|
"epoch": 35.98, |
|
"grad_norm": 3.495290994644165, |
|
"learning_rate": 6.402402402402403e-06, |
|
"loss": 0.5121, |
|
"step": 11980 |
|
}, |
|
{ |
|
"epoch": 36.0, |
|
"eval_accuracy": 0.9172, |
|
"eval_loss": 0.31883522868156433, |
|
"eval_runtime": 26.15, |
|
"eval_samples_per_second": 382.409, |
|
"eval_steps_per_second": 1.53, |
|
"step": 11988 |
|
}, |
|
{ |
|
"epoch": 36.01, |
|
"grad_norm": 4.688857555389404, |
|
"learning_rate": 6.3993993993994e-06, |
|
"loss": 0.4498, |
|
"step": 11990 |
|
}, |
|
{ |
|
"epoch": 36.04, |
|
"grad_norm": 4.36939001083374, |
|
"learning_rate": 6.396396396396397e-06, |
|
"loss": 0.4247, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 36.07, |
|
"grad_norm": 3.401184558868408, |
|
"learning_rate": 6.393393393393393e-06, |
|
"loss": 0.5333, |
|
"step": 12010 |
|
}, |
|
{ |
|
"epoch": 36.1, |
|
"grad_norm": 4.953362941741943, |
|
"learning_rate": 6.390390390390391e-06, |
|
"loss": 0.4527, |
|
"step": 12020 |
|
}, |
|
{ |
|
"epoch": 36.13, |
|
"grad_norm": 3.899674892425537, |
|
"learning_rate": 6.387387387387388e-06, |
|
"loss": 0.5379, |
|
"step": 12030 |
|
}, |
|
{ |
|
"epoch": 36.16, |
|
"grad_norm": 3.688502550125122, |
|
"learning_rate": 6.384384384384385e-06, |
|
"loss": 0.5328, |
|
"step": 12040 |
|
}, |
|
{ |
|
"epoch": 36.19, |
|
"grad_norm": 3.257268190383911, |
|
"learning_rate": 6.381381381381382e-06, |
|
"loss": 0.4246, |
|
"step": 12050 |
|
}, |
|
{ |
|
"epoch": 36.22, |
|
"grad_norm": 3.4354043006896973, |
|
"learning_rate": 6.378378378378379e-06, |
|
"loss": 0.4765, |
|
"step": 12060 |
|
}, |
|
{ |
|
"epoch": 36.25, |
|
"grad_norm": 5.303877830505371, |
|
"learning_rate": 6.3753753753753755e-06, |
|
"loss": 0.431, |
|
"step": 12070 |
|
}, |
|
{ |
|
"epoch": 36.28, |
|
"grad_norm": 4.975677490234375, |
|
"learning_rate": 6.372372372372373e-06, |
|
"loss": 0.4844, |
|
"step": 12080 |
|
}, |
|
{ |
|
"epoch": 36.31, |
|
"grad_norm": 4.587069034576416, |
|
"learning_rate": 6.3693693693693704e-06, |
|
"loss": 0.4586, |
|
"step": 12090 |
|
}, |
|
{ |
|
"epoch": 36.34, |
|
"grad_norm": 4.4542036056518555, |
|
"learning_rate": 6.366366366366366e-06, |
|
"loss": 0.4543, |
|
"step": 12100 |
|
}, |
|
{ |
|
"epoch": 36.37, |
|
"grad_norm": 4.5289177894592285, |
|
"learning_rate": 6.363363363363364e-06, |
|
"loss": 0.4921, |
|
"step": 12110 |
|
}, |
|
{ |
|
"epoch": 36.4, |
|
"grad_norm": 3.766392230987549, |
|
"learning_rate": 6.360360360360361e-06, |
|
"loss": 0.5229, |
|
"step": 12120 |
|
}, |
|
{ |
|
"epoch": 36.43, |
|
"grad_norm": 4.781548023223877, |
|
"learning_rate": 6.357357357357358e-06, |
|
"loss": 0.5054, |
|
"step": 12130 |
|
}, |
|
{ |
|
"epoch": 36.46, |
|
"grad_norm": 3.786529541015625, |
|
"learning_rate": 6.354354354354355e-06, |
|
"loss": 0.4808, |
|
"step": 12140 |
|
}, |
|
{ |
|
"epoch": 36.49, |
|
"grad_norm": 4.112846374511719, |
|
"learning_rate": 6.351351351351351e-06, |
|
"loss": 0.4725, |
|
"step": 12150 |
|
}, |
|
{ |
|
"epoch": 36.52, |
|
"grad_norm": 5.792331218719482, |
|
"learning_rate": 6.3483483483483485e-06, |
|
"loss": 0.495, |
|
"step": 12160 |
|
}, |
|
{ |
|
"epoch": 36.55, |
|
"grad_norm": 11.136505126953125, |
|
"learning_rate": 6.345345345345346e-06, |
|
"loss": 0.5116, |
|
"step": 12170 |
|
}, |
|
{ |
|
"epoch": 36.58, |
|
"grad_norm": 4.237509727478027, |
|
"learning_rate": 6.342342342342343e-06, |
|
"loss": 0.5132, |
|
"step": 12180 |
|
}, |
|
{ |
|
"epoch": 36.61, |
|
"grad_norm": 5.541947841644287, |
|
"learning_rate": 6.33933933933934e-06, |
|
"loss": 0.4997, |
|
"step": 12190 |
|
}, |
|
{ |
|
"epoch": 36.64, |
|
"grad_norm": 9.812138557434082, |
|
"learning_rate": 6.336336336336338e-06, |
|
"loss": 0.4906, |
|
"step": 12200 |
|
}, |
|
{ |
|
"epoch": 36.67, |
|
"grad_norm": 5.890273094177246, |
|
"learning_rate": 6.333333333333333e-06, |
|
"loss": 0.4977, |
|
"step": 12210 |
|
}, |
|
{ |
|
"epoch": 36.7, |
|
"grad_norm": 4.361057758331299, |
|
"learning_rate": 6.330330330330331e-06, |
|
"loss": 0.4694, |
|
"step": 12220 |
|
}, |
|
{ |
|
"epoch": 36.73, |
|
"grad_norm": 6.098171234130859, |
|
"learning_rate": 6.327327327327328e-06, |
|
"loss": 0.495, |
|
"step": 12230 |
|
}, |
|
{ |
|
"epoch": 36.76, |
|
"grad_norm": 3.5321567058563232, |
|
"learning_rate": 6.324324324324325e-06, |
|
"loss": 0.4492, |
|
"step": 12240 |
|
}, |
|
{ |
|
"epoch": 36.79, |
|
"grad_norm": 5.207321643829346, |
|
"learning_rate": 6.321321321321322e-06, |
|
"loss": 0.4912, |
|
"step": 12250 |
|
}, |
|
{ |
|
"epoch": 36.82, |
|
"grad_norm": 5.184374809265137, |
|
"learning_rate": 6.318318318318318e-06, |
|
"loss": 0.512, |
|
"step": 12260 |
|
}, |
|
{ |
|
"epoch": 36.85, |
|
"grad_norm": 4.539032459259033, |
|
"learning_rate": 6.315315315315316e-06, |
|
"loss": 0.444, |
|
"step": 12270 |
|
}, |
|
{ |
|
"epoch": 36.88, |
|
"grad_norm": 6.274582862854004, |
|
"learning_rate": 6.312312312312313e-06, |
|
"loss": 0.4954, |
|
"step": 12280 |
|
}, |
|
{ |
|
"epoch": 36.91, |
|
"grad_norm": 4.470296859741211, |
|
"learning_rate": 6.309309309309309e-06, |
|
"loss": 0.4896, |
|
"step": 12290 |
|
}, |
|
{ |
|
"epoch": 36.94, |
|
"grad_norm": 4.493797779083252, |
|
"learning_rate": 6.3063063063063065e-06, |
|
"loss": 0.4783, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 36.97, |
|
"grad_norm": 5.021604537963867, |
|
"learning_rate": 6.303303303303304e-06, |
|
"loss": 0.5179, |
|
"step": 12310 |
|
}, |
|
{ |
|
"epoch": 37.0, |
|
"grad_norm": 4.532876014709473, |
|
"learning_rate": 6.300300300300301e-06, |
|
"loss": 0.4734, |
|
"step": 12320 |
|
}, |
|
{ |
|
"epoch": 37.0, |
|
"eval_accuracy": 0.9193, |
|
"eval_loss": 0.3174118101596832, |
|
"eval_runtime": 25.9732, |
|
"eval_samples_per_second": 385.012, |
|
"eval_steps_per_second": 1.54, |
|
"step": 12321 |
|
}, |
|
{ |
|
"epoch": 37.03, |
|
"grad_norm": 4.60337495803833, |
|
"learning_rate": 6.297297297297298e-06, |
|
"loss": 0.467, |
|
"step": 12330 |
|
}, |
|
{ |
|
"epoch": 37.06, |
|
"grad_norm": 4.364501476287842, |
|
"learning_rate": 6.2942942942942955e-06, |
|
"loss": 0.457, |
|
"step": 12340 |
|
}, |
|
{ |
|
"epoch": 37.09, |
|
"grad_norm": 6.581215858459473, |
|
"learning_rate": 6.291291291291291e-06, |
|
"loss": 0.483, |
|
"step": 12350 |
|
}, |
|
{ |
|
"epoch": 37.12, |
|
"grad_norm": 5.2674150466918945, |
|
"learning_rate": 6.288288288288289e-06, |
|
"loss": 0.5288, |
|
"step": 12360 |
|
}, |
|
{ |
|
"epoch": 37.15, |
|
"grad_norm": 5.03020715713501, |
|
"learning_rate": 6.2852852852852854e-06, |
|
"loss": 0.4659, |
|
"step": 12370 |
|
}, |
|
{ |
|
"epoch": 37.18, |
|
"grad_norm": 4.408522605895996, |
|
"learning_rate": 6.282282282282283e-06, |
|
"loss": 0.4963, |
|
"step": 12380 |
|
}, |
|
{ |
|
"epoch": 37.21, |
|
"grad_norm": 4.20792818069458, |
|
"learning_rate": 6.27927927927928e-06, |
|
"loss": 0.5078, |
|
"step": 12390 |
|
}, |
|
{ |
|
"epoch": 37.24, |
|
"grad_norm": 3.7951042652130127, |
|
"learning_rate": 6.276276276276276e-06, |
|
"loss": 0.5024, |
|
"step": 12400 |
|
}, |
|
{ |
|
"epoch": 37.27, |
|
"grad_norm": 5.275200366973877, |
|
"learning_rate": 6.273273273273274e-06, |
|
"loss": 0.4775, |
|
"step": 12410 |
|
}, |
|
{ |
|
"epoch": 37.3, |
|
"grad_norm": 4.369055271148682, |
|
"learning_rate": 6.270270270270271e-06, |
|
"loss": 0.4902, |
|
"step": 12420 |
|
}, |
|
{ |
|
"epoch": 37.33, |
|
"grad_norm": 6.0532331466674805, |
|
"learning_rate": 6.267267267267268e-06, |
|
"loss": 0.4808, |
|
"step": 12430 |
|
}, |
|
{ |
|
"epoch": 37.36, |
|
"grad_norm": 5.8879594802856445, |
|
"learning_rate": 6.264264264264264e-06, |
|
"loss": 0.506, |
|
"step": 12440 |
|
}, |
|
{ |
|
"epoch": 37.39, |
|
"grad_norm": 4.580647945404053, |
|
"learning_rate": 6.261261261261262e-06, |
|
"loss": 0.4653, |
|
"step": 12450 |
|
}, |
|
{ |
|
"epoch": 37.42, |
|
"grad_norm": 4.638852119445801, |
|
"learning_rate": 6.2582582582582585e-06, |
|
"loss": 0.4214, |
|
"step": 12460 |
|
}, |
|
{ |
|
"epoch": 37.45, |
|
"grad_norm": 3.8956124782562256, |
|
"learning_rate": 6.255255255255256e-06, |
|
"loss": 0.5519, |
|
"step": 12470 |
|
}, |
|
{ |
|
"epoch": 37.48, |
|
"grad_norm": 6.023074150085449, |
|
"learning_rate": 6.2522522522522535e-06, |
|
"loss": 0.5331, |
|
"step": 12480 |
|
}, |
|
{ |
|
"epoch": 37.51, |
|
"grad_norm": 5.346560001373291, |
|
"learning_rate": 6.249249249249249e-06, |
|
"loss": 0.4302, |
|
"step": 12490 |
|
}, |
|
{ |
|
"epoch": 37.54, |
|
"grad_norm": 3.9033756256103516, |
|
"learning_rate": 6.246246246246247e-06, |
|
"loss": 0.4819, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 37.57, |
|
"grad_norm": 4.675529479980469, |
|
"learning_rate": 6.243243243243243e-06, |
|
"loss": 0.4787, |
|
"step": 12510 |
|
}, |
|
{ |
|
"epoch": 37.6, |
|
"grad_norm": 4.389764308929443, |
|
"learning_rate": 6.240240240240241e-06, |
|
"loss": 0.5223, |
|
"step": 12520 |
|
}, |
|
{ |
|
"epoch": 37.63, |
|
"grad_norm": 4.986979007720947, |
|
"learning_rate": 6.237237237237238e-06, |
|
"loss": 0.5087, |
|
"step": 12530 |
|
}, |
|
{ |
|
"epoch": 37.66, |
|
"grad_norm": 5.554018020629883, |
|
"learning_rate": 6.234234234234234e-06, |
|
"loss": 0.4889, |
|
"step": 12540 |
|
}, |
|
{ |
|
"epoch": 37.69, |
|
"grad_norm": 4.088570594787598, |
|
"learning_rate": 6.2312312312312316e-06, |
|
"loss": 0.4818, |
|
"step": 12550 |
|
}, |
|
{ |
|
"epoch": 37.72, |
|
"grad_norm": 4.196356773376465, |
|
"learning_rate": 6.228228228228229e-06, |
|
"loss": 0.4197, |
|
"step": 12560 |
|
}, |
|
{ |
|
"epoch": 37.75, |
|
"grad_norm": 4.250049591064453, |
|
"learning_rate": 6.225225225225226e-06, |
|
"loss": 0.45, |
|
"step": 12570 |
|
}, |
|
{ |
|
"epoch": 37.78, |
|
"grad_norm": 3.9590983390808105, |
|
"learning_rate": 6.222222222222223e-06, |
|
"loss": 0.4352, |
|
"step": 12580 |
|
}, |
|
{ |
|
"epoch": 37.81, |
|
"grad_norm": 4.1846442222595215, |
|
"learning_rate": 6.219219219219221e-06, |
|
"loss": 0.4968, |
|
"step": 12590 |
|
}, |
|
{ |
|
"epoch": 37.84, |
|
"grad_norm": 4.758632183074951, |
|
"learning_rate": 6.2162162162162164e-06, |
|
"loss": 0.4516, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 37.87, |
|
"grad_norm": 4.275233745574951, |
|
"learning_rate": 6.213213213213214e-06, |
|
"loss": 0.4844, |
|
"step": 12610 |
|
}, |
|
{ |
|
"epoch": 37.9, |
|
"grad_norm": 4.182071685791016, |
|
"learning_rate": 6.2102102102102105e-06, |
|
"loss": 0.4612, |
|
"step": 12620 |
|
}, |
|
{ |
|
"epoch": 37.93, |
|
"grad_norm": 4.859713077545166, |
|
"learning_rate": 6.207207207207208e-06, |
|
"loss": 0.5172, |
|
"step": 12630 |
|
}, |
|
{ |
|
"epoch": 37.96, |
|
"grad_norm": 4.407437801361084, |
|
"learning_rate": 6.204204204204205e-06, |
|
"loss": 0.5203, |
|
"step": 12640 |
|
}, |
|
{ |
|
"epoch": 37.99, |
|
"grad_norm": 5.01877498626709, |
|
"learning_rate": 6.201201201201201e-06, |
|
"loss": 0.5554, |
|
"step": 12650 |
|
}, |
|
{ |
|
"epoch": 38.0, |
|
"eval_accuracy": 0.9196, |
|
"eval_loss": 0.31083497405052185, |
|
"eval_runtime": 26.225, |
|
"eval_samples_per_second": 381.315, |
|
"eval_steps_per_second": 1.525, |
|
"step": 12654 |
|
}, |
|
{ |
|
"epoch": 38.02, |
|
"grad_norm": 6.460936546325684, |
|
"learning_rate": 6.198198198198199e-06, |
|
"loss": 0.4583, |
|
"step": 12660 |
|
}, |
|
{ |
|
"epoch": 38.05, |
|
"grad_norm": 5.002228260040283, |
|
"learning_rate": 6.195195195195196e-06, |
|
"loss": 0.5129, |
|
"step": 12670 |
|
}, |
|
{ |
|
"epoch": 38.08, |
|
"grad_norm": 5.627586841583252, |
|
"learning_rate": 6.192192192192192e-06, |
|
"loss": 0.4622, |
|
"step": 12680 |
|
}, |
|
{ |
|
"epoch": 38.11, |
|
"grad_norm": 3.3762929439544678, |
|
"learning_rate": 6.1891891891891895e-06, |
|
"loss": 0.4751, |
|
"step": 12690 |
|
}, |
|
{ |
|
"epoch": 38.14, |
|
"grad_norm": 5.8351545333862305, |
|
"learning_rate": 6.186186186186187e-06, |
|
"loss": 0.4964, |
|
"step": 12700 |
|
}, |
|
{ |
|
"epoch": 38.17, |
|
"grad_norm": 3.943376302719116, |
|
"learning_rate": 6.183183183183184e-06, |
|
"loss": 0.4533, |
|
"step": 12710 |
|
}, |
|
{ |
|
"epoch": 38.2, |
|
"grad_norm": 5.2256269454956055, |
|
"learning_rate": 6.180180180180181e-06, |
|
"loss": 0.5179, |
|
"step": 12720 |
|
}, |
|
{ |
|
"epoch": 38.23, |
|
"grad_norm": 4.85332727432251, |
|
"learning_rate": 6.177177177177177e-06, |
|
"loss": 0.4436, |
|
"step": 12730 |
|
}, |
|
{ |
|
"epoch": 38.26, |
|
"grad_norm": 32.34022521972656, |
|
"learning_rate": 6.174174174174174e-06, |
|
"loss": 0.4833, |
|
"step": 12740 |
|
}, |
|
{ |
|
"epoch": 38.29, |
|
"grad_norm": 5.060525417327881, |
|
"learning_rate": 6.171171171171172e-06, |
|
"loss": 0.4905, |
|
"step": 12750 |
|
}, |
|
{ |
|
"epoch": 38.32, |
|
"grad_norm": 6.252566814422607, |
|
"learning_rate": 6.1681681681681685e-06, |
|
"loss": 0.4897, |
|
"step": 12760 |
|
}, |
|
{ |
|
"epoch": 38.35, |
|
"grad_norm": 5.269845485687256, |
|
"learning_rate": 6.165165165165166e-06, |
|
"loss": 0.44, |
|
"step": 12770 |
|
}, |
|
{ |
|
"epoch": 38.38, |
|
"grad_norm": 4.707264423370361, |
|
"learning_rate": 6.162162162162163e-06, |
|
"loss": 0.4289, |
|
"step": 12780 |
|
}, |
|
{ |
|
"epoch": 38.41, |
|
"grad_norm": 4.847848892211914, |
|
"learning_rate": 6.159159159159159e-06, |
|
"loss": 0.4568, |
|
"step": 12790 |
|
}, |
|
{ |
|
"epoch": 38.44, |
|
"grad_norm": 6.254144668579102, |
|
"learning_rate": 6.156156156156157e-06, |
|
"loss": 0.4907, |
|
"step": 12800 |
|
}, |
|
{ |
|
"epoch": 38.47, |
|
"grad_norm": 3.5328307151794434, |
|
"learning_rate": 6.153153153153154e-06, |
|
"loss": 0.4832, |
|
"step": 12810 |
|
}, |
|
{ |
|
"epoch": 38.5, |
|
"grad_norm": 3.0988495349884033, |
|
"learning_rate": 6.150150150150151e-06, |
|
"loss": 0.4956, |
|
"step": 12820 |
|
}, |
|
{ |
|
"epoch": 38.53, |
|
"grad_norm": 4.955842971801758, |
|
"learning_rate": 6.147147147147147e-06, |
|
"loss": 0.46, |
|
"step": 12830 |
|
}, |
|
{ |
|
"epoch": 38.56, |
|
"grad_norm": 6.138220310211182, |
|
"learning_rate": 6.144144144144145e-06, |
|
"loss": 0.494, |
|
"step": 12840 |
|
}, |
|
{ |
|
"epoch": 38.59, |
|
"grad_norm": 5.635361194610596, |
|
"learning_rate": 6.1411411411411415e-06, |
|
"loss": 0.4608, |
|
"step": 12850 |
|
}, |
|
{ |
|
"epoch": 38.62, |
|
"grad_norm": 4.4537458419799805, |
|
"learning_rate": 6.138138138138139e-06, |
|
"loss": 0.5117, |
|
"step": 12860 |
|
}, |
|
{ |
|
"epoch": 38.65, |
|
"grad_norm": 5.3862481117248535, |
|
"learning_rate": 6.135135135135135e-06, |
|
"loss": 0.4349, |
|
"step": 12870 |
|
}, |
|
{ |
|
"epoch": 38.68, |
|
"grad_norm": 4.768416404724121, |
|
"learning_rate": 6.132132132132132e-06, |
|
"loss": 0.4979, |
|
"step": 12880 |
|
}, |
|
{ |
|
"epoch": 38.71, |
|
"grad_norm": 5.998769760131836, |
|
"learning_rate": 6.12912912912913e-06, |
|
"loss": 0.5081, |
|
"step": 12890 |
|
}, |
|
{ |
|
"epoch": 38.74, |
|
"grad_norm": 5.756679534912109, |
|
"learning_rate": 6.126126126126126e-06, |
|
"loss": 0.4924, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 38.77, |
|
"grad_norm": 3.7122647762298584, |
|
"learning_rate": 6.123123123123124e-06, |
|
"loss": 0.4304, |
|
"step": 12910 |
|
}, |
|
{ |
|
"epoch": 38.8, |
|
"grad_norm": 3.1792144775390625, |
|
"learning_rate": 6.120120120120121e-06, |
|
"loss": 0.4214, |
|
"step": 12920 |
|
}, |
|
{ |
|
"epoch": 38.83, |
|
"grad_norm": 4.193253993988037, |
|
"learning_rate": 6.117117117117117e-06, |
|
"loss": 0.4842, |
|
"step": 12930 |
|
}, |
|
{ |
|
"epoch": 38.86, |
|
"grad_norm": 5.543241024017334, |
|
"learning_rate": 6.114114114114115e-06, |
|
"loss": 0.4797, |
|
"step": 12940 |
|
}, |
|
{ |
|
"epoch": 38.89, |
|
"grad_norm": 5.420256614685059, |
|
"learning_rate": 6.111111111111112e-06, |
|
"loss": 0.4779, |
|
"step": 12950 |
|
}, |
|
{ |
|
"epoch": 38.92, |
|
"grad_norm": 4.960844993591309, |
|
"learning_rate": 6.108108108108109e-06, |
|
"loss": 0.4977, |
|
"step": 12960 |
|
}, |
|
{ |
|
"epoch": 38.95, |
|
"grad_norm": 4.85521936416626, |
|
"learning_rate": 6.105105105105106e-06, |
|
"loss": 0.4911, |
|
"step": 12970 |
|
}, |
|
{ |
|
"epoch": 38.98, |
|
"grad_norm": 3.604191780090332, |
|
"learning_rate": 6.102102102102102e-06, |
|
"loss": 0.4573, |
|
"step": 12980 |
|
}, |
|
{ |
|
"epoch": 39.0, |
|
"eval_accuracy": 0.9203, |
|
"eval_loss": 0.3111189901828766, |
|
"eval_runtime": 26.4985, |
|
"eval_samples_per_second": 377.38, |
|
"eval_steps_per_second": 1.51, |
|
"step": 12987 |
|
}, |
|
{ |
|
"epoch": 39.01, |
|
"grad_norm": 5.761185169219971, |
|
"learning_rate": 6.0990990990990995e-06, |
|
"loss": 0.4552, |
|
"step": 12990 |
|
}, |
|
{ |
|
"epoch": 39.04, |
|
"grad_norm": 8.790044784545898, |
|
"learning_rate": 6.096096096096097e-06, |
|
"loss": 0.4732, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 39.07, |
|
"grad_norm": 4.661570072174072, |
|
"learning_rate": 6.0930930930930936e-06, |
|
"loss": 0.4442, |
|
"step": 13010 |
|
}, |
|
{ |
|
"epoch": 39.1, |
|
"grad_norm": 4.484933376312256, |
|
"learning_rate": 6.09009009009009e-06, |
|
"loss": 0.5375, |
|
"step": 13020 |
|
}, |
|
{ |
|
"epoch": 39.13, |
|
"grad_norm": 4.309755325317383, |
|
"learning_rate": 6.087087087087088e-06, |
|
"loss": 0.465, |
|
"step": 13030 |
|
}, |
|
{ |
|
"epoch": 39.16, |
|
"grad_norm": 5.274332046508789, |
|
"learning_rate": 6.084084084084084e-06, |
|
"loss": 0.4746, |
|
"step": 13040 |
|
}, |
|
{ |
|
"epoch": 39.19, |
|
"grad_norm": 6.931982040405273, |
|
"learning_rate": 6.081081081081082e-06, |
|
"loss": 0.4746, |
|
"step": 13050 |
|
}, |
|
{ |
|
"epoch": 39.22, |
|
"grad_norm": 3.775691509246826, |
|
"learning_rate": 6.078078078078079e-06, |
|
"loss": 0.4113, |
|
"step": 13060 |
|
}, |
|
{ |
|
"epoch": 39.25, |
|
"grad_norm": 6.639736652374268, |
|
"learning_rate": 6.075075075075075e-06, |
|
"loss": 0.4563, |
|
"step": 13070 |
|
}, |
|
{ |
|
"epoch": 39.28, |
|
"grad_norm": 6.107689380645752, |
|
"learning_rate": 6.0720720720720725e-06, |
|
"loss": 0.4773, |
|
"step": 13080 |
|
}, |
|
{ |
|
"epoch": 39.31, |
|
"grad_norm": 5.7024078369140625, |
|
"learning_rate": 6.06906906906907e-06, |
|
"loss": 0.5295, |
|
"step": 13090 |
|
}, |
|
{ |
|
"epoch": 39.34, |
|
"grad_norm": 4.996414661407471, |
|
"learning_rate": 6.066066066066067e-06, |
|
"loss": 0.4752, |
|
"step": 13100 |
|
}, |
|
{ |
|
"epoch": 39.37, |
|
"grad_norm": 3.619349956512451, |
|
"learning_rate": 6.063063063063064e-06, |
|
"loss": 0.5057, |
|
"step": 13110 |
|
}, |
|
{ |
|
"epoch": 39.4, |
|
"grad_norm": 3.311629295349121, |
|
"learning_rate": 6.06006006006006e-06, |
|
"loss": 0.4678, |
|
"step": 13120 |
|
}, |
|
{ |
|
"epoch": 39.43, |
|
"grad_norm": 4.446750640869141, |
|
"learning_rate": 6.057057057057057e-06, |
|
"loss": 0.5085, |
|
"step": 13130 |
|
}, |
|
{ |
|
"epoch": 39.46, |
|
"grad_norm": 4.55859375, |
|
"learning_rate": 6.054054054054055e-06, |
|
"loss": 0.4852, |
|
"step": 13140 |
|
}, |
|
{ |
|
"epoch": 39.49, |
|
"grad_norm": 4.04985237121582, |
|
"learning_rate": 6.0510510510510515e-06, |
|
"loss": 0.4818, |
|
"step": 13150 |
|
}, |
|
{ |
|
"epoch": 39.52, |
|
"grad_norm": 5.457259178161621, |
|
"learning_rate": 6.048048048048049e-06, |
|
"loss": 0.3926, |
|
"step": 13160 |
|
}, |
|
{ |
|
"epoch": 39.55, |
|
"grad_norm": 3.2358996868133545, |
|
"learning_rate": 6.045045045045046e-06, |
|
"loss": 0.5146, |
|
"step": 13170 |
|
}, |
|
{ |
|
"epoch": 39.58, |
|
"grad_norm": 3.3737552165985107, |
|
"learning_rate": 6.042042042042042e-06, |
|
"loss": 0.4892, |
|
"step": 13180 |
|
}, |
|
{ |
|
"epoch": 39.61, |
|
"grad_norm": 3.9016125202178955, |
|
"learning_rate": 6.03903903903904e-06, |
|
"loss": 0.5026, |
|
"step": 13190 |
|
}, |
|
{ |
|
"epoch": 39.64, |
|
"grad_norm": 4.943258285522461, |
|
"learning_rate": 6.036036036036037e-06, |
|
"loss": 0.5245, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 39.67, |
|
"grad_norm": 5.694433689117432, |
|
"learning_rate": 6.033033033033033e-06, |
|
"loss": 0.5168, |
|
"step": 13210 |
|
}, |
|
{ |
|
"epoch": 39.7, |
|
"grad_norm": 3.50801420211792, |
|
"learning_rate": 6.0300300300300304e-06, |
|
"loss": 0.5069, |
|
"step": 13220 |
|
}, |
|
{ |
|
"epoch": 39.73, |
|
"grad_norm": 5.018118381500244, |
|
"learning_rate": 6.027027027027027e-06, |
|
"loss": 0.4794, |
|
"step": 13230 |
|
}, |
|
{ |
|
"epoch": 39.76, |
|
"grad_norm": 3.6679513454437256, |
|
"learning_rate": 6.0240240240240246e-06, |
|
"loss": 0.4994, |
|
"step": 13240 |
|
}, |
|
{ |
|
"epoch": 39.79, |
|
"grad_norm": 4.077887058258057, |
|
"learning_rate": 6.021021021021022e-06, |
|
"loss": 0.5254, |
|
"step": 13250 |
|
}, |
|
{ |
|
"epoch": 39.82, |
|
"grad_norm": 5.2488274574279785, |
|
"learning_rate": 6.018018018018018e-06, |
|
"loss": 0.4703, |
|
"step": 13260 |
|
}, |
|
{ |
|
"epoch": 39.85, |
|
"grad_norm": 4.226322174072266, |
|
"learning_rate": 6.015015015015015e-06, |
|
"loss": 0.5161, |
|
"step": 13270 |
|
}, |
|
{ |
|
"epoch": 39.88, |
|
"grad_norm": 4.291458606719971, |
|
"learning_rate": 6.012012012012013e-06, |
|
"loss": 0.4265, |
|
"step": 13280 |
|
}, |
|
{ |
|
"epoch": 39.91, |
|
"grad_norm": 4.763695240020752, |
|
"learning_rate": 6.009009009009009e-06, |
|
"loss": 0.4394, |
|
"step": 13290 |
|
}, |
|
{ |
|
"epoch": 39.94, |
|
"grad_norm": 5.397831916809082, |
|
"learning_rate": 6.006006006006007e-06, |
|
"loss": 0.4785, |
|
"step": 13300 |
|
}, |
|
{ |
|
"epoch": 39.97, |
|
"grad_norm": 3.8020317554473877, |
|
"learning_rate": 6.003003003003004e-06, |
|
"loss": 0.4506, |
|
"step": 13310 |
|
}, |
|
{ |
|
"epoch": 40.0, |
|
"grad_norm": 0.3561044931411743, |
|
"learning_rate": 6e-06, |
|
"loss": 0.4692, |
|
"step": 13320 |
|
}, |
|
{ |
|
"epoch": 40.0, |
|
"eval_accuracy": 0.9203, |
|
"eval_loss": 0.3074478507041931, |
|
"eval_runtime": 26.6396, |
|
"eval_samples_per_second": 375.381, |
|
"eval_steps_per_second": 1.502, |
|
"step": 13320 |
|
}, |
|
{ |
|
"epoch": 40.03, |
|
"grad_norm": 4.700529098510742, |
|
"learning_rate": 5.996996996996998e-06, |
|
"loss": 0.5115, |
|
"step": 13330 |
|
}, |
|
{ |
|
"epoch": 40.06, |
|
"grad_norm": 6.283649921417236, |
|
"learning_rate": 5.993993993993994e-06, |
|
"loss": 0.5058, |
|
"step": 13340 |
|
}, |
|
{ |
|
"epoch": 40.09, |
|
"grad_norm": 4.070054054260254, |
|
"learning_rate": 5.990990990990992e-06, |
|
"loss": 0.4418, |
|
"step": 13350 |
|
}, |
|
{ |
|
"epoch": 40.12, |
|
"grad_norm": 5.326721668243408, |
|
"learning_rate": 5.987987987987988e-06, |
|
"loss": 0.526, |
|
"step": 13360 |
|
}, |
|
{ |
|
"epoch": 40.15, |
|
"grad_norm": 4.9221720695495605, |
|
"learning_rate": 5.984984984984985e-06, |
|
"loss": 0.4852, |
|
"step": 13370 |
|
}, |
|
{ |
|
"epoch": 40.18, |
|
"grad_norm": 6.132660865783691, |
|
"learning_rate": 5.9819819819819825e-06, |
|
"loss": 0.5221, |
|
"step": 13380 |
|
}, |
|
{ |
|
"epoch": 40.21, |
|
"grad_norm": 5.296045780181885, |
|
"learning_rate": 5.97897897897898e-06, |
|
"loss": 0.4917, |
|
"step": 13390 |
|
}, |
|
{ |
|
"epoch": 40.24, |
|
"grad_norm": 5.326955318450928, |
|
"learning_rate": 5.975975975975976e-06, |
|
"loss": 0.4982, |
|
"step": 13400 |
|
}, |
|
{ |
|
"epoch": 40.27, |
|
"grad_norm": 4.939719200134277, |
|
"learning_rate": 5.972972972972973e-06, |
|
"loss": 0.4549, |
|
"step": 13410 |
|
}, |
|
{ |
|
"epoch": 40.3, |
|
"grad_norm": 5.04403829574585, |
|
"learning_rate": 5.969969969969971e-06, |
|
"loss": 0.4178, |
|
"step": 13420 |
|
}, |
|
{ |
|
"epoch": 40.33, |
|
"grad_norm": 4.405360698699951, |
|
"learning_rate": 5.966966966966967e-06, |
|
"loss": 0.4578, |
|
"step": 13430 |
|
}, |
|
{ |
|
"epoch": 40.36, |
|
"grad_norm": 4.024507999420166, |
|
"learning_rate": 5.963963963963965e-06, |
|
"loss": 0.4424, |
|
"step": 13440 |
|
}, |
|
{ |
|
"epoch": 40.39, |
|
"grad_norm": 4.401228904724121, |
|
"learning_rate": 5.960960960960962e-06, |
|
"loss": 0.5585, |
|
"step": 13450 |
|
}, |
|
{ |
|
"epoch": 40.42, |
|
"grad_norm": 3.543808698654175, |
|
"learning_rate": 5.957957957957958e-06, |
|
"loss": 0.4806, |
|
"step": 13460 |
|
}, |
|
{ |
|
"epoch": 40.45, |
|
"grad_norm": 7.205944061279297, |
|
"learning_rate": 5.9549549549549556e-06, |
|
"loss": 0.4336, |
|
"step": 13470 |
|
}, |
|
{ |
|
"epoch": 40.48, |
|
"grad_norm": 6.0269455909729, |
|
"learning_rate": 5.951951951951952e-06, |
|
"loss": 0.4664, |
|
"step": 13480 |
|
}, |
|
{ |
|
"epoch": 40.51, |
|
"grad_norm": 4.739034175872803, |
|
"learning_rate": 5.94894894894895e-06, |
|
"loss": 0.4657, |
|
"step": 13490 |
|
}, |
|
{ |
|
"epoch": 40.54, |
|
"grad_norm": 4.843464374542236, |
|
"learning_rate": 5.945945945945947e-06, |
|
"loss": 0.4424, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 40.57, |
|
"grad_norm": 3.6364638805389404, |
|
"learning_rate": 5.942942942942943e-06, |
|
"loss": 0.4873, |
|
"step": 13510 |
|
}, |
|
{ |
|
"epoch": 40.6, |
|
"grad_norm": 3.925708293914795, |
|
"learning_rate": 5.93993993993994e-06, |
|
"loss": 0.4664, |
|
"step": 13520 |
|
}, |
|
{ |
|
"epoch": 40.63, |
|
"grad_norm": 4.003354072570801, |
|
"learning_rate": 5.936936936936938e-06, |
|
"loss": 0.4738, |
|
"step": 13530 |
|
}, |
|
{ |
|
"epoch": 40.66, |
|
"grad_norm": 3.8031704425811768, |
|
"learning_rate": 5.9339339339339345e-06, |
|
"loss": 0.4573, |
|
"step": 13540 |
|
}, |
|
{ |
|
"epoch": 40.69, |
|
"grad_norm": 4.621785640716553, |
|
"learning_rate": 5.930930930930931e-06, |
|
"loss": 0.5061, |
|
"step": 13550 |
|
}, |
|
{ |
|
"epoch": 40.72, |
|
"grad_norm": 6.505673408508301, |
|
"learning_rate": 5.927927927927929e-06, |
|
"loss": 0.4471, |
|
"step": 13560 |
|
}, |
|
{ |
|
"epoch": 40.75, |
|
"grad_norm": 4.7050042152404785, |
|
"learning_rate": 5.924924924924925e-06, |
|
"loss": 0.4382, |
|
"step": 13570 |
|
}, |
|
{ |
|
"epoch": 40.78, |
|
"grad_norm": 3.5394630432128906, |
|
"learning_rate": 5.921921921921923e-06, |
|
"loss": 0.4156, |
|
"step": 13580 |
|
}, |
|
{ |
|
"epoch": 40.81, |
|
"grad_norm": 4.195276737213135, |
|
"learning_rate": 5.9189189189189185e-06, |
|
"loss": 0.4377, |
|
"step": 13590 |
|
}, |
|
{ |
|
"epoch": 40.84, |
|
"grad_norm": 4.865070819854736, |
|
"learning_rate": 5.915915915915916e-06, |
|
"loss": 0.4552, |
|
"step": 13600 |
|
}, |
|
{ |
|
"epoch": 40.87, |
|
"grad_norm": 4.878195762634277, |
|
"learning_rate": 5.9129129129129135e-06, |
|
"loss": 0.469, |
|
"step": 13610 |
|
}, |
|
{ |
|
"epoch": 40.9, |
|
"grad_norm": 5.631471633911133, |
|
"learning_rate": 5.90990990990991e-06, |
|
"loss": 0.4635, |
|
"step": 13620 |
|
}, |
|
{ |
|
"epoch": 40.93, |
|
"grad_norm": 3.38757586479187, |
|
"learning_rate": 5.906906906906908e-06, |
|
"loss": 0.4781, |
|
"step": 13630 |
|
}, |
|
{ |
|
"epoch": 40.96, |
|
"grad_norm": 5.4056549072265625, |
|
"learning_rate": 5.903903903903905e-06, |
|
"loss": 0.4698, |
|
"step": 13640 |
|
}, |
|
{ |
|
"epoch": 40.99, |
|
"grad_norm": 4.04258918762207, |
|
"learning_rate": 5.900900900900901e-06, |
|
"loss": 0.481, |
|
"step": 13650 |
|
}, |
|
{ |
|
"epoch": 41.0, |
|
"eval_accuracy": 0.922, |
|
"eval_loss": 0.3042304813861847, |
|
"eval_runtime": 26.1006, |
|
"eval_samples_per_second": 383.133, |
|
"eval_steps_per_second": 1.533, |
|
"step": 13653 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 33300, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 100, |
|
"save_steps": 500, |
|
"total_flos": 1.3514839437625344e+20, |
|
"train_batch_size": 128, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|