{
  "best_metric": 0.03668233007192612,
  "best_model_checkpoint": "./susurro-model/checkpoint-3500",
  "epoch": 8.0,
  "eval_steps": 250,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.016, "grad_norm": 22.783082962036133, "learning_rate": 4.0000000000000003e-07, "loss": 1.4707, "step": 10},
    {"epoch": 0.032, "grad_norm": 15.918609619140625, "learning_rate": 8.000000000000001e-07, "loss": 1.1318, "step": 20},
    {"epoch": 0.048, "grad_norm": 6.734115123748779, "learning_rate": 1.2000000000000002e-06, "loss": 0.566, "step": 30},
    {"epoch": 0.064, "grad_norm": 4.893101692199707, "learning_rate": 1.6000000000000001e-06, "loss": 0.3064, "step": 40},
    {"epoch": 0.08, "grad_norm": 4.515320777893066, "learning_rate": 2.0000000000000003e-06, "loss": 0.1913, "step": 50},
    {"epoch": 0.096, "grad_norm": 2.93345046043396, "learning_rate": 2.4000000000000003e-06, "loss": 0.0643, "step": 60},
    {"epoch": 0.112, "grad_norm": 1.9217101335525513, "learning_rate": 2.8000000000000003e-06, "loss": 0.0534, "step": 70},
    {"epoch": 0.128, "grad_norm": 2.2198150157928467, "learning_rate": 3.2000000000000003e-06, "loss": 0.0547, "step": 80},
    {"epoch": 0.144, "grad_norm": 1.7477046251296997, "learning_rate": 3.6000000000000003e-06, "loss": 0.0376, "step": 90},
    {"epoch": 0.16, "grad_norm": 2.2940073013305664, "learning_rate": 4.000000000000001e-06, "loss": 0.0414, "step": 100},
    {"epoch": 0.176, "grad_norm": 2.83594012260437, "learning_rate": 4.4e-06, "loss": 0.0501, "step": 110},
    {"epoch": 0.192, "grad_norm": 1.6579861640930176, "learning_rate": 4.800000000000001e-06, "loss": 0.0483, "step": 120},
    {"epoch": 0.208, "grad_norm": 2.768068313598633, "learning_rate": 5.2e-06, "loss": 0.0504, "step": 130},
    {"epoch": 0.224, "grad_norm": 1.8856732845306396, "learning_rate": 5.600000000000001e-06, "loss": 0.0452, "step": 140},
    {"epoch": 0.24, "grad_norm": 2.343675374984741, "learning_rate": 6e-06, "loss": 0.0527, "step": 150},
    {"epoch": 0.256, "grad_norm": 2.380032539367676, "learning_rate": 6.4000000000000006e-06, "loss": 0.0488, "step": 160},
    {"epoch": 0.272, "grad_norm": 2.847942352294922, "learning_rate": 6.800000000000001e-06, "loss": 0.0481, "step": 170},
    {"epoch": 0.288, "grad_norm": 2.505783796310425, "learning_rate": 7.2000000000000005e-06, "loss": 0.0525, "step": 180},
    {"epoch": 0.304, "grad_norm": 2.4004359245300293, "learning_rate": 7.600000000000001e-06, "loss": 0.0475, "step": 190},
    {"epoch": 0.32, "grad_norm": 2.5646846294403076, "learning_rate": 8.000000000000001e-06, "loss": 0.0541, "step": 200},
    {"epoch": 0.336, "grad_norm": 2.2978780269622803, "learning_rate": 8.400000000000001e-06, "loss": 0.056, "step": 210},
    {"epoch": 0.352, "grad_norm": 1.9104993343353271, "learning_rate": 8.8e-06, "loss": 0.0507, "step": 220},
    {"epoch": 0.368, "grad_norm": 3.259106397628784, "learning_rate": 9.200000000000002e-06, "loss": 0.0599, "step": 230},
    {"epoch": 0.384, "grad_norm": 1.9815133810043335, "learning_rate": 9.600000000000001e-06, "loss": 0.0531, "step": 240},
    {"epoch": 0.4, "grad_norm": 2.202136278152466, "learning_rate": 1e-05, "loss": 0.0494, "step": 250},
    {"epoch": 0.4, "eval_loss": 0.0524754524230957, "eval_runtime": 1455.6988, "eval_samples_per_second": 3.876, "eval_steps_per_second": 3.876, "step": 250},
    {"epoch": 0.416, "grad_norm": 2.5911672115325928, "learning_rate": 9.978947368421053e-06, "loss": 0.0512, "step": 260},
    {"epoch": 0.432, "grad_norm": 1.6327568292617798, "learning_rate": 9.957894736842106e-06, "loss": 0.0485, "step": 270},
    {"epoch": 0.448, "grad_norm": 2.290238380432129, "learning_rate": 9.936842105263159e-06, "loss": 0.0586, "step": 280},
    {"epoch": 0.464, "grad_norm": 2.7893710136413574, "learning_rate": 9.915789473684211e-06, "loss": 0.0597, "step": 290},
    {"epoch": 0.48, "grad_norm": 2.4395689964294434, "learning_rate": 9.894736842105264e-06, "loss": 0.0596, "step": 300},
    {"epoch": 0.496, "grad_norm": 2.9003653526306152, "learning_rate": 9.873684210526317e-06, "loss": 0.0551, "step": 310},
    {"epoch": 0.512, "grad_norm": 2.7941534519195557, "learning_rate": 9.85263157894737e-06, "loss": 0.0579, "step": 320},
    {"epoch": 0.528, "grad_norm": 2.705819845199585, "learning_rate": 9.831578947368422e-06, "loss": 0.059, "step": 330},
    {"epoch": 0.544, "grad_norm": 3.2800068855285645, "learning_rate": 9.810526315789475e-06, "loss": 0.0649, "step": 340},
    {"epoch": 0.56, "grad_norm": 1.8395154476165771, "learning_rate": 9.789473684210527e-06, "loss": 0.0487, "step": 350},
    {"epoch": 0.576, "grad_norm": 2.447082757949829, "learning_rate": 9.76842105263158e-06, "loss": 0.0552, "step": 360},
    {"epoch": 0.592, "grad_norm": 2.2035391330718994, "learning_rate": 9.747368421052633e-06, "loss": 0.0505, "step": 370},
    {"epoch": 0.608, "grad_norm": 3.010380506515503, "learning_rate": 9.726315789473685e-06, "loss": 0.0717, "step": 380},
    {"epoch": 0.624, "grad_norm": 2.2279305458068848, "learning_rate": 9.705263157894738e-06, "loss": 0.0574, "step": 390},
    {"epoch": 0.64, "grad_norm": 2.1078450679779053, "learning_rate": 9.68421052631579e-06, "loss": 0.0549, "step": 400},
    {"epoch": 0.656, "grad_norm": 1.9342560768127441, "learning_rate": 9.663157894736843e-06, "loss": 0.055, "step": 410},
    {"epoch": 0.672, "grad_norm": 2.2525532245635986, "learning_rate": 9.642105263157896e-06, "loss": 0.0562, "step": 420},
    {"epoch": 0.688, "grad_norm": 1.9181652069091797, "learning_rate": 9.621052631578947e-06, "loss": 0.0561, "step": 430},
    {"epoch": 0.704, "grad_norm": 2.088951826095581, "learning_rate": 9.600000000000001e-06, "loss": 0.0575, "step": 440},
    {"epoch": 0.72, "grad_norm": 2.951388120651245, "learning_rate": 9.578947368421054e-06, "loss": 0.0503, "step": 450},
    {"epoch": 0.736, "grad_norm": 2.5841259956359863, "learning_rate": 9.557894736842107e-06, "loss": 0.0613, "step": 460},
    {"epoch": 0.752, "grad_norm": 3.1597883701324463, "learning_rate": 9.53684210526316e-06, "loss": 0.0594, "step": 470},
    {"epoch": 0.768, "grad_norm": 1.4449310302734375, "learning_rate": 9.515789473684212e-06, "loss": 0.0565, "step": 480},
    {"epoch": 0.784, "grad_norm": 1.588442325592041, "learning_rate": 9.494736842105265e-06, "loss": 0.0505, "step": 490},
    {"epoch": 0.8, "grad_norm": 2.315497398376465, "learning_rate": 9.473684210526315e-06, "loss": 0.0571, "step": 500},
    {"epoch": 0.8, "eval_loss": 0.04724394157528877, "eval_runtime": 1481.9655, "eval_samples_per_second": 3.807, "eval_steps_per_second": 3.807, "step": 500},
    {"epoch": 0.816, "grad_norm": 3.124136209487915, "learning_rate": 9.454736842105265e-06, "loss": 0.0439, "step": 510},
    {"epoch": 0.832, "grad_norm": 2.687971353530884, "learning_rate": 9.433684210526318e-06, "loss": 0.0535, "step": 520},
    {"epoch": 0.848, "grad_norm": 2.442317485809326, "learning_rate": 9.412631578947369e-06, "loss": 0.0516, "step": 530},
    {"epoch": 0.864, "grad_norm": 2.6689493656158447, "learning_rate": 9.391578947368421e-06, "loss": 0.0565, "step": 540},
    {"epoch": 0.88, "grad_norm": 1.959835171699524, "learning_rate": 9.370526315789474e-06, "loss": 0.0575, "step": 550},
    {"epoch": 0.896, "grad_norm": 3.6005074977874756, "learning_rate": 9.349473684210526e-06, "loss": 0.0444, "step": 560},
    {"epoch": 0.912, "grad_norm": 2.1912620067596436, "learning_rate": 9.328421052631579e-06, "loss": 0.052, "step": 570},
    {"epoch": 0.928, "grad_norm": 2.148808240890503, "learning_rate": 9.307368421052634e-06, "loss": 0.0555, "step": 580},
    {"epoch": 0.944, "grad_norm": 2.872231960296631, "learning_rate": 9.286315789473686e-06, "loss": 0.054, "step": 590},
    {"epoch": 0.96, "grad_norm": 3.3271450996398926, "learning_rate": 9.265263157894737e-06, "loss": 0.0554, "step": 600},
    {"epoch": 0.976, "grad_norm": 2.058793067932129, "learning_rate": 9.24421052631579e-06, "loss": 0.0418, "step": 610},
    {"epoch": 0.992, "grad_norm": 2.0928821563720703, "learning_rate": 9.225263157894737e-06, "loss": 0.0525, "step": 620},
    {"epoch": 1.008, "grad_norm": 1.410493016242981, "learning_rate": 9.20421052631579e-06, "loss": 0.0448, "step": 630},
    {"epoch": 1.024, "grad_norm": 1.4376940727233887, "learning_rate": 9.183157894736843e-06, "loss": 0.0281, "step": 640},
    {"epoch": 1.04, "grad_norm": 1.678579568862915, "learning_rate": 9.162105263157895e-06, "loss": 0.0207, "step": 650},
    {"epoch": 1.056, "grad_norm": 2.18996524810791, "learning_rate": 9.141052631578948e-06, "loss": 0.0288, "step": 660},
    {"epoch": 1.072, "grad_norm": 1.0160465240478516, "learning_rate": 9.12e-06, "loss": 0.0199, "step": 670},
    {"epoch": 1.088, "grad_norm": 1.4741309881210327, "learning_rate": 9.098947368421053e-06, "loss": 0.0248, "step": 680},
    {"epoch": 1.104, "grad_norm": 1.5385643243789673, "learning_rate": 9.077894736842106e-06, "loss": 0.0286, "step": 690},
    {"epoch": 1.12, "grad_norm": 1.65945565700531, "learning_rate": 9.056842105263159e-06, "loss": 0.0331, "step": 700},
    {"epoch": 1.1360000000000001, "grad_norm": 2.443169593811035, "learning_rate": 9.035789473684211e-06, "loss": 0.0328, "step": 710},
    {"epoch": 1.152, "grad_norm": 1.950245976448059, "learning_rate": 9.014736842105264e-06, "loss": 0.0297, "step": 720},
    {"epoch": 1.168, "grad_norm": 1.0469032526016235, "learning_rate": 8.993684210526317e-06, "loss": 0.0282, "step": 730},
    {"epoch": 1.184, "grad_norm": 1.5326778888702393, "learning_rate": 8.97263157894737e-06, "loss": 0.0307, "step": 740},
    {"epoch": 1.2, "grad_norm": 0.976245105266571, "learning_rate": 8.951578947368422e-06, "loss": 0.0293, "step": 750},
    {"epoch": 1.2, "eval_loss": 0.04405752569437027, "eval_runtime": 1500.9354, "eval_samples_per_second": 3.759, "eval_steps_per_second": 3.759, "step": 750},
    {"epoch": 1.216, "grad_norm": 1.4788917303085327, "learning_rate": 8.930526315789475e-06, "loss": 0.0341, "step": 760},
    {"epoch": 1.232, "grad_norm": 1.398728847503662, "learning_rate": 8.909473684210527e-06, "loss": 0.0301, "step": 770},
    {"epoch": 1.248, "grad_norm": 1.7809382677078247, "learning_rate": 8.88842105263158e-06, "loss": 0.0265, "step": 780},
    {"epoch": 1.264, "grad_norm": 1.9043110609054565, "learning_rate": 8.867368421052633e-06, "loss": 0.0277, "step": 790},
    {"epoch": 1.28, "grad_norm": 2.286780595779419, "learning_rate": 8.846315789473685e-06, "loss": 0.0241, "step": 800},
    {"epoch": 1.296, "grad_norm": 2.867587089538574, "learning_rate": 8.825263157894738e-06, "loss": 0.0288, "step": 810},
    {"epoch": 1.312, "grad_norm": 1.6300053596496582, "learning_rate": 8.80421052631579e-06, "loss": 0.0229, "step": 820},
    {"epoch": 1.328, "grad_norm": 2.1260311603546143, "learning_rate": 8.783157894736842e-06, "loss": 0.034, "step": 830},
    {"epoch": 1.3439999999999999, "grad_norm": 1.2183325290679932, "learning_rate": 8.762105263157896e-06, "loss": 0.0261, "step": 840},
    {"epoch": 1.3599999999999999, "grad_norm": 1.4144467115402222, "learning_rate": 8.741052631578949e-06, "loss": 0.0258, "step": 850},
    {"epoch": 1.376, "grad_norm": 1.658300757408142, "learning_rate": 8.720000000000001e-06, "loss": 0.0286, "step": 860},
    {"epoch": 1.392, "grad_norm": 1.566178321838379, "learning_rate": 8.698947368421054e-06, "loss": 0.0259, "step": 870},
    {"epoch": 1.408, "grad_norm": 1.7910547256469727, "learning_rate": 8.677894736842107e-06, "loss": 0.024, "step": 880},
    {"epoch": 1.424, "grad_norm": 1.882262110710144, "learning_rate": 8.65684210526316e-06, "loss": 0.0312, "step": 890},
    {"epoch": 1.44, "grad_norm": 1.2737823724746704, "learning_rate": 8.63578947368421e-06, "loss": 0.0324, "step": 900},
    {"epoch": 1.456, "grad_norm": 1.954757809638977, "learning_rate": 8.614736842105263e-06, "loss": 0.0276, "step": 910},
    {"epoch": 1.472, "grad_norm": 2.4381039142608643, "learning_rate": 8.593684210526315e-06, "loss": 0.0258, "step": 920},
    {"epoch": 1.488, "grad_norm": 1.962517499923706, "learning_rate": 8.57263157894737e-06, "loss": 0.0357, "step": 930},
    {"epoch": 1.504, "grad_norm": 1.7667138576507568, "learning_rate": 8.551578947368422e-06, "loss": 0.0358, "step": 940},
    {"epoch": 1.52, "grad_norm": 1.6980652809143066, "learning_rate": 8.530526315789475e-06, "loss": 0.0247, "step": 950},
    {"epoch": 1.536, "grad_norm": 1.765106201171875, "learning_rate": 8.509473684210528e-06, "loss": 0.0289, "step": 960},
    {"epoch": 1.552, "grad_norm": 2.0965826511383057, "learning_rate": 8.488421052631579e-06, "loss": 0.0293, "step": 970},
    {"epoch": 1.568, "grad_norm": 1.2776910066604614, "learning_rate": 8.467368421052631e-06, "loss": 0.0248, "step": 980},
    {"epoch": 1.584, "grad_norm": 1.9160679578781128, "learning_rate": 8.446315789473684e-06, "loss": 0.0327, "step": 990},
    {"epoch": 1.6, "grad_norm": 2.084198474884033, "learning_rate": 8.425263157894737e-06, "loss": 0.0248, "step": 1000},
    {"epoch": 1.6, "eval_loss": 0.04179142042994499, "eval_runtime": 1520.1889, "eval_samples_per_second": 3.711, "eval_steps_per_second": 3.711, "step": 1000},
    {"epoch": 1.616, "grad_norm": 1.8233721256256104, "learning_rate": 8.404210526315791e-06, "loss": 0.0325, "step": 1010},
    {"epoch": 1.6320000000000001, "grad_norm": 2.6856882572174072, "learning_rate": 8.383157894736844e-06, "loss": 0.0345, "step": 1020},
    {"epoch": 1.6480000000000001, "grad_norm": 1.6194241046905518, "learning_rate": 8.362105263157896e-06, "loss": 0.0248, "step": 1030},
    {"epoch": 1.6640000000000001, "grad_norm": 2.0623207092285156, "learning_rate": 8.341052631578947e-06, "loss": 0.0297, "step": 1040},
    {"epoch": 1.6800000000000002, "grad_norm": 2.122952461242676, "learning_rate": 8.32e-06, "loss": 0.0346, "step": 1050},
    {"epoch": 1.696, "grad_norm": 1.4452091455459595, "learning_rate": 8.298947368421053e-06, "loss": 0.03, "step": 1060},
    {"epoch": 1.712, "grad_norm": 2.9357564449310303, "learning_rate": 8.277894736842105e-06, "loss": 0.026, "step": 1070},
    {"epoch": 1.728, "grad_norm": 1.0874323844909668, "learning_rate": 8.256842105263158e-06, "loss": 0.0294, "step": 1080},
    {"epoch": 1.744, "grad_norm": 1.1625168323516846, "learning_rate": 8.235789473684212e-06, "loss": 0.0291, "step": 1090},
    {"epoch": 1.76, "grad_norm": 1.3480169773101807, "learning_rate": 8.214736842105265e-06, "loss": 0.034, "step": 1100},
    {"epoch": 1.776, "grad_norm": 1.1702100038528442, "learning_rate": 8.193684210526316e-06, "loss": 0.029, "step": 1110},
    {"epoch": 1.792, "grad_norm": 2.0428359508514404, "learning_rate": 8.172631578947369e-06, "loss": 0.023, "step": 1120},
    {"epoch": 1.808, "grad_norm": 0.7537345886230469, "learning_rate": 8.151578947368421e-06, "loss": 0.03, "step": 1130},
    {"epoch": 1.8239999999999998, "grad_norm": 2.184401273727417, "learning_rate": 8.130526315789474e-06, "loss": 0.0327, "step": 1140},
    {"epoch": 1.8399999999999999, "grad_norm": 2.1545660495758057, "learning_rate": 8.109473684210527e-06, "loss": 0.0275, "step": 1150},
    {"epoch": 1.8559999999999999, "grad_norm": 2.535369396209717, "learning_rate": 8.08842105263158e-06, "loss": 0.0292, "step": 1160},
    {"epoch": 1.8719999999999999, "grad_norm": 1.8030229806900024, "learning_rate": 8.067368421052632e-06, "loss": 0.0264, "step": 1170},
    {"epoch": 1.888, "grad_norm": 1.8990155458450317, "learning_rate": 8.046315789473686e-06, "loss": 0.0365, "step": 1180},
    {"epoch": 1.904, "grad_norm": 1.4508986473083496, "learning_rate": 8.025263157894737e-06, "loss": 0.0251, "step": 1190},
    {"epoch": 1.92, "grad_norm": 1.0793194770812988, "learning_rate": 8.00421052631579e-06, "loss": 0.0274, "step": 1200},
    {"epoch": 1.936, "grad_norm": 1.9805344343185425, "learning_rate": 7.983157894736842e-06, "loss": 0.0299, "step": 1210},
    {"epoch": 1.952, "grad_norm": 2.258500576019287, "learning_rate": 7.962105263157895e-06, "loss": 0.0266, "step": 1220},
    {"epoch": 1.968, "grad_norm": 1.4639638662338257, "learning_rate": 7.941052631578948e-06, "loss": 0.026, "step": 1230},
    {"epoch": 1.984, "grad_norm": 1.3057489395141602, "learning_rate": 7.92e-06, "loss": 0.0305, "step": 1240},
    {"epoch": 2.0, "grad_norm": 1.29533052444458, "learning_rate": 7.898947368421053e-06, "loss": 0.0265, "step": 1250},
    {"epoch": 2.0, "eval_loss": 0.04163273423910141, "eval_runtime": 1542.8649, "eval_samples_per_second": 3.657, "eval_steps_per_second": 3.657, "step": 1250},
    {"epoch": 2.016, "grad_norm": 0.5275375843048096, "learning_rate": 7.877894736842106e-06, "loss": 0.0117, "step": 1260},
    {"epoch": 2.032, "grad_norm": 2.0590779781341553, "learning_rate": 7.856842105263158e-06, "loss": 0.0156, "step": 1270},
    {"epoch": 2.048, "grad_norm": 1.2595654726028442, "learning_rate": 7.835789473684211e-06, "loss": 0.0105, "step": 1280},
    {"epoch": 2.064, "grad_norm": 1.605797529220581, "learning_rate": 7.814736842105264e-06, "loss": 0.0176, "step": 1290},
    {"epoch": 2.08, "grad_norm": 1.2974120378494263, "learning_rate": 7.793684210526316e-06, "loss": 0.0196, "step": 1300},
    {"epoch": 2.096, "grad_norm": 3.662148952484131, "learning_rate": 7.772631578947369e-06, "loss": 0.0144, "step": 1310},
    {"epoch": 2.112, "grad_norm": 0.9286357164382935, "learning_rate": 7.751578947368422e-06, "loss": 0.0126, "step": 1320},
    {"epoch": 2.128, "grad_norm": 1.4938029050827026, "learning_rate": 7.730526315789474e-06, "loss": 0.0181, "step": 1330},
    {"epoch": 2.144, "grad_norm": 1.9004080295562744, "learning_rate": 7.709473684210527e-06, "loss": 0.0192, "step": 1340},
    {"epoch": 2.16, "grad_norm": 0.7749639749526978, "learning_rate": 7.68842105263158e-06, "loss": 0.0146, "step": 1350},
    {"epoch": 2.176, "grad_norm": 1.69952392578125, "learning_rate": 7.667368421052632e-06, "loss": 0.0169, "step": 1360},
    {"epoch": 2.192, "grad_norm": 0.6169717311859131, "learning_rate": 7.646315789473685e-06, "loss": 0.0102, "step": 1370},
    {"epoch": 2.208, "grad_norm": 3.5800750255584717, "learning_rate": 7.6252631578947376e-06, "loss": 0.014, "step": 1380},
    {"epoch": 2.224, "grad_norm": 0.6337082982063293, "learning_rate": 7.60421052631579e-06, "loss": 0.0161, "step": 1390},
    {"epoch": 2.24, "grad_norm": 0.7504271268844604, "learning_rate": 7.583157894736842e-06, "loss": 0.0145, "step": 1400},
    {"epoch": 2.2560000000000002, "grad_norm": 0.6736518740653992, "learning_rate": 7.562105263157895e-06, "loss": 0.0105, "step": 1410},
    {"epoch": 2.2720000000000002, "grad_norm": 1.1138242483139038, "learning_rate": 7.541052631578948e-06, "loss": 0.0136, "step": 1420},
    {"epoch": 2.288, "grad_norm": 1.6553155183792114, "learning_rate": 7.520000000000001e-06, "loss": 0.0135, "step": 1430},
    {"epoch": 2.304, "grad_norm": 1.3883057832717896, "learning_rate": 7.4989473684210535e-06, "loss": 0.0136, "step": 1440},
    {"epoch": 2.32, "grad_norm": 1.3742883205413818, "learning_rate": 7.477894736842106e-06, "loss": 0.0146, "step": 1450},
    {"epoch": 2.336, "grad_norm": 2.129136562347412, "learning_rate": 7.456842105263159e-06, "loss": 0.0142, "step": 1460},
    {"epoch": 2.352, "grad_norm": 1.771249532699585, "learning_rate": 7.435789473684211e-06, "loss": 0.0182, "step": 1470},
    {"epoch": 2.368, "grad_norm": 0.906812846660614, "learning_rate": 7.414736842105263e-06, "loss": 0.0127, "step": 1480},
    {"epoch": 2.384, "grad_norm": 1.0035991668701172, "learning_rate": 7.393684210526316e-06, "loss": 0.0148, "step": 1490},
    {"epoch": 2.4, "grad_norm": 1.0261139869689941, "learning_rate": 7.3726315789473694e-06, "loss": 0.0162, "step": 1500},
    {"epoch": 2.4, "eval_loss": 0.04060380160808563, "eval_runtime": 1542.3166, "eval_samples_per_second": 3.658, "eval_steps_per_second": 3.658, "step": 1500},
    {"epoch": 2.416, "grad_norm": 0.9582253098487854, "learning_rate": 7.351578947368422e-06, "loss": 0.0139, "step": 1510},
    {"epoch": 2.432, "grad_norm": 1.6380131244659424, "learning_rate": 7.330526315789475e-06, "loss": 0.0175, "step": 1520},
    {"epoch": 2.448, "grad_norm": 2.057828426361084, "learning_rate": 7.309473684210527e-06, "loss": 0.0168, "step": 1530},
    {"epoch": 2.464, "grad_norm": 1.6444942951202393, "learning_rate": 7.288421052631579e-06, "loss": 0.0151, "step": 1540},
    {"epoch": 2.48, "grad_norm": 0.5916880369186401, "learning_rate": 7.267368421052632e-06, "loss": 0.0184, "step": 1550},
    {"epoch": 2.496, "grad_norm": 1.4819905757904053, "learning_rate": 7.2463157894736845e-06, "loss": 0.0215, "step": 1560},
    {"epoch": 2.512, "grad_norm": 0.944874107837677, "learning_rate": 7.225263157894737e-06, "loss": 0.0146, "step": 1570},
    {"epoch": 2.528, "grad_norm": 1.1218395233154297, "learning_rate": 7.20421052631579e-06, "loss": 0.0155, "step": 1580},
    {"epoch": 2.544, "grad_norm": 1.7295416593551636, "learning_rate": 7.183157894736843e-06, "loss": 0.0149, "step": 1590},
    {"epoch": 2.56, "grad_norm": 2.064664602279663, "learning_rate": 7.162105263157896e-06, "loss": 0.0209, "step": 1600},
    {"epoch": 2.576, "grad_norm": 1.4919706583023071, "learning_rate": 7.141052631578948e-06, "loss": 0.0174, "step": 1610},
    {"epoch": 2.592, "grad_norm": 1.8185276985168457, "learning_rate": 7.1200000000000004e-06, "loss": 0.0153, "step": 1620},
    {"epoch": 2.608, "grad_norm": 1.0406450033187866, "learning_rate": 7.098947368421053e-06, "loss": 0.017, "step": 1630},
    {"epoch": 2.624, "grad_norm": 2.1542301177978516, "learning_rate": 7.077894736842106e-06, "loss": 0.0171, "step": 1640},
    {"epoch": 2.64, "grad_norm": 1.3869684934616089, "learning_rate": 7.056842105263158e-06, "loss": 0.0141, "step": 1650},
    {"epoch": 2.656, "grad_norm": 1.0020642280578613, "learning_rate": 7.035789473684211e-06, "loss": 0.0176, "step": 1660},
    {"epoch": 2.672, "grad_norm": 2.786790370941162, "learning_rate": 7.0147368421052646e-06, "loss": 0.0132, "step": 1670},
    {"epoch": 2.6879999999999997, "grad_norm": 1.0302237272262573, "learning_rate": 6.9936842105263155e-06, "loss": 0.0139, "step": 1680},
    {"epoch": 2.7039999999999997, "grad_norm": 1.0650746822357178, "learning_rate": 6.972631578947369e-06, "loss": 0.0137, "step": 1690},
    {"epoch": 2.7199999999999998, "grad_norm": 1.5508835315704346, "learning_rate": 6.951578947368422e-06, "loss": 0.0151, "step": 1700},
    {"epoch": 2.7359999999999998, "grad_norm": 0.7046643495559692, "learning_rate": 6.930526315789474e-06, "loss": 0.0127, "step": 1710},
    {"epoch": 2.752, "grad_norm": 1.3175015449523926, "learning_rate": 6.909473684210527e-06, "loss": 0.015, "step": 1720},
    {"epoch": 2.768, "grad_norm": 2.0063388347625732, "learning_rate": 6.88842105263158e-06, "loss": 0.0143, "step": 1730},
    {"epoch": 2.784, "grad_norm": 1.5595343112945557, "learning_rate": 6.867368421052632e-06, "loss": 0.0175, "step": 1740},
    {"epoch": 2.8, "grad_norm": 1.6928199529647827, "learning_rate": 6.846315789473684e-06, "loss": 0.0147, "step": 1750},
    {"epoch": 2.8, "eval_loss": 0.03860897570848465, "eval_runtime": 9405.2396, "eval_samples_per_second": 0.6, "eval_steps_per_second": 0.6, "step": 1750},
    {"epoch": 2.816, "grad_norm": 2.1976370811462402, "learning_rate": 6.825263157894737e-06, "loss": 0.0158, "step": 1760},
    {"epoch": 2.832, "grad_norm": 1.3669664859771729, "learning_rate": 6.804210526315789e-06, "loss": 0.0213, "step": 1770},
    {"epoch": 2.848, "grad_norm": 1.8074935674667358, "learning_rate": 6.783157894736843e-06, "loss": 0.018, "step": 1780},
    {"epoch": 2.864, "grad_norm": 0.8959200382232666, "learning_rate": 6.7621052631578956e-06, "loss": 0.0137, "step": 1790},
    {"epoch": 2.88, "grad_norm": 1.1795583963394165, "learning_rate": 6.741052631578948e-06, "loss": 0.0157, "step": 1800},
    {"epoch": 2.896, "grad_norm": 1.4148980379104614, "learning_rate": 6.720000000000001e-06, "loss": 0.0174, "step": 1810},
    {"epoch": 2.912, "grad_norm": 0.8638339042663574, "learning_rate": 6.698947368421053e-06, "loss": 0.0209, "step": 1820},
    {"epoch": 2.928, "grad_norm": 0.9411534667015076, "learning_rate": 6.677894736842105e-06, "loss": 0.0114, "step": 1830},
    {"epoch": 2.944, "grad_norm": 0.6806014180183411, "learning_rate": 6.656842105263158e-06, "loss": 0.02, "step": 1840},
    {"epoch": 2.96, "grad_norm": 0.9796069264411926, "learning_rate": 6.635789473684211e-06, "loss": 0.0116, "step": 1850},
    {"epoch": 2.976, "grad_norm": 3.364715814590454, "learning_rate": 6.614736842105264e-06, "loss": 0.0132, "step": 1860},
    {"epoch": 2.992, "grad_norm": 0.729645848274231, "learning_rate": 6.593684210526317e-06, "loss": 0.0147, "step": 1870},
    {"epoch": 3.008, "grad_norm": 2.3798277378082275, "learning_rate": 6.5726315789473695e-06, "loss": 0.0174, "step": 1880},
    {"epoch": 3.024, "grad_norm": 0.6777742505073547, "learning_rate": 6.551578947368421e-06, "loss": 0.0105, "step": 1890},
    {"epoch": 3.04, "grad_norm": 1.1942986249923706, "learning_rate": 6.530526315789474e-06, "loss": 0.0098, "step": 1900},
    {"epoch": 3.056, "grad_norm": 0.5124061107635498, "learning_rate": 6.509473684210527e-06, "loss": 0.0122, "step": 1910},
    {"epoch": 3.072, "grad_norm": 0.9624982476234436, "learning_rate": 6.488421052631579e-06, "loss": 0.0075, "step": 1920},
    {"epoch": 3.088, "grad_norm": 0.6776851415634155, "learning_rate": 6.467368421052632e-06, "loss": 0.0078, "step": 1930},
    {"epoch": 3.104, "grad_norm": 0.4030074179172516, "learning_rate": 6.4463157894736845e-06, "loss": 0.0078, "step": 1940},
    {"epoch": 3.12, "grad_norm": 0.5822834372520447, "learning_rate": 6.425263157894738e-06, "loss": 0.0058, "step": 1950},
    {"epoch": 3.136, "grad_norm": 0.23138459026813507, "learning_rate": 6.404210526315791e-06, "loss": 0.0062, "step": 1960},
    {"epoch": 3.152, "grad_norm": 1.4345389604568481, "learning_rate": 6.3831578947368425e-06, "loss": 0.009, "step": 1970},
    {"epoch": 3.168, "grad_norm": 1.3896571397781372, "learning_rate": 6.362105263157895e-06, "loss": 0.0091, "step": 1980},
    {"epoch": 3.184, "grad_norm": 0.7762678861618042, "learning_rate": 6.341052631578948e-06, "loss": 0.0078, "step": 1990},
    {"epoch": 3.2, "grad_norm": 0.7915733456611633, "learning_rate": 6.3200000000000005e-06, "loss": 0.0061, "step": 2000},
    {"epoch": 3.2, "eval_loss": 0.039765432476997375, "eval_runtime": 1647.5775, "eval_samples_per_second": 3.424, "eval_steps_per_second": 3.424, "step": 2000},
    {"epoch": 3.216, "grad_norm": 0.6915295124053955, "learning_rate": 6.298947368421053e-06, "loss": 0.0072, "step": 2010},
    {"epoch": 3.232, "grad_norm": 1.2451772689819336, "learning_rate": 6.277894736842106e-06, "loss": 0.0085, "step": 2020},
    {"epoch": 3.248, "grad_norm": 0.7011457085609436, "learning_rate": 6.256842105263159e-06, "loss": 0.0095, "step": 2030},
    {"epoch": 3.2640000000000002, "grad_norm": 0.3362152874469757, "learning_rate": 6.23578947368421e-06, "loss": 0.0068, "step": 2040},
    {"epoch": 3.2800000000000002, "grad_norm": 2.8105270862579346, "learning_rate": 6.214736842105264e-06, "loss": 0.0065, "step": 2050},
    {"epoch": 3.296, "grad_norm": 0.5386505722999573, "learning_rate": 6.193684210526316e-06, "loss": 0.0064, "step": 2060},
    {"epoch": 3.312, "grad_norm": 1.7395727634429932, "learning_rate": 6.172631578947369e-06, "loss": 0.0054, "step": 2070},
    {"epoch": 3.328, "grad_norm": 0.7333295345306396, "learning_rate": 6.151578947368422e-06, "loss": 0.0074, "step": 2080},
    {"epoch": 3.344, "grad_norm": 0.36293238401412964, "learning_rate": 6.130526315789474e-06, "loss": 0.0083, "step": 2090},
    {"epoch": 3.36, "grad_norm": 0.7955753207206726, "learning_rate": 6.109473684210527e-06, "loss": 0.0057, "step": 2100},
    {"epoch": 3.376, "grad_norm": 1.8287686109542847, "learning_rate": 6.088421052631579e-06, "loss": 0.0071, "step": 2110},
    {"epoch": 3.392, "grad_norm": 0.6369631886482239, "learning_rate": 6.0673684210526315e-06, "loss": 0.0073, "step": 2120},
    {"epoch": 3.408, "grad_norm": 0.9539582133293152, "learning_rate": 6.046315789473685e-06, "loss": 0.0114, "step": 2130},
    {"epoch": 3.424, "grad_norm": 0.8086543083190918, "learning_rate": 6.025263157894738e-06, "loss": 0.0099, "step": 2140},
    {"epoch": 3.44, "grad_norm": 0.40894851088523865, "learning_rate": 6.00421052631579e-06, "loss": 0.0066, "step": 2150},
    {"epoch": 3.456, "grad_norm": 0.39193499088287354, "learning_rate": 5.983157894736843e-06, "loss": 0.0044, "step": 2160},
    {"epoch": 3.472, "grad_norm": 1.0509319305419922, "learning_rate": 5.962105263157896e-06, "loss": 0.0081, "step": 2170},
    {"epoch": 3.488, "grad_norm": 2.615633964538574, "learning_rate": 5.941052631578947e-06, "loss": 0.0076, "step": 2180},
    {"epoch": 3.504, "grad_norm": 0.9589405059814453, "learning_rate": 5.92e-06, "loss": 0.0078, "step": 2190},
    {"epoch": 3.52, "grad_norm": 1.0844931602478027, "learning_rate": 5.898947368421053e-06, "loss": 0.0064, "step": 2200},
    {"epoch": 3.536, "grad_norm": 0.5187249183654785, "learning_rate": 5.8800000000000005e-06, "loss": 0.0094, "step": 2210},
    {"epoch": 3.552, "grad_norm": 1.4108637571334839, "learning_rate": 5.858947368421053e-06, "loss": 0.0075, "step": 2220},
    {"epoch": 3.568, "grad_norm": 0.7219350337982178, "learning_rate": 5.837894736842106e-06, "loss": 0.0091, "step": 2230},
    {"epoch": 3.584, "grad_norm": 0.8116509914398193, "learning_rate": 5.816842105263158e-06, "loss": 0.008, "step": 2240},
    {"epoch": 3.6, "grad_norm": 0.27414098381996155, "learning_rate": 5.795789473684211e-06, "loss": 0.0094, "step": 2250},
    {"epoch": 3.6, "eval_loss": 0.03814266622066498, "eval_runtime": 1450.6262, "eval_samples_per_second": 3.889, "eval_steps_per_second": 3.889, "step": 2250},
    {"epoch": 3.616, "grad_norm": 0.7917847633361816, "learning_rate": 5.774736842105264e-06, "loss": 0.0074, "step": 2260},
    {"epoch": 3.632, "grad_norm": 0.7660414576530457, "learning_rate": 5.7536842105263155e-06, "loss": 0.0081, "step": 2270},
    {"epoch": 3.648, "grad_norm": 0.9440973401069641, "learning_rate": 5.732631578947368e-06, "loss": 0.0072, "step": 2280},
    {"epoch": 3.664, "grad_norm": 1.79617440700531, "learning_rate": 5.711578947368422e-06, "loss": 0.0094, "step": 2290},
    {"epoch": 3.68, "grad_norm": 0.6188564896583557, "learning_rate": 5.690526315789474e-06, "loss": 0.0101, "step": 2300},
    {"epoch": 3.6959999999999997, "grad_norm": 1.2464138269424438, "learning_rate": 5.669473684210527e-06, "loss": 0.0073, "step": 2310},
    {"epoch": 3.7119999999999997, "grad_norm": 0.35902971029281616, "learning_rate": 5.64842105263158e-06, "loss": 0.007, "step": 2320},
    {"epoch": 3.7279999999999998, "grad_norm": 0.5012713670730591, "learning_rate": 5.627368421052632e-06, "loss": 0.0046, "step": 2330},
    {"epoch": 3.7439999999999998, "grad_norm": 1.280382752418518, "learning_rate": 5.606315789473684e-06, "loss": 0.0105, "step": 2340},
    {"epoch": 3.76, "grad_norm": 0.7401492595672607, "learning_rate": 5.585263157894737e-06, "loss": 0.0079, "step": 2350},
    {"epoch": 3.776, "grad_norm": 0.9203608632087708, "learning_rate": 5.5642105263157894e-06, "loss": 0.0052, "step": 2360},
    {"epoch": 3.792, "grad_norm": 1.1353886127471924, "learning_rate": 5.543157894736843e-06, "loss": 0.0087, "step": 2370},
    {"epoch": 3.808, "grad_norm": 1.4035215377807617, "learning_rate": 5.522105263157896e-06, "loss": 0.0077, "step": 2380},
    {"epoch": 3.824, "grad_norm": 0.429685115814209, "learning_rate": 5.501052631578948e-06, "loss": 0.0084, "step": 2390},
    {"epoch": 3.84, "grad_norm": 0.868377685546875, "learning_rate": 5.480000000000001e-06, "loss": 0.0114, "step": 2400},
    {"epoch": 3.856, "grad_norm": 0.9295462369918823, "learning_rate": 5.458947368421053e-06, "loss": 0.0066, "step": 2410},
    {"epoch": 3.872, "grad_norm": 1.1884615421295166, "learning_rate": 5.437894736842105e-06, "loss": 0.0083, "step": 2420},
    {"epoch": 3.888, "grad_norm": 0.9172056913375854, "learning_rate": 5.416842105263158e-06, "loss": 0.0071, "step": 2430},
    {"epoch": 3.904, "grad_norm": 0.7041633725166321, "learning_rate": 5.395789473684211e-06, "loss": 0.0069, "step": 2440},
    {"epoch": 3.92, "grad_norm": 0.7116276621818542, "learning_rate": 5.374736842105263e-06, "loss": 0.0076, "step": 2450},
    {"epoch": 3.936, "grad_norm": 0.7440093159675598, "learning_rate": 5.353684210526317e-06, "loss": 0.0076, "step": 2460},
    {"epoch": 3.952, "grad_norm": 1.1006033420562744, "learning_rate": 5.3326315789473695e-06, "loss": 0.0077, "step": 2470},
    {"epoch": 3.968, "grad_norm": 1.046329140663147, "learning_rate": 5.311578947368421e-06, "loss": 0.0089, "step": 2480},
    {"epoch": 3.984, "grad_norm": 0.9022756814956665, "learning_rate": 5.290526315789474e-06, "loss": 0.0073, "step": 2490},
    {"epoch": 4.0, "grad_norm": 1.0019030570983887, "learning_rate": 5.269473684210527e-06, "loss": 0.0074, "step": 2500},
    {"epoch": 4.0, "eval_loss": 0.03700412064790726, "eval_runtime": 1477.9736, "eval_samples_per_second": 3.817, "eval_steps_per_second": 3.817, "step": 2500},
    {"epoch": 4.016, "grad_norm": 1.2217806577682495, "learning_rate": 5.248421052631579e-06, "loss": 0.0037, "step": 2510},
    {"epoch": 4.032, "grad_norm": 0.7005161643028259, "learning_rate": 5.227368421052632e-06, "loss": 0.0047, "step": 2520},
    {"epoch": 4.048, "grad_norm": 0.3553329110145569, "learning_rate": 5.2063157894736846e-06, "loss": 0.0038, "step": 2530},
    {"epoch": 4.064, "grad_norm": 0.4705675542354584, "learning_rate": 5.185263157894738e-06, "loss": 0.0038, "step": 2540},
    {"epoch": 4.08, "grad_norm": 0.9150580763816833, "learning_rate": 5.164210526315791e-06, "loss": 0.0055, "step": 2550},
    {"epoch": 4.096, "grad_norm": 0.7440536618232727, "learning_rate": 5.1431578947368425e-06, "loss": 0.0064, "step": 2560},
    {"epoch": 4.112, "grad_norm": 0.16911202669143677, "learning_rate": 5.122105263157895e-06, "loss": 0.0033, "step": 2570},
    {"epoch": 4.128, "grad_norm": 0.20625616610050201, "learning_rate": 5.101052631578948e-06, "loss": 0.0029, "step": 2580},
    {"epoch": 4.144, "grad_norm": 1.1767489910125732, "learning_rate": 5.0800000000000005e-06, "loss": 0.0043, "step": 2590},
    {"epoch": 4.16, "grad_norm": 0.7702798247337341, "learning_rate": 5.058947368421053e-06, "loss": 0.002, "step": 2600},
    {"epoch": 4.176, "grad_norm": 0.9442148804664612, "learning_rate": 5.037894736842106e-06, "loss": 0.0049, "step": 2610},
    {"epoch": 4.192, "grad_norm": 0.439223974943161, "learning_rate": 5.0168421052631585e-06, "loss": 0.0033, "step": 2620},
    {"epoch": 4.208, "grad_norm": 0.2672136127948761, "learning_rate": 4.995789473684211e-06, "loss": 0.0055, "step": 2630},
    {"epoch": 4.224, "grad_norm": 0.1722344309091568, "learning_rate": 4.974736842105263e-06, "loss": 0.0049, "step": 2640},
    {"epoch": 4.24, "grad_norm": 0.8054834604263306, "learning_rate": 4.953684210526316e-06, "loss": 0.0028, "step": 2650},
    {"epoch": 4.256, "grad_norm": 0.38357770442962646, "learning_rate": 4.932631578947369e-06, "loss": 0.0022, "step": 2660},
    {"epoch": 4.272, "grad_norm": 0.7503048777580261, "learning_rate": 4.911578947368422e-06, "loss": 0.0021, "step": 2670},
    {"epoch": 4.288, "grad_norm": 0.95183265209198, "learning_rate": 4.8905263157894735e-06, "loss": 0.0035, "step": 2680},
    {"epoch": 4.304, "grad_norm": 0.9550835490226746, "learning_rate": 4.869473684210527e-06, "loss": 0.003, "step": 2690},
    {"epoch": 4.32, "grad_norm": 1.5608851909637451, "learning_rate": 4.84842105263158e-06, "loss": 0.0042, "step": 2700},
    {"epoch": 4.336, "grad_norm": 1.4856270551681519, "learning_rate": 4.8273684210526315e-06, "loss": 0.0048, "step": 2710},
    {"epoch": 4.352, "grad_norm": 0.11427481472492218, "learning_rate": 4.806315789473684e-06, "loss": 0.0022, "step": 2720},
    {"epoch": 4.368, "grad_norm": 1.5165557861328125, "learning_rate": 4.785263157894738e-06, "loss": 0.0037, "step": 2730},
    {"epoch": 4.384, "grad_norm": 0.30141228437423706, "learning_rate": 4.76421052631579e-06, "loss": 0.0026, "step": 2740},
    {"epoch": 4.4, "grad_norm": 0.4358047842979431, "learning_rate": 4.743157894736842e-06, "loss": 0.0046, "step": 2750},
    {"epoch": 4.4, "eval_loss": 0.0372794084250927, "eval_runtime": 1452.4236, "eval_samples_per_second": 3.885, "eval_steps_per_second": 3.885, "step": 2750},
    {"epoch": 4.416, "grad_norm": 0.8224077820777893, "learning_rate": 4.72421052631579e-06, "loss": 0.0046, "step": 2760},
    {"epoch": 4.432, "grad_norm": 1.4491019248962402, "learning_rate": 4.7031578947368425e-06, "loss": 0.0029, "step": 2770},
    {"epoch": 4.448, "grad_norm": 0.2398335337638855, "learning_rate": 4.682105263157895e-06, "loss": 0.0038, "step": 2780},
    {"epoch": 4.464, "grad_norm": 0.4416545629501343, "learning_rate": 4.661052631578948e-06, "loss": 0.0059, "step": 2790},
    {"epoch": 4.48, "grad_norm": 0.12480773776769638, "learning_rate": 4.6400000000000005e-06, "loss": 0.0036, "step": 2800},
    {"epoch": 4.496, "grad_norm": 0.9009574055671692, "learning_rate": 4.618947368421053e-06, "loss": 0.0044, "step": 2810},
    {"epoch": 4.5120000000000005, "grad_norm": 0.5740355253219604, "learning_rate": 4.597894736842106e-06, "loss": 0.0032, "step": 2820},
    {"epoch": 4.5280000000000005, "grad_norm": 0.6319159865379333, "learning_rate": 4.5768421052631584e-06, "loss": 0.0026, "step": 2830},
    {"epoch": 4.5440000000000005, "grad_norm": 0.710137665271759, "learning_rate": 4.55578947368421e-06, "loss": 0.0025, "step": 2840},
    {"epoch": 4.5600000000000005, "grad_norm": 0.3798495829105377, "learning_rate": 4.534736842105264e-06, "loss": 0.0032, "step": 2850},
    {"epoch": 4.576, "grad_norm": 0.09057649224996567, "learning_rate": 4.513684210526316e-06, "loss": 0.0019, "step": 2860},
    {"epoch": 4.592, "grad_norm": 0.10224127769470215, "learning_rate": 4.492631578947369e-06, "loss": 0.0024, "step": 2870},
    {"epoch": 4.608, "grad_norm": 0.45868900418281555, "learning_rate": 4.471578947368421e-06, "loss": 0.0033, "step": 2880},
    {"epoch": 4.624, "grad_norm": 0.12800952792167664, "learning_rate": 4.450526315789474e-06, "loss": 0.006, "step": 2890},
    {"epoch": 4.64, "grad_norm": 0.12955500185489655, "learning_rate": 4.429473684210527e-06, "loss": 0.0028, "step": 2900},
    {"epoch": 4.656, "grad_norm": 0.3282037675380707, "learning_rate": 4.408421052631579e-06, "loss": 0.0032, "step": 2910},
    {"epoch": 4.672, "grad_norm": 0.480395644903183, "learning_rate": 4.3873684210526315e-06, "loss": 0.0039, "step": 2920},
    {"epoch": 4.688, "grad_norm": 1.337486743927002, "learning_rate": 4.366315789473685e-06, "loss": 0.0053, "step": 2930},
    {"epoch": 4.704, "grad_norm": 0.39609357714653015, "learning_rate": 4.345263157894738e-06, "loss": 0.0026, "step": 2940},
    {"epoch": 4.72, "grad_norm": 0.2566475570201874, "learning_rate": 4.3242105263157894e-06, "loss": 0.0041, "step": 2950},
    {"epoch": 4.736, "grad_norm": 0.6467047929763794, "learning_rate": 4.303157894736842e-06, "loss": 0.0052, "step": 2960},
    {"epoch": 4.752, "grad_norm": 1.1100505590438843, "learning_rate": 4.282105263157896e-06, "loss": 0.0062, "step": 2970},
    {"epoch": 4.768, "grad_norm": 0.6272072196006775, "learning_rate": 4.261052631578947e-06, "loss": 0.0042, "step": 2980},
    {"epoch": 4.784, "grad_norm": 0.34918469190597534, "learning_rate": 4.24e-06, "loss": 0.0036, "step": 2990},
    {"epoch": 4.8, "grad_norm": 0.37302619218826294, "learning_rate": 4.218947368421053e-06, "loss": 0.0036, "step": 3000},
    {"epoch": 4.8, "eval_loss": 0.03739345446228981, "eval_runtime": 1465.397, "eval_samples_per_second": 3.85, "eval_steps_per_second": 3.85, "step": 3000},
    {"epoch": 4.816, "grad_norm": 0.5744602084159851, "learning_rate": 4.197894736842106e-06, "loss": 0.0061, "step": 3010},
    {"epoch": 4.832, "grad_norm": 1.2474104166030884, "learning_rate": 4.176842105263158e-06, "loss": 0.0035, "step": 3020},
    {"epoch": 4.848, "grad_norm": 0.25358426570892334, "learning_rate": 4.155789473684211e-06, "loss": 0.0039, "step": 3030},
    {"epoch": 4.864, "grad_norm": 1.7587385177612305, "learning_rate": 4.134736842105263e-06, "loss": 0.0043, "step": 3040},
    {"epoch": 4.88, "grad_norm": 0.5396108627319336, "learning_rate": 4.113684210526316e-06, "loss": 0.0039, "step": 3050},
    {"epoch": 4.896, "grad_norm": 0.2963917553424835, "learning_rate": 4.092631578947369e-06, "loss": 0.0037, "step": 3060},
    {"epoch": 4.912, "grad_norm": 0.6284441947937012, "learning_rate": 4.071578947368421e-06, "loss": 0.0042, "step": 3070},
    {"epoch": 4.928, "grad_norm": 0.1446247100830078, "learning_rate": 4.050526315789474e-06, "loss": 0.0054, "step": 3080},
    {"epoch": 4.944, "grad_norm": 0.5421654582023621, "learning_rate": 4.029473684210527e-06, "loss": 0.0021, "step": 3090},
    {"epoch": 4.96, "grad_norm": 0.10596631467342377, "learning_rate": 4.008421052631579e-06, "loss": 0.0032, "step": 3100},
    {"epoch": 4.976, "grad_norm": 0.36356231570243835, "learning_rate": 3.987368421052632e-06, "loss": 0.0029, "step": 3110},
    {"epoch": 4.992, "grad_norm": 0.13546526432037354, "learning_rate": 3.9663157894736846e-06, "loss": 0.0026, "step": 3120},
    {"epoch": 5.008, "grad_norm": 0.09022821485996246, "learning_rate": 3.945263157894737e-06, "loss": 0.0021, "step": 3130},
    {"epoch": 5.024, "grad_norm": 0.11343013495206833, "learning_rate": 3.92421052631579e-06, "loss": 0.0035, "step": 3140},
    {"epoch": 5.04, "grad_norm": 0.39134401082992554, "learning_rate": 3.9031578947368425e-06, "loss": 0.0018, "step": 3150},
    {"epoch": 5.056, "grad_norm": 0.05532646179199219, "learning_rate": 3.882105263157895e-06, "loss": 0.0015, "step": 3160},
    {"epoch": 5.072, "grad_norm": 0.8032190203666687, "learning_rate": 3.861052631578948e-06, "loss": 0.0011, "step": 3170},
    {"epoch": 5.088, "grad_norm": 0.05971115827560425, "learning_rate": 3.8400000000000005e-06, "loss": 0.0015, "step": 3180},
    {"epoch": 5.104, "grad_norm": 0.690596342086792, "learning_rate": 3.818947368421053e-06, "loss": 0.0024, "step": 3190},
    {"epoch": 5.12, "grad_norm": 0.15326976776123047, "learning_rate": 3.7978947368421054e-06, "loss": 0.0014, "step": 3200},
    {"epoch": 5.136, "grad_norm": 0.05565618351101875, "learning_rate": 3.776842105263158e-06, "loss": 0.0014, "step": 3210},
    {"epoch": 5.152, "grad_norm": 0.18008044362068176, "learning_rate": 3.755789473684211e-06, "loss": 0.0053, "step": 3220},
    {"epoch": 5.168, "grad_norm": 0.3844296932220459, "learning_rate": 3.7347368421052634e-06, "loss": 0.0018, "step": 3230},
    {"epoch": 5.184, "grad_norm": 0.15020981431007385, "learning_rate": 3.713684210526316e-06, "loss": 0.0013, "step": 3240},
    {"epoch": 5.2, "grad_norm": 0.2373928427696228, "learning_rate": 3.6926315789473687e-06, "loss": 0.0013, "step": 3250},
    {"epoch": 5.2, "eval_loss": 0.03558925539255142, "eval_runtime": 3288.1466, "eval_samples_per_second": 1.716, "eval_steps_per_second": 1.716, "step": 3250},
    {"epoch": 5.216, "grad_norm": 0.6427579522132874, "learning_rate": 3.6715789473684217e-06, "loss": 0.0024, "step": 3260},
    {"epoch": 5.232, "grad_norm": 0.09459923207759857, "learning_rate": 3.650526315789474e-06, "loss": 0.0014, "step": 3270},
    {"epoch": 5.248, "grad_norm": 0.4201672077178955, "learning_rate": 3.6294736842105266e-06, "loss": 0.0013, "step": 3280},
    {"epoch": 5.264, "grad_norm": 2.386017322540283, "learning_rate": 3.6084210526315793e-06, "loss": 0.0014, "step": 3290},
    {"epoch": 5.28, "grad_norm": 0.05354594066739082, "learning_rate": 3.5873684210526315e-06, "loss": 0.0012, "step": 3300},
    {"epoch": 5.296, "grad_norm": 0.4693075120449066, "learning_rate": 3.5663157894736846e-06, "loss": 0.0011, "step": 3310},
    {"epoch": 5.312, "grad_norm": 0.23063229024410248, "learning_rate": 3.5452631578947372e-06, "loss": 0.0011, "step": 3320},
    {"epoch": 5.328, "grad_norm": 0.6210919618606567, "learning_rate": 3.52421052631579e-06, "loss": 0.002, "step": 3330},
    {"epoch": 5.344, "grad_norm": 0.19046130776405334, "learning_rate": 3.503157894736842e-06, "loss": 0.0011, "step": 3340},
    {"epoch": 5.36, "grad_norm": 0.8910662531852722, "learning_rate": 3.482105263157895e-06, "loss": 0.0037, "step": 3350},
    {"epoch": 5.376, "grad_norm": 1.3482168912887573, "learning_rate": 3.461052631578948e-06, "loss": 0.0018, "step": 3360},
    {"epoch": 5.392, "grad_norm": 0.053552981466054916, "learning_rate": 3.44e-06, "loss": 0.001, "step": 3370},
    {"epoch": 5.408, "grad_norm": 0.165148064494133, "learning_rate": 3.4189473684210528e-06, "loss": 0.0018, "step": 3380},
    {"epoch": 5.424, "grad_norm": 0.1561366617679596, "learning_rate": 3.3978947368421054e-06, "loss": 0.0017, "step": 3390},
    {"epoch": 5.44, "grad_norm": 0.0557156503200531, "learning_rate": 3.3768421052631585e-06, "loss": 0.0011, "step": 3400},
    {"epoch": 5.456, "grad_norm": 0.045138657093048096, "learning_rate": 3.3557894736842107e-06, "loss": 0.0008, "step": 3410},
    {"epoch": 5.4719999999999995, "grad_norm": 0.6297252178192139, "learning_rate": 3.3347368421052634e-06, "loss": 0.0023, "step": 3420},
    {"epoch": 5.4879999999999995, "grad_norm": 0.431007444858551, "learning_rate": 3.313684210526316e-06, "loss": 0.0014, "step": 3430},
    {"epoch": 5.504, "grad_norm": 0.09312240779399872, "learning_rate": 3.292631578947369e-06, "loss": 0.002, "step": 3440},
    {"epoch": 5.52, "grad_norm": 0.19819211959838867, "learning_rate": 3.2715789473684213e-06, "loss": 0.001, "step": 3450},
    {"epoch": 5.536, "grad_norm": 0.09475424885749817, "learning_rate": 3.250526315789474e-06, "loss": 0.0019, "step": 3460},
    {"epoch": 5.552, "grad_norm": 0.034463245421648026, "learning_rate": 3.2294736842105266e-06, "loss": 0.0024, "step": 3470},
    {"epoch": 5.568, "grad_norm": 0.037909191101789474, "learning_rate": 3.208421052631579e-06, "loss": 0.0019, "step": 3480},
    {"epoch": 5.584, "grad_norm": 0.14103959500789642, "learning_rate": 3.187368421052632e-06, "loss": 0.0023, "step": 3490},
    {"epoch": 5.6, "grad_norm": 0.6614345908164978, "learning_rate": 3.1663157894736846e-06, "loss": 0.0014, "step": 3500},
    {"epoch": 5.6, "eval_loss": 0.03668233007192612, "eval_runtime": 1484.1952, "eval_samples_per_second": 3.801, "eval_steps_per_second": 3.801, "step": 3500},
    {"epoch": 5.616, "grad_norm": 0.051937129348516464, "learning_rate": 3.1452631578947373e-06, "loss": 0.0021, "step": 3510},
    {"epoch": 5.632, "grad_norm": 0.07696125656366348, "learning_rate": 3.1242105263157895e-06, "loss": 0.0017, "step": 3520},
    {"epoch": 5.648, "grad_norm": 0.2881506681442261, "learning_rate": 3.1031578947368426e-06, "loss": 0.0014, "step": 3530},
    {"epoch": 5.664, "grad_norm": 0.06252899765968323, "learning_rate": 3.0821052631578952e-06, "loss": 0.0017, "step": 3540},
    {"epoch": 5.68, "grad_norm": 0.4536062180995941, "learning_rate": 3.0610526315789475e-06, "loss": 0.0015, "step": 3550},
    {"epoch": 5.696, "grad_norm": 0.06475075334310532, "learning_rate": 3.04e-06, "loss": 0.0007, "step": 3560},
    {"epoch": 5.712, "grad_norm": 0.10876597464084625, "learning_rate": 3.0189473684210528e-06, "loss": 0.0012, "step": 3570},
    {"epoch": 5.728, "grad_norm": 0.04506557062268257, "learning_rate": 2.997894736842106e-06, "loss": 0.0012, "step": 3580},
    {"epoch": 5.744, "grad_norm": 0.08606194704771042, "learning_rate": 2.976842105263158e-06, "loss": 0.0008, "step": 3590},
    {"epoch": 5.76, "grad_norm": 0.10132581740617752, "learning_rate": 2.9557894736842107e-06, "loss": 0.0008, "step": 3600},
    {"epoch": 5.776, "grad_norm": 0.07705429196357727, "learning_rate": 2.9347368421052634e-06, "loss": 0.0006, "step": 3610},
    {"epoch": 5.792, "grad_norm": 0.09106363356113434, "learning_rate": 2.9136842105263156e-06, "loss": 0.0018, "step": 3620},
    {"epoch": 5.808, "grad_norm": 0.23617351055145264, "learning_rate": 2.8926315789473687e-06, "loss": 0.0019, "step": 3630},
    {"epoch": 5.824, "grad_norm": 0.20716822147369385, "learning_rate": 2.8715789473684214e-06, "loss": 0.0013, "step": 3640},
    {"epoch": 5.84, "grad_norm": 0.21701906621456146, "learning_rate": 2.850526315789474e-06, "loss": 0.0014, "step": 3650},
    {"epoch": 5.856, "grad_norm": 0.611148476600647, "learning_rate": 2.8294736842105262e-06, "loss": 0.0016, "step": 3660},
    {"epoch": 5.872, "grad_norm": 0.28972306847572327, "learning_rate": 2.8084210526315793e-06, "loss": 0.0021, "step": 3670},
    {"epoch": 5.888, "grad_norm": 0.13573560118675232, "learning_rate": 2.787368421052632e-06, "loss": 0.0023, "step": 3680},
    {"epoch": 5.904, "grad_norm": 0.15231332182884216, "learning_rate": 2.766315789473684e-06, "loss": 0.0017, "step": 3690},
    {"epoch": 5.92, "grad_norm": 0.24314993619918823, "learning_rate": 2.745263157894737e-06, "loss": 0.0015, "step": 3700},
    {"epoch": 5.936, "grad_norm": 0.2743528485298157, "learning_rate": 2.72421052631579e-06, "loss": 0.0011, "step": 3710},
    {"epoch": 5.952, "grad_norm": 0.20800799131393433, "learning_rate": 2.7031578947368426e-06, "loss": 0.0009, "step": 3720},
    {"epoch": 5.968, "grad_norm": 0.04552097246050835, "learning_rate": 2.682105263157895e-06, "loss": 0.0014, "step": 3730},
    {"epoch": 5.984, "grad_norm": 0.7857608199119568, "learning_rate": 2.6610526315789475e-06, "loss": 0.0022, "step": 3740},
    {"epoch": 6.0, "grad_norm": 0.8046532273292542, "learning_rate": 2.64e-06, "loss": 0.0018, "step": 3750},
    {"epoch": 6.0, "eval_loss": 0.037111204117536545, "eval_runtime": 1495.9727, "eval_samples_per_second": 3.771, "eval_steps_per_second": 3.771, "step": 3750},
    {"epoch": 6.016, "grad_norm": 0.09064371883869171, "learning_rate": 2.6189473684210524e-06, "loss": 0.0005, "step": 3760},
    {"epoch": 6.032, "grad_norm": 0.148435577750206, "learning_rate": 2.5978947368421054e-06, "loss": 0.0008, "step": 3770},
    {"epoch": 6.048, "grad_norm": 0.03715546801686287, "learning_rate": 2.576842105263158e-06, "loss": 0.0007, "step": 3780},
    {"epoch": 6.064, "grad_norm": 0.024771861732006073, "learning_rate": 2.5557894736842108e-06, "loss": 0.0005, "step": 3790},
    {"epoch": 6.08, "grad_norm": 0.08074558526277542,
|
"learning_rate": 2.534736842105263e-06, |
|
"loss": 0.0013, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 6.096, |
|
"grad_norm": 0.054898303002119064, |
|
"learning_rate": 2.513684210526316e-06, |
|
"loss": 0.0007, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 6.112, |
|
"grad_norm": 0.3437640070915222, |
|
"learning_rate": 2.4926315789473687e-06, |
|
"loss": 0.0007, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 6.128, |
|
"grad_norm": 0.12333539873361588, |
|
"learning_rate": 2.4715789473684214e-06, |
|
"loss": 0.0004, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 6.144, |
|
"grad_norm": 0.027326831594109535, |
|
"learning_rate": 2.4505263157894736e-06, |
|
"loss": 0.0005, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"grad_norm": 0.025773270055651665, |
|
"learning_rate": 2.4294736842105267e-06, |
|
"loss": 0.0005, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 6.176, |
|
"grad_norm": 0.04546317830681801, |
|
"learning_rate": 2.408421052631579e-06, |
|
"loss": 0.0009, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 6.192, |
|
"grad_norm": 0.054939109832048416, |
|
"learning_rate": 2.387368421052632e-06, |
|
"loss": 0.0005, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 6.208, |
|
"grad_norm": 0.17456945776939392, |
|
"learning_rate": 2.3663157894736842e-06, |
|
"loss": 0.0004, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 6.224, |
|
"grad_norm": 0.04125133529305458, |
|
"learning_rate": 2.3452631578947373e-06, |
|
"loss": 0.0004, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"grad_norm": 0.2721328139305115, |
|
"learning_rate": 2.3242105263157895e-06, |
|
"loss": 0.0005, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 6.256, |
|
"grad_norm": 0.20841915905475616, |
|
"learning_rate": 2.3031578947368426e-06, |
|
"loss": 0.0004, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 6.272, |
|
"grad_norm": 0.024725254625082016, |
|
"learning_rate": 2.282105263157895e-06, |
|
"loss": 0.0011, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 6.288, |
|
"grad_norm": 0.04614824429154396, |
|
"learning_rate": 2.2610526315789475e-06, |
|
"loss": 0.0004, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 6.304, |
|
"grad_norm": 0.02417997643351555, |
|
"learning_rate": 2.24e-06, |
|
"loss": 0.0004, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"grad_norm": 0.025578554719686508, |
|
"learning_rate": 2.218947368421053e-06, |
|
"loss": 0.0005, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 6.336, |
|
"grad_norm": 0.024059904739260674, |
|
"learning_rate": 2.1978947368421055e-06, |
|
"loss": 0.0004, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 6.352, |
|
"grad_norm": 0.01979261450469494, |
|
"learning_rate": 2.176842105263158e-06, |
|
"loss": 0.0006, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 6.368, |
|
"grad_norm": 0.0502333827316761, |
|
"learning_rate": 2.1557894736842108e-06, |
|
"loss": 0.0007, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 6.384, |
|
"grad_norm": 0.390522837638855, |
|
"learning_rate": 2.1347368421052634e-06, |
|
"loss": 0.0005, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"grad_norm": 0.21075351536273956, |
|
"learning_rate": 2.1136842105263157e-06, |
|
"loss": 0.0008, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"eval_loss": 0.03717663884162903, |
|
"eval_runtime": 1496.5829, |
|
"eval_samples_per_second": 3.77, |
|
"eval_steps_per_second": 3.77, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 6.416, |
|
"grad_norm": 0.08990900218486786, |
|
"learning_rate": 2.0926315789473687e-06, |
|
"loss": 0.0004, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 6.432, |
|
"grad_norm": 0.11204410344362259, |
|
"learning_rate": 2.071578947368421e-06, |
|
"loss": 0.0003, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 6.448, |
|
"grad_norm": 0.038647376000881195, |
|
"learning_rate": 2.050526315789474e-06, |
|
"loss": 0.0007, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 6.464, |
|
"grad_norm": 0.01824771985411644, |
|
"learning_rate": 2.0294736842105263e-06, |
|
"loss": 0.0003, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"grad_norm": 0.02977917715907097, |
|
"learning_rate": 2.0084210526315794e-06, |
|
"loss": 0.0004, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 6.496, |
|
"grad_norm": 0.05292543023824692, |
|
"learning_rate": 1.9873684210526316e-06, |
|
"loss": 0.0005, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 6.5120000000000005, |
|
"grad_norm": 0.011542899534106255, |
|
"learning_rate": 1.9663157894736842e-06, |
|
"loss": 0.0005, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 6.5280000000000005, |
|
"grad_norm": 0.5990496277809143, |
|
"learning_rate": 1.945263157894737e-06, |
|
"loss": 0.0005, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 6.5440000000000005, |
|
"grad_norm": 0.8402637839317322, |
|
"learning_rate": 1.9242105263157896e-06, |
|
"loss": 0.0004, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 6.5600000000000005, |
|
"grad_norm": 0.02681225724518299, |
|
"learning_rate": 1.9031578947368424e-06, |
|
"loss": 0.0005, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 6.576, |
|
"grad_norm": 0.05136146768927574, |
|
"learning_rate": 1.8821052631578949e-06, |
|
"loss": 0.0009, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 6.592, |
|
"grad_norm": 0.02351960726082325, |
|
"learning_rate": 1.8610526315789475e-06, |
|
"loss": 0.0004, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 6.608, |
|
"grad_norm": 0.0421082004904747, |
|
"learning_rate": 1.8400000000000002e-06, |
|
"loss": 0.0005, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 6.624, |
|
"grad_norm": 0.039992865175008774, |
|
"learning_rate": 1.8189473684210528e-06, |
|
"loss": 0.0007, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"grad_norm": 0.044527649879455566, |
|
"learning_rate": 1.7978947368421055e-06, |
|
"loss": 0.0014, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 6.656, |
|
"grad_norm": 0.3133164942264557, |
|
"learning_rate": 1.776842105263158e-06, |
|
"loss": 0.0011, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 6.672, |
|
"grad_norm": 0.013667403720319271, |
|
"learning_rate": 1.7557894736842108e-06, |
|
"loss": 0.0018, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 6.688, |
|
"grad_norm": 0.06877803057432175, |
|
"learning_rate": 1.7347368421052632e-06, |
|
"loss": 0.0003, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 6.704, |
|
"grad_norm": 0.03892038017511368, |
|
"learning_rate": 1.713684210526316e-06, |
|
"loss": 0.0003, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"grad_norm": 0.05400446802377701, |
|
"learning_rate": 1.6926315789473685e-06, |
|
"loss": 0.0003, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 6.736, |
|
"grad_norm": 0.04348497465252876, |
|
"learning_rate": 1.6715789473684212e-06, |
|
"loss": 0.0012, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 6.752, |
|
"grad_norm": 0.1150202602148056, |
|
"learning_rate": 1.6505263157894739e-06, |
|
"loss": 0.0004, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 6.768, |
|
"grad_norm": 0.022777294740080833, |
|
"learning_rate": 1.6294736842105263e-06, |
|
"loss": 0.0003, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 6.784, |
|
"grad_norm": 0.05009898915886879, |
|
"learning_rate": 1.6084210526315792e-06, |
|
"loss": 0.0004, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"grad_norm": 0.012213055975735188, |
|
"learning_rate": 1.5873684210526316e-06, |
|
"loss": 0.0003, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"eval_loss": 0.03691783547401428, |
|
"eval_runtime": 1507.6111, |
|
"eval_samples_per_second": 3.742, |
|
"eval_steps_per_second": 3.742, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 6.816, |
|
"grad_norm": 0.23179693520069122, |
|
"learning_rate": 1.5663157894736845e-06, |
|
"loss": 0.0005, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 6.832, |
|
"grad_norm": 0.03861673176288605, |
|
"learning_rate": 1.545263157894737e-06, |
|
"loss": 0.0004, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 6.848, |
|
"grad_norm": 0.1452307105064392, |
|
"learning_rate": 1.5242105263157898e-06, |
|
"loss": 0.0003, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 6.864, |
|
"grad_norm": 0.050570085644721985, |
|
"learning_rate": 1.5031578947368422e-06, |
|
"loss": 0.0003, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"grad_norm": 0.08024540543556213, |
|
"learning_rate": 1.482105263157895e-06, |
|
"loss": 0.0004, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 6.896, |
|
"grad_norm": 0.02591465599834919, |
|
"learning_rate": 1.4610526315789475e-06, |
|
"loss": 0.0005, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 6.912, |
|
"grad_norm": 0.01832268387079239, |
|
"learning_rate": 1.44e-06, |
|
"loss": 0.0004, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 6.928, |
|
"grad_norm": 0.044344332069158554, |
|
"learning_rate": 1.4189473684210528e-06, |
|
"loss": 0.0004, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 6.944, |
|
"grad_norm": 0.03570440039038658, |
|
"learning_rate": 1.3978947368421053e-06, |
|
"loss": 0.0003, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"grad_norm": 0.18772557377815247, |
|
"learning_rate": 1.3768421052631582e-06, |
|
"loss": 0.0004, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 6.976, |
|
"grad_norm": 0.05984479933977127, |
|
"learning_rate": 1.3557894736842106e-06, |
|
"loss": 0.0003, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 6.992, |
|
"grad_norm": 0.3860021233558655, |
|
"learning_rate": 1.3347368421052635e-06, |
|
"loss": 0.0006, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 7.008, |
|
"grad_norm": 0.015999499708414078, |
|
"learning_rate": 1.313684210526316e-06, |
|
"loss": 0.0003, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 7.024, |
|
"grad_norm": 0.027498619630932808, |
|
"learning_rate": 1.2926315789473683e-06, |
|
"loss": 0.0002, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"grad_norm": 0.24971920251846313, |
|
"learning_rate": 1.2715789473684212e-06, |
|
"loss": 0.0004, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 7.056, |
|
"grad_norm": 0.011065910570323467, |
|
"learning_rate": 1.2505263157894737e-06, |
|
"loss": 0.0003, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 7.072, |
|
"grad_norm": 0.01698007434606552, |
|
"learning_rate": 1.2294736842105263e-06, |
|
"loss": 0.0008, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 7.088, |
|
"grad_norm": 0.02532590739428997, |
|
"learning_rate": 1.208421052631579e-06, |
|
"loss": 0.0003, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 7.104, |
|
"grad_norm": 0.01716386154294014, |
|
"learning_rate": 1.1873684210526316e-06, |
|
"loss": 0.0003, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"grad_norm": 0.016450321301817894, |
|
"learning_rate": 1.1663157894736843e-06, |
|
"loss": 0.0002, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 7.136, |
|
"grad_norm": 0.012627181597054005, |
|
"learning_rate": 1.145263157894737e-06, |
|
"loss": 0.0003, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 7.152, |
|
"grad_norm": 0.05683906376361847, |
|
"learning_rate": 1.1242105263157896e-06, |
|
"loss": 0.0003, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 7.168, |
|
"grad_norm": 0.03171288222074509, |
|
"learning_rate": 1.1031578947368422e-06, |
|
"loss": 0.0002, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 7.184, |
|
"grad_norm": 0.02577533759176731, |
|
"learning_rate": 1.082105263157895e-06, |
|
"loss": 0.0003, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"grad_norm": 0.019202925264835358, |
|
"learning_rate": 1.0610526315789473e-06, |
|
"loss": 0.0003, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"eval_loss": 0.037218254059553146, |
|
"eval_runtime": 1593.609, |
|
"eval_samples_per_second": 3.54, |
|
"eval_steps_per_second": 3.54, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 7.216, |
|
"grad_norm": 0.01943960040807724, |
|
"learning_rate": 1.04e-06, |
|
"loss": 0.0003, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 7.232, |
|
"grad_norm": 0.012748222798109055, |
|
"learning_rate": 1.0189473684210527e-06, |
|
"loss": 0.0002, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 7.248, |
|
"grad_norm": 0.017005812376737595, |
|
"learning_rate": 9.978947368421053e-07, |
|
"loss": 0.0003, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 7.264, |
|
"grad_norm": 0.024691853672266006, |
|
"learning_rate": 9.76842105263158e-07, |
|
"loss": 0.0002, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 7.28, |
|
"grad_norm": 0.3427879512310028, |
|
"learning_rate": 9.557894736842106e-07, |
|
"loss": 0.0004, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 7.296, |
|
"grad_norm": 0.03403201699256897, |
|
"learning_rate": 9.347368421052633e-07, |
|
"loss": 0.0002, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 7.312, |
|
"grad_norm": 0.022941039875149727, |
|
"learning_rate": 9.136842105263159e-07, |
|
"loss": 0.0003, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 7.328, |
|
"grad_norm": 0.032287679612636566, |
|
"learning_rate": 8.926315789473685e-07, |
|
"loss": 0.0002, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 7.344, |
|
"grad_norm": 0.013065003789961338, |
|
"learning_rate": 8.715789473684211e-07, |
|
"loss": 0.0002, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"grad_norm": 0.026764079928398132, |
|
"learning_rate": 8.505263157894737e-07, |
|
"loss": 0.0002, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 7.376, |
|
"grad_norm": 0.013478124514222145, |
|
"learning_rate": 8.294736842105263e-07, |
|
"loss": 0.0002, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 7.392, |
|
"grad_norm": 0.014998686499893665, |
|
"learning_rate": 8.08421052631579e-07, |
|
"loss": 0.0003, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 7.408, |
|
"grad_norm": 0.015325380489230156, |
|
"learning_rate": 7.873684210526316e-07, |
|
"loss": 0.0002, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 7.424, |
|
"grad_norm": 0.018431052565574646, |
|
"learning_rate": 7.663157894736843e-07, |
|
"loss": 0.0002, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"grad_norm": 0.01385441143065691, |
|
"learning_rate": 7.45263157894737e-07, |
|
"loss": 0.0003, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 7.456, |
|
"grad_norm": 0.018942181020975113, |
|
"learning_rate": 7.242105263157895e-07, |
|
"loss": 0.0002, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 7.4719999999999995, |
|
"grad_norm": 0.036915648728609085, |
|
"learning_rate": 7.031578947368422e-07, |
|
"loss": 0.0002, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 7.4879999999999995, |
|
"grad_norm": 0.053086813539266586, |
|
"learning_rate": 6.821052631578948e-07, |
|
"loss": 0.0002, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 7.504, |
|
"grad_norm": 0.028192859143018723, |
|
"learning_rate": 6.610526315789475e-07, |
|
"loss": 0.0003, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"grad_norm": 0.6953186988830566, |
|
"learning_rate": 6.4e-07, |
|
"loss": 0.0005, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 7.536, |
|
"grad_norm": 0.016007578000426292, |
|
"learning_rate": 6.189473684210527e-07, |
|
"loss": 0.0002, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 7.552, |
|
"grad_norm": 0.029922867193818092, |
|
"learning_rate": 5.978947368421053e-07, |
|
"loss": 0.0003, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 7.568, |
|
"grad_norm": 0.01892116479575634, |
|
"learning_rate": 5.76842105263158e-07, |
|
"loss": 0.0003, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 7.584, |
|
"grad_norm": 0.03329944983124733, |
|
"learning_rate": 5.557894736842106e-07, |
|
"loss": 0.0002, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"grad_norm": 0.025351159274578094, |
|
"learning_rate": 5.347368421052633e-07, |
|
"loss": 0.0002, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 7.6, |
|
"eval_loss": 0.03762885555624962, |
|
"eval_runtime": 1462.6796, |
|
"eval_samples_per_second": 3.857, |
|
"eval_steps_per_second": 3.857, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 7.616, |
|
"grad_norm": 0.017339129000902176, |
|
"learning_rate": 5.136842105263158e-07, |
|
"loss": 0.0002, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 7.632, |
|
"grad_norm": 0.016879908740520477, |
|
"learning_rate": 4.926315789473685e-07, |
|
"loss": 0.0004, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 7.648, |
|
"grad_norm": 0.02509094960987568, |
|
"learning_rate": 4.715789473684211e-07, |
|
"loss": 0.0002, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 7.664, |
|
"grad_norm": 0.017198877409100533, |
|
"learning_rate": 4.505263157894737e-07, |
|
"loss": 0.0003, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 7.68, |
|
"grad_norm": 0.014106789603829384, |
|
"learning_rate": 4.2947368421052635e-07, |
|
"loss": 0.0002, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 7.696, |
|
"grad_norm": 0.03618380054831505, |
|
"learning_rate": 4.08421052631579e-07, |
|
"loss": 0.0002, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 7.712, |
|
"grad_norm": 0.013972431421279907, |
|
"learning_rate": 3.873684210526316e-07, |
|
"loss": 0.0002, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 7.728, |
|
"grad_norm": 0.01894882321357727, |
|
"learning_rate": 3.663157894736842e-07, |
|
"loss": 0.0002, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 7.744, |
|
"grad_norm": 0.01931534893810749, |
|
"learning_rate": 3.4526315789473686e-07, |
|
"loss": 0.0003, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"grad_norm": 0.015359962359070778, |
|
"learning_rate": 3.242105263157895e-07, |
|
"loss": 0.0002, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 7.776, |
|
"grad_norm": 0.0260631013661623, |
|
"learning_rate": 3.031578947368421e-07, |
|
"loss": 0.0002, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 7.792, |
|
"grad_norm": 0.06113347038626671, |
|
"learning_rate": 2.821052631578948e-07, |
|
"loss": 0.0002, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 7.808, |
|
"grad_norm": 0.0257665254175663, |
|
"learning_rate": 2.610526315789474e-07, |
|
"loss": 0.0003, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 7.824, |
|
"grad_norm": 0.01588517054915428, |
|
"learning_rate": 2.4000000000000003e-07, |
|
"loss": 0.0002, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"grad_norm": 0.012915355153381824, |
|
"learning_rate": 2.1894736842105263e-07, |
|
"loss": 0.0002, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 7.856, |
|
"grad_norm": 0.01570785976946354, |
|
"learning_rate": 1.978947368421053e-07, |
|
"loss": 0.0002, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 7.872, |
|
"grad_norm": 0.020730208605527878, |
|
"learning_rate": 1.768421052631579e-07, |
|
"loss": 0.0002, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 7.888, |
|
"grad_norm": 0.03537014126777649, |
|
"learning_rate": 1.5578947368421054e-07, |
|
"loss": 0.0003, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 7.904, |
|
"grad_norm": 0.022489851340651512, |
|
"learning_rate": 1.3473684210526317e-07, |
|
"loss": 0.0002, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"grad_norm": 0.017718670889735222, |
|
"learning_rate": 1.136842105263158e-07, |
|
"loss": 0.0002, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 7.936, |
|
"grad_norm": 0.016573410481214523, |
|
"learning_rate": 9.263157894736844e-08, |
|
"loss": 0.0007, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 7.952, |
|
"grad_norm": 0.008749530650675297, |
|
"learning_rate": 7.157894736842106e-08, |
|
"loss": 0.0003, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 7.968, |
|
"grad_norm": 0.04577890783548355, |
|
"learning_rate": 5.0526315789473687e-08, |
|
"loss": 0.0003, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 7.984, |
|
"grad_norm": 0.061525970697402954, |
|
"learning_rate": 2.9473684210526318e-08, |
|
"loss": 0.0002, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"grad_norm": 0.029113447293639183, |
|
"learning_rate": 8.421052631578948e-09, |
|
"loss": 0.0002, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 8.0, |
|
"eval_loss": 0.03780149295926094, |
|
"eval_runtime": 1480.7553, |
|
"eval_samples_per_second": 3.81, |
|
"eval_steps_per_second": 3.81, |
|
"step": 5000 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 5000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 8, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.727921844224e+20, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|