{
  "best_metric": 0.7716161012649536,
  "best_model_checkpoint": "./outputs/lora-out/checkpoint-11000",
  "epoch": 1.3056379821958457,
  "eval_steps": 500,
  "global_step": 11000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0011869436201780415,
      "grad_norm": 0.5230351686477661,
      "learning_rate": 5.938242280285035e-07,
      "loss": 1.0767,
      "step": 10
    },
    {
      "epoch": 0.002373887240356083,
      "grad_norm": 0.37288129329681396,
      "learning_rate": 1.187648456057007e-06,
      "loss": 1.1895,
      "step": 20
    },
    {
      "epoch": 0.0035608308605341245,
      "grad_norm": 0.42279818654060364,
      "learning_rate": 1.7814726840855108e-06,
      "loss": 1.1203,
      "step": 30
    },
    {
      "epoch": 0.004747774480712166,
      "grad_norm": 0.47712886333465576,
      "learning_rate": 2.375296912114014e-06,
      "loss": 1.1429,
      "step": 40
    },
    {
      "epoch": 0.005934718100890208,
      "grad_norm": 0.3551290035247803,
      "learning_rate": 2.9691211401425176e-06,
      "loss": 1.1629,
      "step": 50
    },
    {
      "epoch": 0.007121661721068249,
      "grad_norm": 0.4963815212249756,
      "learning_rate": 3.5629453681710215e-06,
      "loss": 1.0595,
      "step": 60
    },
    {
      "epoch": 0.00830860534124629,
      "grad_norm": 0.3718396723270416,
      "learning_rate": 4.156769596199525e-06,
      "loss": 1.0844,
      "step": 70
    },
    {
      "epoch": 0.009495548961424332,
      "grad_norm": 0.4744749069213867,
      "learning_rate": 4.750593824228028e-06,
      "loss": 1.0476,
      "step": 80
    },
    {
      "epoch": 0.010682492581602374,
      "grad_norm": 0.4049204885959625,
      "learning_rate": 5.344418052256532e-06,
      "loss": 1.0769,
      "step": 90
    },
    {
      "epoch": 0.011869436201780416,
      "grad_norm": 0.5128685235977173,
      "learning_rate": 5.938242280285035e-06,
      "loss": 1.0685,
      "step": 100
    },
    {
      "epoch": 0.013056379821958458,
      "grad_norm": 0.4374200403690338,
      "learning_rate": 6.532066508313539e-06,
      "loss": 1.1161,
      "step": 110
    },
    {
      "epoch": 0.014243323442136498,
      "grad_norm": 0.3933285176753998,
      "learning_rate": 7.125890736342043e-06,
      "loss": 1.0004,
      "step": 120
    },
    {
      "epoch": 0.01543026706231454,
      "grad_norm": 0.41642361879348755,
      "learning_rate": 7.719714964370546e-06,
      "loss": 1.0188,
      "step": 130
    },
    {
      "epoch": 0.01661721068249258,
      "grad_norm": 0.6790082454681396,
      "learning_rate": 8.31353919239905e-06,
      "loss": 1.0037,
      "step": 140
    },
    {
      "epoch": 0.017804154302670624,
      "grad_norm": 0.46090933680534363,
      "learning_rate": 8.907363420427554e-06,
      "loss": 1.1503,
      "step": 150
    },
    {
      "epoch": 0.018991097922848664,
      "grad_norm": 0.46357613801956177,
      "learning_rate": 9.501187648456057e-06,
      "loss": 0.937,
      "step": 160
    },
    {
      "epoch": 0.020178041543026708,
      "grad_norm": 0.5010367035865784,
      "learning_rate": 1.0095011876484562e-05,
      "loss": 1.1009,
      "step": 170
    },
    {
      "epoch": 0.021364985163204748,
      "grad_norm": 0.53301602602005,
      "learning_rate": 1.0688836104513065e-05,
      "loss": 0.983,
      "step": 180
    },
    {
      "epoch": 0.022551928783382788,
      "grad_norm": 0.5423634052276611,
      "learning_rate": 1.1282660332541568e-05,
      "loss": 1.0063,
      "step": 190
    },
    {
      "epoch": 0.02373887240356083,
      "grad_norm": 0.7087329626083374,
      "learning_rate": 1.187648456057007e-05,
      "loss": 0.9804,
      "step": 200
    },
    {
      "epoch": 0.024925816023738872,
      "grad_norm": 0.5914109945297241,
      "learning_rate": 1.2470308788598575e-05,
      "loss": 1.0208,
      "step": 210
    },
    {
      "epoch": 0.026112759643916916,
      "grad_norm": 0.4900115430355072,
      "learning_rate": 1.3064133016627078e-05,
      "loss": 0.9603,
      "step": 220
    },
    {
      "epoch": 0.027299703264094956,
      "grad_norm": 0.5469143986701965,
      "learning_rate": 1.3657957244655583e-05,
      "loss": 1.0107,
      "step": 230
    },
    {
      "epoch": 0.028486646884272996,
      "grad_norm": 0.6561472415924072,
      "learning_rate": 1.4251781472684086e-05,
      "loss": 1.0653,
      "step": 240
    },
    {
      "epoch": 0.02967359050445104,
      "grad_norm": 0.6878321766853333,
      "learning_rate": 1.4845605700712589e-05,
      "loss": 0.9832,
      "step": 250
    },
    {
      "epoch": 0.03086053412462908,
      "grad_norm": 0.4900878965854645,
      "learning_rate": 1.5439429928741092e-05,
      "loss": 0.941,
      "step": 260
    },
    {
      "epoch": 0.032047477744807124,
      "grad_norm": 0.7876664996147156,
      "learning_rate": 1.60332541567696e-05,
      "loss": 0.9555,
      "step": 270
    },
    {
      "epoch": 0.03323442136498516,
      "grad_norm": 1.0006091594696045,
      "learning_rate": 1.66270783847981e-05,
      "loss": 0.9885,
      "step": 280
    },
    {
      "epoch": 0.034421364985163204,
      "grad_norm": 0.8071354627609253,
      "learning_rate": 1.7220902612826605e-05,
      "loss": 0.8773,
      "step": 290
    },
    {
      "epoch": 0.03560830860534125,
      "grad_norm": 0.9984568953514099,
      "learning_rate": 1.7814726840855108e-05,
      "loss": 0.9989,
      "step": 300
    },
    {
      "epoch": 0.03679525222551929,
      "grad_norm": 0.8211751580238342,
      "learning_rate": 1.840855106888361e-05,
      "loss": 0.8656,
      "step": 310
    },
    {
      "epoch": 0.03798219584569733,
      "grad_norm": 1.264052152633667,
      "learning_rate": 1.9002375296912114e-05,
      "loss": 0.9641,
      "step": 320
    },
    {
      "epoch": 0.03916913946587537,
      "grad_norm": 0.8444392085075378,
      "learning_rate": 1.9596199524940617e-05,
      "loss": 0.8856,
      "step": 330
    },
    {
      "epoch": 0.040356083086053415,
      "grad_norm": 0.7926741242408752,
      "learning_rate": 2.0190023752969123e-05,
      "loss": 0.8962,
      "step": 340
    },
    {
      "epoch": 0.04154302670623145,
      "grad_norm": 0.8968790173530579,
      "learning_rate": 2.0783847980997626e-05,
      "loss": 0.9583,
      "step": 350
    },
    {
      "epoch": 0.042729970326409496,
      "grad_norm": 0.9435778260231018,
      "learning_rate": 2.137767220902613e-05,
      "loss": 0.9534,
      "step": 360
    },
    {
      "epoch": 0.04391691394658754,
      "grad_norm": 0.9596304893493652,
      "learning_rate": 2.1971496437054635e-05,
      "loss": 0.9235,
      "step": 370
    },
    {
      "epoch": 0.045103857566765576,
      "grad_norm": 0.9254686236381531,
      "learning_rate": 2.2565320665083135e-05,
      "loss": 0.9345,
      "step": 380
    },
    {
      "epoch": 0.04629080118694362,
      "grad_norm": 0.7298787832260132,
      "learning_rate": 2.3159144893111638e-05,
      "loss": 0.9894,
      "step": 390
    },
    {
      "epoch": 0.04747774480712166,
      "grad_norm": 0.8593952655792236,
      "learning_rate": 2.375296912114014e-05,
      "loss": 0.9318,
      "step": 400
    },
    {
      "epoch": 0.0486646884272997,
      "grad_norm": 0.9398151636123657,
      "learning_rate": 2.4346793349168648e-05,
      "loss": 0.9175,
      "step": 410
    },
    {
      "epoch": 0.049851632047477744,
      "grad_norm": 0.9733231663703918,
      "learning_rate": 2.494061757719715e-05,
      "loss": 1.0808,
      "step": 420
    },
    {
      "epoch": 0.05103857566765579,
      "grad_norm": 1.7867991924285889,
      "learning_rate": 2.5534441805225657e-05,
      "loss": 0.9826,
      "step": 430
    },
    {
      "epoch": 0.05222551928783383,
      "grad_norm": 0.951397716999054,
      "learning_rate": 2.6128266033254157e-05,
      "loss": 0.9248,
      "step": 440
    },
    {
      "epoch": 0.05341246290801187,
      "grad_norm": 0.7252931594848633,
      "learning_rate": 2.6722090261282663e-05,
      "loss": 0.9616,
      "step": 450
    },
    {
      "epoch": 0.05459940652818991,
      "grad_norm": 1.4507744312286377,
      "learning_rate": 2.7315914489311166e-05,
      "loss": 0.9346,
      "step": 460
    },
    {
      "epoch": 0.055786350148367955,
      "grad_norm": 0.9934329390525818,
      "learning_rate": 2.790973871733967e-05,
      "loss": 0.9847,
      "step": 470
    },
    {
      "epoch": 0.05697329376854599,
      "grad_norm": 1.0395673513412476,
      "learning_rate": 2.8503562945368172e-05,
      "loss": 0.9946,
      "step": 480
    },
    {
      "epoch": 0.058160237388724036,
      "grad_norm": 1.156590461730957,
      "learning_rate": 2.909738717339668e-05,
      "loss": 1.0282,
      "step": 490
    },
    {
      "epoch": 0.05934718100890208,
      "grad_norm": 0.8681560754776001,
      "learning_rate": 2.9691211401425178e-05,
      "loss": 0.9639,
      "step": 500
    },
    {
      "epoch": 0.05934718100890208,
      "eval_loss": 0.8606800436973572,
      "eval_runtime": 772.8877,
      "eval_samples_per_second": 2.295,
      "eval_steps_per_second": 0.574,
      "step": 500
    },
    {
      "epoch": 0.060534124629080116,
      "grad_norm": 0.9961158037185669,
      "learning_rate": 3.0285035629453685e-05,
      "loss": 0.9167,
      "step": 510
    },
    {
      "epoch": 0.06172106824925816,
      "grad_norm": 1.1789659261703491,
      "learning_rate": 3.0878859857482184e-05,
      "loss": 0.9911,
      "step": 520
    },
    {
      "epoch": 0.0629080118694362,
      "grad_norm": 0.9042471647262573,
      "learning_rate": 3.147268408551069e-05,
      "loss": 0.9563,
      "step": 530
    },
    {
      "epoch": 0.06409495548961425,
      "grad_norm": 0.8174954652786255,
      "learning_rate": 3.20665083135392e-05,
      "loss": 0.899,
      "step": 540
    },
    {
      "epoch": 0.06528189910979229,
      "grad_norm": 1.0244214534759521,
      "learning_rate": 3.2660332541567697e-05,
      "loss": 1.0177,
      "step": 550
    },
    {
      "epoch": 0.06646884272997032,
      "grad_norm": 1.0030802488327026,
      "learning_rate": 3.32541567695962e-05,
      "loss": 0.8967,
      "step": 560
    },
    {
      "epoch": 0.06765578635014836,
      "grad_norm": 1.0239039659500122,
      "learning_rate": 3.384798099762471e-05,
      "loss": 1.003,
      "step": 570
    },
    {
      "epoch": 0.06884272997032641,
      "grad_norm": 0.8476904034614563,
      "learning_rate": 3.444180522565321e-05,
      "loss": 0.8915,
      "step": 580
    },
    {
      "epoch": 0.07002967359050445,
      "grad_norm": 0.8580217957496643,
      "learning_rate": 3.503562945368171e-05,
      "loss": 0.9793,
      "step": 590
    },
    {
      "epoch": 0.0712166172106825,
      "grad_norm": 1.0635051727294922,
      "learning_rate": 3.5629453681710215e-05,
      "loss": 0.939,
      "step": 600
    },
    {
      "epoch": 0.07240356083086054,
      "grad_norm": 1.0473207235336304,
      "learning_rate": 3.6223277909738715e-05,
      "loss": 0.9026,
      "step": 610
    },
    {
      "epoch": 0.07359050445103858,
      "grad_norm": 0.9743014574050903,
      "learning_rate": 3.681710213776722e-05,
      "loss": 0.9039,
      "step": 620
    },
    {
      "epoch": 0.07477744807121661,
      "grad_norm": 0.9305230379104614,
      "learning_rate": 3.741092636579573e-05,
      "loss": 0.9828,
      "step": 630
    },
    {
      "epoch": 0.07596439169139466,
      "grad_norm": 0.8575608134269714,
      "learning_rate": 3.800475059382423e-05,
      "loss": 0.9613,
      "step": 640
    },
    {
      "epoch": 0.0771513353115727,
      "grad_norm": 1.1435967683792114,
      "learning_rate": 3.8598574821852734e-05,
      "loss": 0.9162,
      "step": 650
    },
    {
      "epoch": 0.07833827893175074,
      "grad_norm": 1.2059640884399414,
      "learning_rate": 3.919239904988123e-05,
      "loss": 0.9601,
      "step": 660
    },
    {
      "epoch": 0.07952522255192879,
      "grad_norm": 1.2867904901504517,
      "learning_rate": 3.978622327790974e-05,
      "loss": 0.8977,
      "step": 670
    },
    {
      "epoch": 0.08071216617210683,
      "grad_norm": 1.225451111793518,
      "learning_rate": 4.0380047505938246e-05,
      "loss": 0.9324,
      "step": 680
    },
    {
      "epoch": 0.08189910979228486,
      "grad_norm": 0.9785982966423035,
      "learning_rate": 4.0973871733966746e-05,
      "loss": 0.9825,
      "step": 690
    },
    {
      "epoch": 0.0830860534124629,
      "grad_norm": 1.006413221359253,
      "learning_rate": 4.156769596199525e-05,
      "loss": 0.992,
      "step": 700
    },
    {
      "epoch": 0.08427299703264095,
      "grad_norm": 0.9456585049629211,
      "learning_rate": 4.216152019002376e-05,
      "loss": 0.9368,
      "step": 710
    },
    {
      "epoch": 0.08545994065281899,
      "grad_norm": 0.8581728339195251,
      "learning_rate": 4.275534441805226e-05,
      "loss": 0.888,
      "step": 720
    },
    {
      "epoch": 0.08664688427299704,
      "grad_norm": 0.8342024683952332,
      "learning_rate": 4.3349168646080765e-05,
      "loss": 0.8947,
      "step": 730
    },
    {
      "epoch": 0.08783382789317508,
      "grad_norm": 1.0038254261016846,
      "learning_rate": 4.394299287410927e-05,
      "loss": 0.8982,
      "step": 740
    },
    {
      "epoch": 0.08902077151335312,
      "grad_norm": 1.085750699043274,
      "learning_rate": 4.453681710213777e-05,
      "loss": 0.8623,
      "step": 750
    },
    {
      "epoch": 0.09020771513353115,
      "grad_norm": 1.2366139888763428,
      "learning_rate": 4.513064133016627e-05,
      "loss": 0.8971,
      "step": 760
    },
    {
      "epoch": 0.0913946587537092,
      "grad_norm": 1.0508228540420532,
      "learning_rate": 4.5724465558194777e-05,
      "loss": 0.9076,
      "step": 770
    },
    {
      "epoch": 0.09258160237388724,
      "grad_norm": 1.1448291540145874,
      "learning_rate": 4.6318289786223276e-05,
      "loss": 1.0223,
      "step": 780
    },
    {
      "epoch": 0.09376854599406528,
      "grad_norm": 0.9256898760795593,
      "learning_rate": 4.691211401425178e-05,
      "loss": 0.9233,
      "step": 790
    },
    {
      "epoch": 0.09495548961424333,
      "grad_norm": 0.7629861235618591,
      "learning_rate": 4.750593824228028e-05,
      "loss": 0.8658,
      "step": 800
    },
    {
      "epoch": 0.09614243323442137,
      "grad_norm": 1.2851282358169556,
      "learning_rate": 4.809976247030879e-05,
      "loss": 0.9022,
      "step": 810
    },
    {
      "epoch": 0.0973293768545994,
      "grad_norm": 1.0901813507080078,
      "learning_rate": 4.8693586698337295e-05,
      "loss": 0.9875,
      "step": 820
    },
    {
      "epoch": 0.09851632047477744,
      "grad_norm": 0.9551505446434021,
      "learning_rate": 4.9287410926365795e-05,
      "loss": 0.9256,
      "step": 830
    },
    {
      "epoch": 0.09970326409495549,
      "grad_norm": 1.760140299797058,
      "learning_rate": 4.98812351543943e-05,
      "loss": 0.9199,
      "step": 840
    },
    {
      "epoch": 0.10089020771513353,
      "grad_norm": 1.2546443939208984,
      "learning_rate": 4.9999969188311976e-05,
      "loss": 0.9354,
      "step": 850
    },
    {
      "epoch": 0.10207715133531158,
      "grad_norm": 1.1511316299438477,
      "learning_rate": 4.9999844015959526e-05,
      "loss": 0.9547,
      "step": 860
    },
    {
      "epoch": 0.10326409495548962,
      "grad_norm": 1.0263421535491943,
      "learning_rate": 4.9999622557693894e-05,
      "loss": 0.9864,
      "step": 870
    },
    {
      "epoch": 0.10445103857566766,
      "grad_norm": 1.0085169076919556,
      "learning_rate": 4.9999304814368e-05,
      "loss": 0.9439,
      "step": 880
    },
    {
      "epoch": 0.10563798219584569,
      "grad_norm": 0.8821007609367371,
      "learning_rate": 4.9998890787205634e-05,
      "loss": 0.9689,
      "step": 890
    },
    {
      "epoch": 0.10682492581602374,
      "grad_norm": 1.2020398378372192,
      "learning_rate": 4.999838047780139e-05,
      "loss": 0.8825,
      "step": 900
    },
    {
      "epoch": 0.10801186943620178,
      "grad_norm": 1.1312310695648193,
      "learning_rate": 4.999777388812072e-05,
      "loss": 0.954,
      "step": 910
    },
    {
      "epoch": 0.10919881305637982,
      "grad_norm": 0.8461742997169495,
      "learning_rate": 4.999707102049986e-05,
      "loss": 0.9593,
      "step": 920
    },
    {
      "epoch": 0.11038575667655787,
      "grad_norm": 0.9337742328643799,
      "learning_rate": 4.9996271877645906e-05,
      "loss": 0.8842,
      "step": 930
    },
    {
      "epoch": 0.11157270029673591,
      "grad_norm": 1.5750162601470947,
      "learning_rate": 4.99953764626367e-05,
      "loss": 0.9315,
      "step": 940
    },
    {
      "epoch": 0.11275964391691394,
      "grad_norm": 0.8479421734809875,
      "learning_rate": 4.9994384778920914e-05,
      "loss": 0.9483,
      "step": 950
    },
    {
      "epoch": 0.11394658753709198,
      "grad_norm": 1.4970643520355225,
      "learning_rate": 4.999329683031796e-05,
      "loss": 0.8933,
      "step": 960
    },
    {
      "epoch": 0.11513353115727003,
      "grad_norm": 1.1047669649124146,
      "learning_rate": 4.9992112621018046e-05,
      "loss": 1.0143,
      "step": 970
    },
    {
      "epoch": 0.11632047477744807,
      "grad_norm": 1.0329428911209106,
      "learning_rate": 4.99908321555821e-05,
      "loss": 0.9286,
      "step": 980
    },
    {
      "epoch": 0.11750741839762611,
      "grad_norm": 0.7338259816169739,
      "learning_rate": 4.998945543894179e-05,
      "loss": 0.9012,
      "step": 990
    },
    {
      "epoch": 0.11869436201780416,
      "grad_norm": 0.9167189598083496,
      "learning_rate": 4.998798247639947e-05,
      "loss": 0.8714,
      "step": 1000
    },
    {
      "epoch": 0.11869436201780416,
      "eval_loss": 0.8358961939811707,
      "eval_runtime": 772.8376,
      "eval_samples_per_second": 2.295,
      "eval_steps_per_second": 0.575,
      "step": 1000
    },
    {
      "epoch": 0.1198813056379822,
      "grad_norm": 0.7216008305549622,
      "learning_rate": 4.998641327362823e-05,
      "loss": 0.797,
      "step": 1010
    },
    {
      "epoch": 0.12106824925816023,
      "grad_norm": 0.8900012373924255,
      "learning_rate": 4.9984747836671754e-05,
      "loss": 0.9256,
      "step": 1020
    },
    {
      "epoch": 0.12225519287833828,
      "grad_norm": 1.069724202156067,
      "learning_rate": 4.9982986171944436e-05,
      "loss": 0.8876,
      "step": 1030
    },
    {
      "epoch": 0.12344213649851632,
      "grad_norm": 1.2220836877822876,
      "learning_rate": 4.9981128286231235e-05,
      "loss": 0.9672,
      "step": 1040
    },
    {
      "epoch": 0.12462908011869436,
      "grad_norm": 1.055732011795044,
      "learning_rate": 4.997917418668774e-05,
      "loss": 0.8998,
      "step": 1050
    },
    {
      "epoch": 0.1258160237388724,
      "grad_norm": 1.0066126585006714,
      "learning_rate": 4.997712388084009e-05,
      "loss": 0.965,
      "step": 1060
    },
    {
      "epoch": 0.12700296735905045,
      "grad_norm": 0.9691961407661438,
      "learning_rate": 4.9974977376584945e-05,
      "loss": 0.8449,
      "step": 1070
    },
    {
      "epoch": 0.1281899109792285,
      "grad_norm": 0.9969437122344971,
      "learning_rate": 4.997273468218949e-05,
      "loss": 0.9465,
      "step": 1080
    },
    {
      "epoch": 0.12937685459940654,
      "grad_norm": 0.9269385933876038,
      "learning_rate": 4.997039580629137e-05,
      "loss": 0.9694,
      "step": 1090
    },
    {
      "epoch": 0.13056379821958458,
      "grad_norm": 1.0190030336380005,
      "learning_rate": 4.996796075789868e-05,
      "loss": 0.8879,
      "step": 1100
    },
    {
      "epoch": 0.13175074183976263,
      "grad_norm": 0.9268912076950073,
      "learning_rate": 4.9965429546389906e-05,
      "loss": 0.9089,
      "step": 1110
    },
    {
      "epoch": 0.13293768545994064,
      "grad_norm": 0.8694944977760315,
      "learning_rate": 4.996280218151391e-05,
      "loss": 0.8724,
      "step": 1120
    },
    {
      "epoch": 0.13412462908011868,
      "grad_norm": 0.862114667892456,
      "learning_rate": 4.996007867338989e-05,
      "loss": 0.93,
      "step": 1130
    },
    {
      "epoch": 0.13531157270029673,
      "grad_norm": 1.0992276668548584,
      "learning_rate": 4.995725903250732e-05,
      "loss": 0.9419,
      "step": 1140
    },
    {
      "epoch": 0.13649851632047477,
      "grad_norm": 1.1718074083328247,
      "learning_rate": 4.995434326972595e-05,
      "loss": 0.9153,
      "step": 1150
    },
    {
      "epoch": 0.13768545994065282,
      "grad_norm": 1.34727942943573,
      "learning_rate": 4.99513313962757e-05,
      "loss": 0.8732,
      "step": 1160
    },
    {
      "epoch": 0.13887240356083086,
      "grad_norm": 0.717542290687561,
      "learning_rate": 4.994822342375671e-05,
      "loss": 0.8217,
      "step": 1170
    },
    {
      "epoch": 0.1400593471810089,
      "grad_norm": 1.2748587131500244,
      "learning_rate": 4.99450193641392e-05,
      "loss": 0.8905,
      "step": 1180
    },
    {
      "epoch": 0.14124629080118695,
      "grad_norm": 1.3889360427856445,
      "learning_rate": 4.994171922976348e-05,
      "loss": 0.8903,
      "step": 1190
    },
    {
      "epoch": 0.142433234421365,
      "grad_norm": 0.7903980016708374,
      "learning_rate": 4.993832303333989e-05,
      "loss": 0.9225,
      "step": 1200
    },
    {
      "epoch": 0.14362017804154303,
      "grad_norm": 1.0810192823410034,
      "learning_rate": 4.993483078794875e-05,
      "loss": 0.8787,
      "step": 1210
    },
    {
      "epoch": 0.14480712166172108,
      "grad_norm": 1.3086780309677124,
      "learning_rate": 4.9931242507040294e-05,
      "loss": 0.8956,
      "step": 1220
    },
    {
      "epoch": 0.14599406528189912,
      "grad_norm": 1.00796639919281,
      "learning_rate": 4.992755820443465e-05,
      "loss": 0.8845,
      "step": 1230
    },
    {
      "epoch": 0.14718100890207717,
      "grad_norm": 1.0078282356262207,
      "learning_rate": 4.992377789432177e-05,
      "loss": 0.9332,
      "step": 1240
    },
    {
      "epoch": 0.14836795252225518,
      "grad_norm": 1.0940144062042236,
      "learning_rate": 4.9919901591261366e-05,
      "loss": 0.9136,
      "step": 1250
    },
    {
      "epoch": 0.14955489614243322,
      "grad_norm": 0.9257196187973022,
      "learning_rate": 4.991592931018286e-05,
      "loss": 0.8778,
      "step": 1260
    },
    {
      "epoch": 0.15074183976261127,
      "grad_norm": 0.6467018723487854,
      "learning_rate": 4.991186106638534e-05,
      "loss": 0.9091,
      "step": 1270
    },
    {
      "epoch": 0.1519287833827893,
      "grad_norm": 0.9637243747711182,
      "learning_rate": 4.990769687553749e-05,
      "loss": 0.9091,
      "step": 1280
    },
    {
      "epoch": 0.15311572700296736,
      "grad_norm": 1.1351808309555054,
      "learning_rate": 4.990343675367752e-05,
      "loss": 0.9378,
      "step": 1290
    },
    {
      "epoch": 0.1543026706231454,
      "grad_norm": 1.2510889768600464,
      "learning_rate": 4.989908071721313e-05,
      "loss": 0.8714,
      "step": 1300
    },
    {
      "epoch": 0.15548961424332344,
      "grad_norm": 1.0249766111373901,
      "learning_rate": 4.989462878292142e-05,
      "loss": 0.9535,
      "step": 1310
    },
    {
      "epoch": 0.1566765578635015,
      "grad_norm": 1.5715773105621338,
      "learning_rate": 4.9890080967948836e-05,
      "loss": 0.8567,
      "step": 1320
    },
    {
      "epoch": 0.15786350148367953,
      "grad_norm": 1.0343701839447021,
      "learning_rate": 4.9885437289811106e-05,
      "loss": 0.9312,
      "step": 1330
    },
    {
      "epoch": 0.15905044510385757,
      "grad_norm": 0.952111542224884,
      "learning_rate": 4.988069776639318e-05,
      "loss": 0.8942,
      "step": 1340
    },
    {
      "epoch": 0.16023738872403562,
      "grad_norm": 1.1799042224884033,
      "learning_rate": 4.987586241594914e-05,
      "loss": 0.9557,
      "step": 1350
    },
    {
      "epoch": 0.16142433234421366,
      "grad_norm": 0.9772776365280151,
      "learning_rate": 4.987093125710215e-05,
      "loss": 0.9164,
      "step": 1360
    },
    {
      "epoch": 0.1626112759643917,
      "grad_norm": 0.9001238346099854,
      "learning_rate": 4.986590430884437e-05,
      "loss": 0.8403,
      "step": 1370
    },
    {
      "epoch": 0.16379821958456972,
      "grad_norm": 1.0725136995315552,
      "learning_rate": 4.98607815905369e-05,
      "loss": 0.9615,
      "step": 1380
    },
    {
      "epoch": 0.16498516320474776,
      "grad_norm": 1.0103825330734253,
      "learning_rate": 4.985556312190968e-05,
      "loss": 0.904,
      "step": 1390
    },
    {
      "epoch": 0.1661721068249258,
      "grad_norm": 1.019081473350525,
      "learning_rate": 4.985024892306145e-05,
      "loss": 0.8856,
      "step": 1400
    },
    {
      "epoch": 0.16735905044510385,
      "grad_norm": 0.9215166568756104,
      "learning_rate": 4.984483901445961e-05,
      "loss": 0.9263,
      "step": 1410
    },
    {
      "epoch": 0.1685459940652819,
      "grad_norm": 0.9046373963356018,
      "learning_rate": 4.9839333416940245e-05,
      "loss": 0.8989,
      "step": 1420
    },
    {
      "epoch": 0.16973293768545994,
      "grad_norm": 0.683239758014679,
      "learning_rate": 4.9833732151707915e-05,
      "loss": 0.8229,
      "step": 1430
    },
    {
      "epoch": 0.17091988130563798,
      "grad_norm": 0.9845369458198547,
      "learning_rate": 4.982803524033569e-05,
      "loss": 0.8801,
      "step": 1440
    },
    {
      "epoch": 0.17210682492581603,
      "grad_norm": 0.92441326379776,
      "learning_rate": 4.9822242704764996e-05,
      "loss": 0.9166,
      "step": 1450
    },
    {
      "epoch": 0.17329376854599407,
      "grad_norm": 1.1213726997375488,
      "learning_rate": 4.9816354567305555e-05,
      "loss": 0.855,
      "step": 1460
    },
    {
      "epoch": 0.1744807121661721,
      "grad_norm": 0.9685346484184265,
      "learning_rate": 4.98103708506353e-05,
      "loss": 0.9458,
      "step": 1470
    },
    {
      "epoch": 0.17566765578635016,
      "grad_norm": 0.827942967414856,
      "learning_rate": 4.980429157780027e-05,
      "loss": 0.9032,
      "step": 1480
    },
    {
      "epoch": 0.1768545994065282,
      "grad_norm": 0.951607882976532,
      "learning_rate": 4.979811677221455e-05,
      "loss": 0.8916,
      "step": 1490
    },
    {
      "epoch": 0.17804154302670624,
      "grad_norm": 0.8278565406799316,
      "learning_rate": 4.979184645766016e-05,
      "loss": 0.9589,
      "step": 1500
    },
    {
      "epoch": 0.17804154302670624,
      "eval_loss": 0.8247362971305847,
      "eval_runtime": 773.006,
      "eval_samples_per_second": 2.295,
      "eval_steps_per_second": 0.574,
      "step": 1500
    },
    {
      "epoch": 0.17922848664688426,
      "grad_norm": 1.4448524713516235,
      "learning_rate": 4.978548065828697e-05,
      "loss": 0.9034,
      "step": 1510
    },
    {
      "epoch": 0.1804154302670623,
      "grad_norm": 0.9414309859275818,
      "learning_rate": 4.9779019398612605e-05,
      "loss": 0.8396,
      "step": 1520
    },
    {
      "epoch": 0.18160237388724035,
      "grad_norm": 1.0360982418060303,
      "learning_rate": 4.977246270352235e-05,
      "loss": 0.8701,
      "step": 1530
    },
    {
      "epoch": 0.1827893175074184,
      "grad_norm": 0.9132982492446899,
      "learning_rate": 4.976581059826906e-05,
      "loss": 0.9002,
      "step": 1540
    },
    {
      "epoch": 0.18397626112759644,
      "grad_norm": 1.1046500205993652,
      "learning_rate": 4.9759063108473046e-05,
      "loss": 0.9659,
      "step": 1550
    },
    {
      "epoch": 0.18516320474777448,
      "grad_norm": 0.7856910228729248,
      "learning_rate": 4.975222026012202e-05,
      "loss": 0.8569,
      "step": 1560
    },
    {
      "epoch": 0.18635014836795252,
      "grad_norm": 1.0288382768630981,
      "learning_rate": 4.974528207957092e-05,
      "loss": 0.8933,
      "step": 1570
    },
    {
      "epoch": 0.18753709198813057,
      "grad_norm": 1.1528115272521973,
      "learning_rate": 4.9738248593541894e-05,
      "loss": 0.9673,
      "step": 1580
    },
    {
      "epoch": 0.1887240356083086,
      "grad_norm": 1.0612051486968994,
      "learning_rate": 4.973111982912412e-05,
      "loss": 0.8482,
      "step": 1590
    },
    {
      "epoch": 0.18991097922848665,
      "grad_norm": 0.6942639350891113,
      "learning_rate": 4.972389581377376e-05,
      "loss": 0.87,
      "step": 1600
    },
    {
      "epoch": 0.1910979228486647,
      "grad_norm": 1.0134094953536987,
      "learning_rate": 4.971657657531383e-05,
      "loss": 0.9021,
      "step": 1610
    },
    {
      "epoch": 0.19228486646884274,
      "grad_norm": 1.2450144290924072,
      "learning_rate": 4.9709162141934076e-05,
      "loss": 0.9054,
      "step": 1620
    },
    {
      "epoch": 0.19347181008902078,
      "grad_norm": 0.9599930644035339,
      "learning_rate": 4.97016525421909e-05,
      "loss": 0.9167,
      "step": 1630
    },
    {
      "epoch": 0.1946587537091988,
      "grad_norm": 1.022996425628662,
      "learning_rate": 4.9694047805007236e-05,
      "loss": 0.8035,
      "step": 1640
    },
    {
      "epoch": 0.19584569732937684,
      "grad_norm": 0.9903687834739685,
      "learning_rate": 4.968634795967243e-05,
      "loss": 0.8117,
      "step": 1650
    },
    {
      "epoch": 0.1970326409495549,
      "grad_norm": 0.7823516726493835,
      "learning_rate": 4.9678553035842125e-05,
      "loss": 0.836,
      "step": 1660
    },
    {
      "epoch": 0.19821958456973293,
      "grad_norm": 0.896584689617157,
      "learning_rate": 4.967066306353816e-05,
      "loss": 0.9844,
      "step": 1670
    },
    {
      "epoch": 0.19940652818991098,
      "grad_norm": 1.0771842002868652,
      "learning_rate": 4.966267807314845e-05,
      "loss": 0.8248,
      "step": 1680
    },
    {
      "epoch": 0.20059347181008902,
      "grad_norm": 0.7897894978523254,
      "learning_rate": 4.965459809542689e-05,
      "loss": 0.801,
      "step": 1690
    },
    {
      "epoch": 0.20178041543026706,
      "grad_norm": 0.8380696773529053,
      "learning_rate": 4.964642316149318e-05,
      "loss": 0.9112,
      "step": 1700
    },
    {
      "epoch": 0.2029673590504451,
      "grad_norm": 0.9411647915840149,
      "learning_rate": 4.963815330283276e-05,
      "loss": 0.9455,
      "step": 1710
    },
    {
      "epoch": 0.20415430267062315,
      "grad_norm": 0.9571262001991272,
      "learning_rate": 4.9629788551296656e-05,
      "loss": 0.897,
      "step": 1720
    },
    {
      "epoch": 0.2053412462908012,
      "grad_norm": 0.8774480819702148,
      "learning_rate": 4.962132893910139e-05,
      "loss": 0.9364,
      "step": 1730
    },
    {
      "epoch": 0.20652818991097924,
      "grad_norm": 0.8009739518165588,
      "learning_rate": 4.961277449882882e-05,
      "loss": 0.8938,
      "step": 1740
    },
    {
      "epoch": 0.20771513353115728,
      "grad_norm": 0.8146045207977295,
      "learning_rate": 4.960412526342604e-05,
      "loss": 0.8371,
      "step": 1750
    },
    {
      "epoch": 0.20890207715133532,
      "grad_norm": 0.9336399435997009,
      "learning_rate": 4.9595381266205234e-05,
      "loss": 0.8568,
      "step": 1760
    },
    {
      "epoch": 0.21008902077151334,
      "grad_norm": 1.1002113819122314,
      "learning_rate": 4.958654254084355e-05,
      "loss": 0.8137,
      "step": 1770
    },
    {
      "epoch": 0.21127596439169138,
      "grad_norm": 1.308406949043274,
      "learning_rate": 4.9577609121383015e-05,
      "loss": 0.96,
      "step": 1780
    },
    {
      "epoch": 0.21246290801186943,
      "grad_norm": 0.9280428290367126,
      "learning_rate": 4.956858104223033e-05,
      "loss": 0.8977,
      "step": 1790
    },
    {
      "epoch": 0.21364985163204747,
      "grad_norm": 0.9551159739494324,
      "learning_rate": 4.9559458338156787e-05,
      "loss": 0.8581,
      "step": 1800
    },
    {
      "epoch": 0.21483679525222552,
      "grad_norm": 0.8361184597015381,
      "learning_rate": 4.955024104429812e-05,
      "loss": 0.9148,
      "step": 1810
    },
    {
      "epoch": 0.21602373887240356,
      "grad_norm": 0.9807586669921875,
      "learning_rate": 4.954092919615436e-05,
      "loss": 0.8276,
      "step": 1820
    },
    {
      "epoch": 0.2172106824925816,
      "grad_norm": 0.8617917895317078,
      "learning_rate": 4.953152282958975e-05,
      "loss": 0.965,
      "step": 1830
    },
    {
      "epoch": 0.21839762611275965,
      "grad_norm": 0.8824058175086975,
      "learning_rate": 4.952202198083252e-05,
      "loss": 0.9464,
      "step": 1840
    },
    {
      "epoch": 0.2195845697329377,
      "grad_norm": 1.0937623977661133,
      "learning_rate": 4.9512426686474825e-05,
      "loss": 0.9038,
      "step": 1850
    },
    {
      "epoch": 0.22077151335311573,
      "grad_norm": 0.8538326025009155,
      "learning_rate": 4.950273698347254e-05,
      "loss": 0.9066,
      "step": 1860
    },
    {
      "epoch": 0.22195845697329378,
      "grad_norm": 0.882277250289917,
      "learning_rate": 4.9492952909145206e-05,
      "loss": 0.8483,
      "step": 1870
    },
    {
      "epoch": 0.22314540059347182,
      "grad_norm": 1.1620628833770752,
      "learning_rate": 4.948307450117578e-05,
      "loss": 0.9323,
      "step": 1880
    },
    {
      "epoch": 0.22433234421364986,
      "grad_norm": 0.9091768860816956,
      "learning_rate": 4.947310179761057e-05,
      "loss": 0.9345,
      "step": 1890
    },
    {
      "epoch": 0.22551928783382788,
      "grad_norm": 0.9526820182800293,
      "learning_rate": 4.9463034836859035e-05,
      "loss": 0.8783,
      "step": 1900
    },
    {
      "epoch": 0.22670623145400592,
      "grad_norm": 1.2667665481567383,
      "learning_rate": 4.94528736576937e-05,
      "loss": 0.8743,
      "step": 1910
    },
    {
      "epoch": 0.22789317507418397,
      "grad_norm": 0.9735057950019836,
      "learning_rate": 4.944261829924992e-05,
      "loss": 0.8674,
      "step": 1920
    },
    {
      "epoch": 0.229080118694362,
      "grad_norm": 1.0499471426010132,
      "learning_rate": 4.943226880102583e-05,
      "loss": 0.984,
      "step": 1930
    },
    {
      "epoch": 0.23026706231454006,
      "grad_norm": 1.031782865524292,
      "learning_rate": 4.94218252028821e-05,
      "loss": 0.8371,
      "step": 1940
    },
    {
      "epoch": 0.2314540059347181,
      "grad_norm": 0.6854504346847534,
      "learning_rate": 4.9411287545041826e-05,
      "loss": 0.9478,
      "step": 1950
    },
    {
      "epoch": 0.23264094955489614,
      "grad_norm": 0.7416971325874329,
      "learning_rate": 4.940065586809041e-05,
      "loss": 0.8734,
      "step": 1960
    },
    {
      "epoch": 0.2338278931750742,
      "grad_norm": 0.9267434477806091,
      "learning_rate": 4.938993021297531e-05,
      "loss": 0.9185,
      "step": 1970
    },
    {
      "epoch": 0.23501483679525223,
      "grad_norm": 1.0826581716537476,
      "learning_rate": 4.937911062100597e-05,
      "loss": 0.8529,
      "step": 1980
    },
    {
      "epoch": 0.23620178041543027,
      "grad_norm": 1.1237525939941406,
      "learning_rate": 4.936819713385363e-05,
      "loss": 0.884,
      "step": 1990
    },
    {
      "epoch": 0.23738872403560832,
      "grad_norm": 0.8531584739685059,
      "learning_rate": 4.935718979355115e-05,
      "loss": 0.8282,
      "step": 2000
    },
    {
      "epoch": 0.23738872403560832,
      "eval_loss": 0.8174661993980408,
      "eval_runtime": 770.587,
      "eval_samples_per_second": 2.302,
      "eval_steps_per_second": 0.576,
      "step": 2000
    },
    {
      "epoch": 0.23857566765578636,
      "grad_norm": 1.2661949396133423,
      "learning_rate": 4.9346088642492867e-05,
      "loss": 0.903,
      "step": 2010
    },
    {
      "epoch": 0.2397626112759644,
      "grad_norm": 0.8154357671737671,
      "learning_rate": 4.933489372343443e-05,
      "loss": 0.8108,
      "step": 2020
    },
    {
      "epoch": 0.24094955489614242,
      "grad_norm": 1.1874347925186157,
      "learning_rate": 4.9323605079492616e-05,
      "loss": 0.8955,
      "step": 2030
    },
    {
      "epoch": 0.24213649851632046,
      "grad_norm": 0.9091614484786987,
      "learning_rate": 4.931222275414521e-05,
      "loss": 0.8428,
      "step": 2040
    },
    {
      "epoch": 0.2433234421364985,
      "grad_norm": 0.9067183136940002,
      "learning_rate": 4.9300746791230775e-05,
      "loss": 0.9133,
      "step": 2050
    },
    {
      "epoch": 0.24451038575667655,
      "grad_norm": 0.9364748597145081,
      "learning_rate": 4.9289177234948535e-05,
      "loss": 0.8841,
      "step": 2060
    },
    {
      "epoch": 0.2456973293768546,
      "grad_norm": 1.1770869493484497,
      "learning_rate": 4.927751412985818e-05,
      "loss": 0.9274,
      "step": 2070
    },
    {
      "epoch": 0.24688427299703264,
      "grad_norm": 0.9044451117515564,
      "learning_rate": 4.926575752087971e-05,
      "loss": 0.9128,
      "step": 2080
    },
    {
      "epoch": 0.24807121661721068,
      "grad_norm": 0.9170666933059692,
      "learning_rate": 4.925390745329321e-05,
      "loss": 0.9526,
      "step": 2090
    },
    {
      "epoch": 0.24925816023738873,
      "grad_norm": 0.7166856527328491,
      "learning_rate": 4.924196397273878e-05,
      "loss": 0.8833,
      "step": 2100
    },
    {
      "epoch": 0.25044510385756674,
      "grad_norm": 1.122950792312622,
      "learning_rate": 4.9229927125216245e-05,
      "loss": 0.9077,
      "step": 2110
    },
    {
      "epoch": 0.2516320474777448,
      "grad_norm": 0.7830738425254822,
      "learning_rate": 4.921779695708506e-05,
      "loss": 0.9394,
      "step": 2120
    },
    {
      "epoch": 0.25281899109792283,
      "grad_norm": 0.8085533380508423,
      "learning_rate": 4.920557351506409e-05,
      "loss": 0.9048,
      "step": 2130
    },
    {
      "epoch": 0.2540059347181009,
      "grad_norm": 0.8500973582267761,
      "learning_rate": 4.9193256846231425e-05,
      "loss": 0.9618,
      "step": 2140
    },
    {
      "epoch": 0.2551928783382789,
      "grad_norm": 1.318861961364746,
      "learning_rate": 4.9180846998024235e-05,
      "loss": 0.9177,
      "step": 2150
    },
    {
      "epoch": 0.256379821958457,
      "grad_norm": 0.7787696123123169,
      "learning_rate": 4.916834401823857e-05,
      "loss": 0.9182,
      "step": 2160
    },
    {
      "epoch": 0.257566765578635,
      "grad_norm": 1.0064891576766968,
      "learning_rate": 4.9155747955029156e-05,
      "loss": 0.8871,
      "step": 2170
    },
    {
      "epoch": 0.2587537091988131,
      "grad_norm": 0.8516352772712708,
      "learning_rate": 4.9143058856909235e-05,
      "loss": 0.8684,
      "step": 2180
    },
    {
      "epoch": 0.2599406528189911,
      "grad_norm": 0.846522867679596,
      "learning_rate": 4.9130276772750374e-05,
      "loss": 0.8566,
      "step": 2190
    },
    {
      "epoch": 0.26112759643916916,
      "grad_norm": 0.8422502875328064,
      "learning_rate": 4.911740175178226e-05,
      "loss": 0.9234,
      "step": 2200
    },
    {
      "epoch": 0.2623145400593472,
      "grad_norm": 1.1003506183624268,
      "learning_rate": 4.910443384359251e-05,
      "loss": 0.9092,
      "step": 2210
    },
    {
      "epoch": 0.26350148367952525,
      "grad_norm": 1.1265966892242432,
      "learning_rate": 4.9091373098126546e-05,
      "loss": 0.856,
      "step": 2220
    },
    {
      "epoch": 0.26468842729970327,
      "grad_norm": 0.9657439589500427,
      "learning_rate": 4.90782195656873e-05,
      "loss": 0.8534,
      "step": 2230
    },
    {
      "epoch": 0.2658753709198813,
      "grad_norm": 0.6582869291305542,
      "learning_rate": 4.9064973296935075e-05,
      "loss": 0.9634,
      "step": 2240
    },
    {
      "epoch": 0.26706231454005935,
      "grad_norm": 1.1937134265899658,
      "learning_rate": 4.9051634342887356e-05,
      "loss": 0.911,
      "step": 2250
    },
    {
      "epoch": 0.26824925816023737,
      "grad_norm": 0.9606165289878845,
      "learning_rate": 4.90382027549186e-05,
      "loss": 0.8507,
      "step": 2260
    },
    {
      "epoch": 0.26943620178041544,
      "grad_norm": 0.9722040295600891,
      "learning_rate": 4.9024678584760056e-05,
      "loss": 0.9747,
      "step": 2270
    },
    {
      "epoch": 0.27062314540059346,
      "grad_norm": 0.8512585163116455,
      "learning_rate": 4.90110618844995e-05,
      "loss": 0.8801,
      "step": 2280
    },
    {
      "epoch": 0.27181008902077153,
      "grad_norm": 0.8925254940986633,
      "learning_rate": 4.899735270658114e-05,
      "loss": 0.8713,
      "step": 2290
    },
    {
      "epoch": 0.27299703264094954,
      "grad_norm": 1.0183494091033936,
      "learning_rate": 4.898355110380531e-05,
      "loss": 0.8947,
      "step": 2300
    },
    {
      "epoch": 0.2741839762611276,
      "grad_norm": 0.7539317011833191,
      "learning_rate": 4.896965712932837e-05,
      "loss": 0.9179,
      "step": 2310
    },
    {
      "epoch": 0.27537091988130563,
      "grad_norm": 1.0620583295822144,
      "learning_rate": 4.895567083666239e-05,
      "loss": 0.9275,
      "step": 2320
    },
    {
      "epoch": 0.2765578635014837,
      "grad_norm": 1.908141851425171,
      "learning_rate": 4.8941592279675037e-05,
      "loss": 0.9131,
      "step": 2330
    },
    {
      "epoch": 0.2777448071216617,
      "grad_norm": 0.7129389643669128,
      "learning_rate": 4.892742151258931e-05,
      "loss": 0.8657,
      "step": 2340
    },
    {
      "epoch": 0.2789317507418398,
      "grad_norm": 0.8836341500282288,
      "learning_rate": 4.8913158589983374e-05,
      "loss": 0.8815,
      "step": 2350
    },
    {
      "epoch": 0.2801186943620178,
      "grad_norm": 0.9302510619163513,
      "learning_rate": 4.8898803566790296e-05,
      "loss": 0.86,
      "step": 2360
    },
    {
      "epoch": 0.2813056379821958,
      "grad_norm": 0.717185378074646,
      "learning_rate": 4.8884356498297884e-05,
      "loss": 0.8734,
      "step": 2370
    },
    {
      "epoch": 0.2824925816023739,
      "grad_norm": 1.0630261898040771,
      "learning_rate": 4.886981744014846e-05,
      "loss": 0.8946,
      "step": 2380
    },
    {
      "epoch": 0.2836795252225519,
      "grad_norm": 0.7842198014259338,
      "learning_rate": 4.885518644833863e-05,
      "loss": 0.877,
      "step": 2390
    },
    {
      "epoch": 0.28486646884273,
      "grad_norm": 0.9807887077331543,
      "learning_rate": 4.8840463579219086e-05,
      "loss": 0.8142,
      "step": 2400
    },
    {
      "epoch": 0.286053412462908,
      "grad_norm": 0.9767608642578125,
      "learning_rate": 4.882564888949437e-05,
      "loss": 0.9082,
      "step": 2410
    },
    {
      "epoch": 0.28724035608308607,
      "grad_norm": 1.0220454931259155,
      "learning_rate": 4.881074243622268e-05,
      "loss": 0.9359,
      "step": 2420
    },
    {
      "epoch": 0.2884272997032641,
      "grad_norm": 0.791378378868103,
      "learning_rate": 4.8795744276815624e-05,
      "loss": 0.8607,
      "step": 2430
    },
    {
      "epoch": 0.28961424332344216,
      "grad_norm": 0.9800243973731995,
      "learning_rate": 4.8780654469038036e-05,
      "loss": 0.9268,
      "step": 2440
    },
    {
      "epoch": 0.29080118694362017,
      "grad_norm": 0.9791480302810669,
      "learning_rate": 4.8765473071007706e-05,
      "loss": 0.8905,
      "step": 2450
    },
    {
      "epoch": 0.29198813056379824,
      "grad_norm": 0.9130463004112244,
      "learning_rate": 4.8750200141195194e-05,
      "loss": 0.8537,
      "step": 2460
    },
    {
      "epoch": 0.29317507418397626,
      "grad_norm": 0.7925013899803162,
      "learning_rate": 4.8734835738423586e-05,
      "loss": 0.9325,
      "step": 2470
    },
    {
      "epoch": 0.29436201780415433,
      "grad_norm": 1.0440537929534912,
      "learning_rate": 4.871937992186827e-05,
      "loss": 0.9535,
      "step": 2480
    },
    {
      "epoch": 0.29554896142433235,
      "grad_norm": 0.9307257533073425,
      "learning_rate": 4.870383275105671e-05,
      "loss": 0.8196,
      "step": 2490
    },
    {
      "epoch": 0.29673590504451036,
      "grad_norm": 0.9311190843582153,
      "learning_rate": 4.8688194285868235e-05,
      "loss": 0.9039,
      "step": 2500
    },
    {
      "epoch": 0.29673590504451036,
      "eval_loss": 0.8113005757331848,
      "eval_runtime": 769.3974,
      "eval_samples_per_second": 2.306,
      "eval_steps_per_second": 0.577,
      "step": 2500
    },
    {
      "epoch": 0.29792284866468843,
      "grad_norm": 0.8999786376953125,
      "learning_rate": 4.867246458653377e-05,
      "loss": 0.8452,
      "step": 2510
    },
    {
      "epoch": 0.29910979228486645,
      "grad_norm": 0.8556974530220032,
      "learning_rate": 4.865664371363563e-05,
      "loss": 0.8501,
      "step": 2520
    },
    {
      "epoch": 0.3002967359050445,
      "grad_norm": 0.8129183650016785,
      "learning_rate": 4.864073172810728e-05,
      "loss": 0.8773,
      "step": 2530
    },
    {
      "epoch": 0.30148367952522254,
      "grad_norm": 1.002024531364441,
      "learning_rate": 4.862472869123311e-05,
      "loss": 0.897,
      "step": 2540
    },
    {
      "epoch": 0.3026706231454006,
      "grad_norm": 0.8188593983650208,
      "learning_rate": 4.860863466464818e-05,
      "loss": 0.88,
      "step": 2550
    },
    {
      "epoch": 0.3038575667655786,
      "grad_norm": 1.4040788412094116,
      "learning_rate": 4.8592449710338004e-05,
      "loss": 0.9121,
      "step": 2560
    },
    {
      "epoch": 0.3050445103857567,
      "grad_norm": 1.0157005786895752,
      "learning_rate": 4.8576173890638295e-05,
      "loss": 0.8512,
      "step": 2570
    },
    {
      "epoch": 0.3062314540059347,
      "grad_norm": 0.9177596569061279,
      "learning_rate": 4.855980726823472e-05,
      "loss": 0.9312,
      "step": 2580
    },
    {
      "epoch": 0.3074183976261128,
      "grad_norm": 1.0521137714385986,
      "learning_rate": 4.8543349906162705e-05,
      "loss": 0.8539,
      "step": 2590
    },
    {
      "epoch": 0.3086053412462908,
      "grad_norm": 0.7588869333267212,
      "learning_rate": 4.85268018678071e-05,
      "loss": 0.8604,
      "step": 2600
    },
    {
      "epoch": 0.30979228486646887,
      "grad_norm": 0.6621432900428772,
      "learning_rate": 4.851016321690205e-05,
      "loss": 0.8125,
      "step": 2610
    },
    {
      "epoch": 0.3109792284866469,
      "grad_norm": 1.1285654306411743,
      "learning_rate": 4.849343401753064e-05,
      "loss": 0.9558,
      "step": 2620
    },
    {
      "epoch": 0.3121661721068249,
      "grad_norm": 0.9147692918777466,
      "learning_rate": 4.847661433412474e-05,
      "loss": 0.8431,
      "step": 2630
    },
    {
      "epoch": 0.313353115727003,
      "grad_norm": 0.9889045357704163,
      "learning_rate": 4.845970423146467e-05,
      "loss": 0.9009,
      "step": 2640
    },
    {
      "epoch": 0.314540059347181,
      "grad_norm": 1.1277235746383667,
      "learning_rate": 4.844270377467905e-05,
      "loss": 0.8435,
      "step": 2650
    },
    {
      "epoch": 0.31572700296735906,
      "grad_norm": 1.0129153728485107,
      "learning_rate": 4.842561302924447e-05,
      "loss": 0.8972,
      "step": 2660
    },
    {
      "epoch": 0.3169139465875371,
      "grad_norm": 0.8509721159934998,
      "learning_rate": 4.8408432060985244e-05,
      "loss": 0.8769,
      "step": 2670
    },
    {
      "epoch": 0.31810089020771515,
      "grad_norm": 0.8103829622268677,
      "learning_rate": 4.83911609360732e-05,
      "loss": 0.8339,
      "step": 2680
    },
    {
      "epoch": 0.31928783382789316,
      "grad_norm": 0.6785222887992859,
      "learning_rate": 4.83737997210274e-05,
      "loss": 0.8878,
      "step": 2690
    },
    {
      "epoch": 0.32047477744807124,
      "grad_norm": 0.9442030191421509,
      "learning_rate": 4.835634848271387e-05,
      "loss": 0.8441,
      "step": 2700
    },
    {
      "epoch": 0.32166172106824925,
      "grad_norm": 0.7956482768058777,
      "learning_rate": 4.833880728834538e-05,
      "loss": 0.823,
      "step": 2710
    },
    {
      "epoch": 0.3228486646884273,
      "grad_norm": 0.8883194327354431,
      "learning_rate": 4.832117620548114e-05,
      "loss": 0.8942,
      "step": 2720
    },
    {
      "epoch": 0.32403560830860534,
      "grad_norm": 0.713594377040863,
      "learning_rate": 4.8303455302026565e-05,
      "loss": 0.8289,
      "step": 2730
    },
    {
      "epoch": 0.3252225519287834,
      "grad_norm": 0.9183544516563416,
      "learning_rate": 4.828564464623302e-05,
      "loss": 0.8776,
      "step": 2740
    },
    {
      "epoch": 0.3264094955489614,
      "grad_norm": 1.0352176427841187,
      "learning_rate": 4.826774430669755e-05,
      "loss": 0.8295,
      "step": 2750
    },
    {
      "epoch": 0.32759643916913944,
      "grad_norm": 0.8310732245445251,
      "learning_rate": 4.824975435236258e-05,
      "loss": 0.9217,
      "step": 2760
    },
    {
      "epoch": 0.3287833827893175,
      "grad_norm": 0.8640980124473572,
      "learning_rate": 4.8231674852515754e-05,
      "loss": 0.7977,
      "step": 2770
    },
    {
      "epoch": 0.32997032640949553,
      "grad_norm": 1.2364047765731812,
      "learning_rate": 4.821350587678952e-05,
      "loss": 0.8313,
      "step": 2780
    },
    {
      "epoch": 0.3311572700296736,
      "grad_norm": 0.9419341683387756,
      "learning_rate": 4.819524749516098e-05,
      "loss": 0.8888,
      "step": 2790
    },
    {
      "epoch": 0.3323442136498516,
      "grad_norm": 1.1738229990005493,
      "learning_rate": 4.817689977795157e-05,
      "loss": 0.9192,
      "step": 2800
    },
    {
      "epoch": 0.3335311572700297,
      "grad_norm": 0.9781436920166016,
      "learning_rate": 4.81584627958268e-05,
      "loss": 0.9115,
      "step": 2810
    },
    {
      "epoch": 0.3347181008902077,
      "grad_norm": 1.0958514213562012,
      "learning_rate": 4.813993661979598e-05,
      "loss": 0.9746,
      "step": 2820
    },
    {
      "epoch": 0.3359050445103858,
      "grad_norm": 0.9003396034240723,
      "learning_rate": 4.812132132121195e-05,
      "loss": 0.9283,
      "step": 2830
    },
    {
      "epoch": 0.3370919881305638,
      "grad_norm": 1.0439910888671875,
      "learning_rate": 4.8102616971770786e-05,
      "loss": 0.8966,
      "step": 2840
    },
    {
      "epoch": 0.33827893175074186,
      "grad_norm": 0.9176671504974365,
      "learning_rate": 4.808382364351157e-05,
      "loss": 0.9132,
      "step": 2850
    },
    {
      "epoch": 0.3394658753709199,
      "grad_norm": 1.3821845054626465,
      "learning_rate": 4.806494140881605e-05,
      "loss": 0.807,
      "step": 2860
    },
    {
      "epoch": 0.34065281899109795,
      "grad_norm": 0.6699957847595215,
      "learning_rate": 4.804597034040841e-05,
      "loss": 0.8337,
      "step": 2870
    },
    {
      "epoch": 0.34183976261127597,
      "grad_norm": 0.8552253842353821,
      "learning_rate": 4.802691051135497e-05,
      "loss": 0.8939,
      "step": 2880
    },
    {
      "epoch": 0.343026706231454,
      "grad_norm": 0.8702000379562378,
      "learning_rate": 4.800776199506392e-05,
      "loss": 0.8901,
      "step": 2890
    },
    {
      "epoch": 0.34421364985163205,
      "grad_norm": 0.9271148443222046,
      "learning_rate": 4.798852486528501e-05,
      "loss": 0.9171,
      "step": 2900
    },
    {
      "epoch": 0.34540059347181007,
      "grad_norm": 0.7314199805259705,
      "learning_rate": 4.796919919610929e-05,
      "loss": 0.9328,
      "step": 2910
    },
    {
      "epoch": 0.34658753709198814,
      "grad_norm": 0.7465083599090576,
      "learning_rate": 4.79497850619688e-05,
      "loss": 0.8635,
      "step": 2920
    },
    {
      "epoch": 0.34777448071216616,
      "grad_norm": 0.9858341217041016,
      "learning_rate": 4.793028253763633e-05,
      "loss": 0.9312,
      "step": 2930
    },
    {
      "epoch": 0.3489614243323442,
      "grad_norm": 0.8937369585037231,
      "learning_rate": 4.791069169822507e-05,
      "loss": 0.8388,
      "step": 2940
    },
    {
      "epoch": 0.35014836795252224,
      "grad_norm": 1.0096062421798706,
      "learning_rate": 4.789101261918837e-05,
      "loss": 0.8698,
      "step": 2950
    },
    {
      "epoch": 0.3513353115727003,
      "grad_norm": 0.7233800888061523,
      "learning_rate": 4.787124537631942e-05,
      "loss": 0.8332,
      "step": 2960
    },
    {
      "epoch": 0.35252225519287833,
      "grad_norm": 0.7524568438529968,
      "learning_rate": 4.785139004575099e-05,
      "loss": 0.9154,
      "step": 2970
    },
    {
      "epoch": 0.3537091988130564,
      "grad_norm": 1.2735203504562378,
      "learning_rate": 4.783144670395509e-05,
      "loss": 0.8637,
      "step": 2980
    },
    {
      "epoch": 0.3548961424332344,
      "grad_norm": 1.1231369972229004,
      "learning_rate": 4.781141542774272e-05,
      "loss": 0.8696,
      "step": 2990
    },
    {
      "epoch": 0.3560830860534125,
      "grad_norm": 0.8862468004226685,
      "learning_rate": 4.7791296294263556e-05,
      "loss": 0.9512,
      "step": 3000
    },
    {
      "epoch": 0.3560830860534125,
      "eval_loss": 0.8069484829902649,
      "eval_runtime": 774.9961,
      "eval_samples_per_second": 2.289,
      "eval_steps_per_second": 0.573,
      "step": 3000
    },
    {
      "epoch": 0.3572700296735905,
      "grad_norm": 0.8185063600540161,
      "learning_rate": 4.7771089381005636e-05,
      "loss": 0.9453,
      "step": 3010
    },
    {
      "epoch": 0.3584569732937685,
      "grad_norm": 0.8352025747299194,
      "learning_rate": 4.7750794765795095e-05,
      "loss": 0.8775,
      "step": 3020
    },
    {
      "epoch": 0.3596439169139466,
      "grad_norm": 0.7337610125541687,
      "learning_rate": 4.773041252679584e-05,
      "loss": 0.7979,
      "step": 3030
    },
    {
      "epoch": 0.3608308605341246,
      "grad_norm": 0.980402946472168,
      "learning_rate": 4.770994274250926e-05,
      "loss": 0.923,
      "step": 3040
    },
    {
      "epoch": 0.3620178041543027,
      "grad_norm": 0.970067024230957,
      "learning_rate": 4.768938549177393e-05,
      "loss": 0.9579,
      "step": 3050
    },
    {
      "epoch": 0.3632047477744807,
      "grad_norm": 0.7478693127632141,
      "learning_rate": 4.766874085376528e-05,
      "loss": 0.9448,
      "step": 3060
    },
    {
      "epoch": 0.36439169139465877,
      "grad_norm": 0.8010281920433044,
      "learning_rate": 4.764800890799532e-05,
      "loss": 0.7986,
      "step": 3070
    },
    {
      "epoch": 0.3655786350148368,
      "grad_norm": 0.8199505805969238,
      "learning_rate": 4.762718973431234e-05,
      "loss": 0.8876,
      "step": 3080
    },
    {
      "epoch": 0.36676557863501486,
      "grad_norm": 0.9049654006958008,
      "learning_rate": 4.760628341290054e-05,
      "loss": 0.8882,
      "step": 3090
    },
    {
      "epoch": 0.36795252225519287,
      "grad_norm": 0.9176567196846008,
      "learning_rate": 4.7585290024279806e-05,
      "loss": 0.9358,
      "step": 3100
    },
    {
      "epoch": 0.36913946587537094,
      "grad_norm": 0.9601384997367859,
      "learning_rate": 4.756420964930535e-05,
      "loss": 0.9252,
      "step": 3110
    },
    {
      "epoch": 0.37032640949554896,
      "grad_norm": 1.0265899896621704,
      "learning_rate": 4.754304236916739e-05,
      "loss": 0.9221,
      "step": 3120
    },
    {
      "epoch": 0.37151335311572703,
      "grad_norm": 1.52367103099823,
      "learning_rate": 4.752178826539089e-05,
      "loss": 0.8951,
      "step": 3130
    },
    {
      "epoch": 0.37270029673590505,
      "grad_norm": 0.9129610657691956,
      "learning_rate": 4.750044741983517e-05,
      "loss": 0.9562,
      "step": 3140
    },
    {
      "epoch": 0.37388724035608306,
      "grad_norm": 0.8349341154098511,
      "learning_rate": 4.747901991469367e-05,
      "loss": 0.8603,
      "step": 3150
    },
    {
      "epoch": 0.37507418397626113,
      "grad_norm": 1.1370227336883545,
      "learning_rate": 4.7457505832493584e-05,
      "loss": 0.8294,
      "step": 3160
    },
    {
      "epoch": 0.37626112759643915,
      "grad_norm": 0.9587147235870361,
      "learning_rate": 4.743590525609554e-05,
      "loss": 0.8465,
      "step": 3170
    },
    {
      "epoch": 0.3774480712166172,
      "grad_norm": 0.8471585512161255,
      "learning_rate": 4.741421826869331e-05,
      "loss": 0.8936,
      "step": 3180
    },
    {
      "epoch": 0.37863501483679524,
      "grad_norm": 0.9739141464233398,
      "learning_rate": 4.7392444953813476e-05,
      "loss": 0.8797,
      "step": 3190
    },
    {
      "epoch": 0.3798219584569733,
      "grad_norm": 0.9010668992996216,
      "learning_rate": 4.737058539531509e-05,
      "loss": 0.801,
      "step": 3200
    },
    {
      "epoch": 0.3810089020771513,
      "grad_norm": 1.0589792728424072,
      "learning_rate": 4.7348639677389395e-05,
      "loss": 0.8597,
      "step": 3210
    },
    {
      "epoch": 0.3821958456973294,
      "grad_norm": 1.1330857276916504,
      "learning_rate": 4.732660788455944e-05,
      "loss": 0.9204,
      "step": 3220
    },
    {
      "epoch": 0.3833827893175074,
      "grad_norm": 1.1525068283081055,
      "learning_rate": 4.730449010167982e-05,
      "loss": 0.8399,
      "step": 3230
    },
    {
      "epoch": 0.3845697329376855,
      "grad_norm": 1.045083999633789,
      "learning_rate": 4.72822864139363e-05,
      "loss": 0.8264,
      "step": 3240
    },
    {
      "epoch": 0.3857566765578635,
      "grad_norm": 0.9954350590705872,
      "learning_rate": 4.72599969068455e-05,
      "loss": 0.8966,
      "step": 3250
    },
    {
      "epoch": 0.38694362017804157,
      "grad_norm": 0.9580268859863281,
      "learning_rate": 4.7237621666254596e-05,
      "loss": 0.8649,
      "step": 3260
    },
    {
      "epoch": 0.3881305637982196,
      "grad_norm": 0.945152997970581,
      "learning_rate": 4.7215160778340925e-05,
      "loss": 0.8898,
      "step": 3270
    },
    {
      "epoch": 0.3893175074183976,
      "grad_norm": 0.8537838459014893,
      "learning_rate": 4.7192614329611715e-05,
      "loss": 0.8578,
      "step": 3280
    },
    {
      "epoch": 0.3905044510385757,
      "grad_norm": 0.8144123554229736,
      "learning_rate": 4.716998240690373e-05,
      "loss": 0.8975,
      "step": 3290
    },
    {
      "epoch": 0.3916913946587537,
      "grad_norm": 1.0924110412597656,
      "learning_rate": 4.714726509738293e-05,
      "loss": 0.9747,
      "step": 3300
    },
    {
      "epoch": 0.39287833827893176,
      "grad_norm": 0.9143339395523071,
      "learning_rate": 4.712446248854413e-05,
      "loss": 0.8604,
      "step": 3310
    },
    {
      "epoch": 0.3940652818991098,
      "grad_norm": 0.9802985191345215,
      "learning_rate": 4.710157466821068e-05,
      "loss": 0.9112,
      "step": 3320
    },
    {
      "epoch": 0.39525222551928785,
      "grad_norm": 0.8650179505348206,
      "learning_rate": 4.7078601724534124e-05,
      "loss": 0.9519,
      "step": 3330
    },
    {
      "epoch": 0.39643916913946586,
      "grad_norm": 0.9761397242546082,
      "learning_rate": 4.7055543745993824e-05,
      "loss": 0.8376,
      "step": 3340
    },
    {
      "epoch": 0.39762611275964393,
      "grad_norm": 1.0427495241165161,
      "learning_rate": 4.70324008213967e-05,
      "loss": 0.8693,
      "step": 3350
    },
    {
      "epoch": 0.39881305637982195,
      "grad_norm": 0.8753753304481506,
      "learning_rate": 4.700917303987682e-05,
      "loss": 0.8574,
      "step": 3360
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.9678823947906494,
      "learning_rate": 4.698586049089504e-05,
      "loss": 0.8288,
      "step": 3370
    },
    {
      "epoch": 0.40118694362017804,
      "grad_norm": 0.7577453851699829,
      "learning_rate": 4.696246326423874e-05,
      "loss": 0.9234,
      "step": 3380
    },
    {
      "epoch": 0.4023738872403561,
      "grad_norm": 0.9350581169128418,
      "learning_rate": 4.6938981450021416e-05,
      "loss": 0.8608,
      "step": 3390
    },
    {
      "epoch": 0.4035608308605341,
      "grad_norm": 0.9648170471191406,
      "learning_rate": 4.691541513868234e-05,
      "loss": 1.0063,
      "step": 3400
    },
    {
      "epoch": 0.40474777448071214,
      "grad_norm": 0.9693950414657593,
      "learning_rate": 4.689176442098624e-05,
      "loss": 0.9051,
      "step": 3410
    },
    {
      "epoch": 0.4059347181008902,
      "grad_norm": 1.0306636095046997,
      "learning_rate": 4.6868029388022925e-05,
      "loss": 0.8788,
      "step": 3420
    },
    {
      "epoch": 0.40712166172106823,
      "grad_norm": 0.9469864368438721,
      "learning_rate": 4.684421013120694e-05,
      "loss": 0.8339,
      "step": 3430
    },
    {
      "epoch": 0.4083086053412463,
      "grad_norm": 1.083781123161316,
      "learning_rate": 4.6820306742277184e-05,
      "loss": 0.816,
      "step": 3440
    },
    {
      "epoch": 0.4094955489614243,
      "grad_norm": 0.7388644218444824,
      "learning_rate": 4.6796319313296653e-05,
      "loss": 0.9355,
      "step": 3450
    },
    {
      "epoch": 0.4106824925816024,
      "grad_norm": 0.7487624287605286,
      "learning_rate": 4.677224793665197e-05,
      "loss": 0.8786,
      "step": 3460
    },
    {
      "epoch": 0.4118694362017804,
      "grad_norm": 1.0239750146865845,
      "learning_rate": 4.674809270505308e-05,
      "loss": 0.8933,
      "step": 3470
    },
    {
      "epoch": 0.4130563798219585,
      "grad_norm": 0.9962897896766663,
      "learning_rate": 4.672385371153293e-05,
      "loss": 0.9012,
      "step": 3480
    },
    {
      "epoch": 0.4142433234421365,
      "grad_norm": 1.0697799921035767,
      "learning_rate": 4.669953104944703e-05,
      "loss": 0.8838,
      "step": 3490
    },
    {
      "epoch": 0.41543026706231456,
      "grad_norm": 0.9961535930633545,
      "learning_rate": 4.667512481247316e-05,
      "loss": 0.8607,
      "step": 3500
    },
    {
      "epoch": 0.41543026706231456,
      "eval_loss": 0.8028744459152222,
      "eval_runtime": 777.2663,
      "eval_samples_per_second": 2.282,
      "eval_steps_per_second": 0.571,
      "step": 3500
    },
    {
      "epoch": 0.4166172106824926,
      "grad_norm": 0.8088350892066956,
      "learning_rate": 4.665063509461097e-05,
      "loss": 0.7517,
      "step": 3510
    },
    {
      "epoch": 0.41780415430267065,
      "grad_norm": 0.9155455827713013,
      "learning_rate": 4.6626061990181655e-05,
      "loss": 0.8841,
      "step": 3520
    },
    {
      "epoch": 0.41899109792284867,
      "grad_norm": 1.1847515106201172,
      "learning_rate": 4.660140559382754e-05,
      "loss": 0.8914,
      "step": 3530
    },
    {
      "epoch": 0.4201780415430267,
      "grad_norm": 1.216593623161316,
      "learning_rate": 4.6576666000511795e-05,
      "loss": 0.8503,
      "step": 3540
    },
    {
      "epoch": 0.42136498516320475,
      "grad_norm": 0.8132240772247314,
      "learning_rate": 4.6551843305517955e-05,
      "loss": 0.8313,
      "step": 3550
    },
    {
      "epoch": 0.42255192878338277,
      "grad_norm": 0.7744690775871277,
      "learning_rate": 4.652693760444967e-05,
      "loss": 0.8644,
      "step": 3560
    },
    {
      "epoch": 0.42373887240356084,
      "grad_norm": 1.0237466096878052,
      "learning_rate": 4.650194899323025e-05,
      "loss": 0.7594,
      "step": 3570
    },
    {
      "epoch": 0.42492581602373886,
|
"grad_norm": 1.1930066347122192, |
|
"learning_rate": 4.647687756810236e-05, |
|
"loss": 0.8841, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.4261127596439169, |
|
"grad_norm": 1.263022780418396, |
|
"learning_rate": 4.64517234256276e-05, |
|
"loss": 0.8631, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.42729970326409494, |
|
"grad_norm": 0.7010476589202881, |
|
"learning_rate": 4.642648666268616e-05, |
|
"loss": 0.929, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.428486646884273, |
|
"grad_norm": 0.9364275336265564, |
|
"learning_rate": 4.6401167376476426e-05, |
|
"loss": 0.8954, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.42967359050445103, |
|
"grad_norm": 0.752667248249054, |
|
"learning_rate": 4.637576566451465e-05, |
|
"loss": 0.8819, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.4308605341246291, |
|
"grad_norm": 0.9347110986709595, |
|
"learning_rate": 4.63502816246345e-05, |
|
"loss": 0.9257, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.4320474777448071, |
|
"grad_norm": 0.7974644303321838, |
|
"learning_rate": 4.632471535498677e-05, |
|
"loss": 0.9112, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.4332344213649852, |
|
"grad_norm": 0.8897979855537415, |
|
"learning_rate": 4.629906695403894e-05, |
|
"loss": 0.8222, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.4344213649851632, |
|
"grad_norm": 0.7003577351570129, |
|
"learning_rate": 4.6273336520574796e-05, |
|
"loss": 0.8635, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.4356083086053412, |
|
"grad_norm": 0.741892397403717, |
|
"learning_rate": 4.62475241536941e-05, |
|
"loss": 0.8561, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.4367952522255193, |
|
"grad_norm": 0.816657304763794, |
|
"learning_rate": 4.622162995281216e-05, |
|
"loss": 0.9304, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.4379821958456973, |
|
"grad_norm": 0.8286069631576538, |
|
"learning_rate": 4.6195654017659475e-05, |
|
"loss": 0.9195, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.4391691394658754, |
|
"grad_norm": 1.2624543905258179, |
|
"learning_rate": 4.616959644828133e-05, |
|
"loss": 0.8274, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.4403560830860534, |
|
"grad_norm": 0.7968791127204895, |
|
"learning_rate": 4.614345734503743e-05, |
|
"loss": 0.8574, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.44154302670623147, |
|
"grad_norm": 0.7390718460083008, |
|
"learning_rate": 4.6117236808601495e-05, |
|
"loss": 0.8595, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.4427299703264095, |
|
"grad_norm": 0.8652002811431885, |
|
"learning_rate": 4.6090934939960894e-05, |
|
"loss": 0.9118, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.44391691394658755, |
|
"grad_norm": 0.7718791365623474, |
|
"learning_rate": 4.606455184041622e-05, |
|
"loss": 0.9111, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.44510385756676557, |
|
"grad_norm": 0.7578750848770142, |
|
"learning_rate": 4.603808761158097e-05, |
|
"loss": 0.831, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.44629080118694364, |
|
"grad_norm": 0.7663601636886597, |
|
"learning_rate": 4.6011542355381055e-05, |
|
"loss": 0.8584, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.44747774480712166, |
|
"grad_norm": 0.9673477411270142, |
|
"learning_rate": 4.5984916174054494e-05, |
|
"loss": 0.8244, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.44866468842729973, |
|
"grad_norm": 0.8995377421379089, |
|
"learning_rate": 4.5958209170150956e-05, |
|
"loss": 0.8669, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.44985163204747775, |
|
"grad_norm": 0.7621747851371765, |
|
"learning_rate": 4.593142144653143e-05, |
|
"loss": 0.7874, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.45103857566765576, |
|
"grad_norm": 0.9863945841789246, |
|
"learning_rate": 4.5904553106367774e-05, |
|
"loss": 0.8802, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.45222551928783383, |
|
"grad_norm": 0.9811915159225464, |
|
"learning_rate": 4.5877604253142336e-05, |
|
"loss": 0.8655, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.45341246290801185, |
|
"grad_norm": 0.9134243130683899, |
|
"learning_rate": 4.5850574990647566e-05, |
|
"loss": 0.843, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.4545994065281899, |
|
"grad_norm": 0.788000226020813, |
|
"learning_rate": 4.58234654229856e-05, |
|
"loss": 0.9024, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.45578635014836794, |
|
"grad_norm": 0.9368435740470886, |
|
"learning_rate": 4.579627565456786e-05, |
|
"loss": 0.8806, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.456973293768546, |
|
"grad_norm": 0.9639891982078552, |
|
"learning_rate": 4.576900579011469e-05, |
|
"loss": 0.8829, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.458160237388724, |
|
"grad_norm": 0.9119428992271423, |
|
"learning_rate": 4.574165593465487e-05, |
|
"loss": 0.8648, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.4593471810089021, |
|
"grad_norm": 0.9542068243026733, |
|
"learning_rate": 4.571422619352531e-05, |
|
"loss": 0.9334, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.4605341246290801, |
|
"grad_norm": 0.9423951506614685, |
|
"learning_rate": 4.5686716672370574e-05, |
|
"loss": 0.8339, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.4617210682492582, |
|
"grad_norm": 0.8227980732917786, |
|
"learning_rate": 4.565912747714248e-05, |
|
"loss": 0.9334, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.4629080118694362, |
|
"grad_norm": 0.5783571004867554, |
|
"learning_rate": 4.563145871409974e-05, |
|
"loss": 0.818, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.46409495548961427, |
|
"grad_norm": 0.8463440537452698, |
|
"learning_rate": 4.560371048980751e-05, |
|
"loss": 0.864, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.4652818991097923, |
|
"grad_norm": 0.6289271116256714, |
|
"learning_rate": 4.5575882911136966e-05, |
|
"loss": 0.9059, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.4664688427299703, |
|
"grad_norm": 0.9256350994110107, |
|
"learning_rate": 4.554797608526493e-05, |
|
"loss": 0.8776, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.4676557863501484, |
|
"grad_norm": 0.9969144463539124, |
|
"learning_rate": 4.5519990119673454e-05, |
|
"loss": 0.8477, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.4688427299703264, |
|
"grad_norm": 0.7296236753463745, |
|
"learning_rate": 4.5491925122149373e-05, |
|
"loss": 0.9374, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.47002967359050446, |
|
"grad_norm": 0.9291085004806519, |
|
"learning_rate": 4.546378120078392e-05, |
|
"loss": 0.9612, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.4712166172106825, |
|
"grad_norm": 0.977351188659668, |
|
"learning_rate": 4.543555846397229e-05, |
|
"loss": 0.8603, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.47240356083086055, |
|
"grad_norm": 0.6831493377685547, |
|
"learning_rate": 4.540725702041326e-05, |
|
"loss": 0.8115, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.47359050445103856, |
|
"grad_norm": 1.238186001777649, |
|
"learning_rate": 4.537887697910871e-05, |
|
"loss": 0.7757, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.47477744807121663, |
|
"grad_norm": 0.8050864338874817, |
|
"learning_rate": 4.535041844936324e-05, |
|
"loss": 0.8233, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.47477744807121663, |
|
"eval_loss": 0.7993733286857605, |
|
"eval_runtime": 773.0065, |
|
"eval_samples_per_second": 2.295, |
|
"eval_steps_per_second": 0.574, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.47596439169139465, |
|
"grad_norm": 0.8855427503585815, |
|
"learning_rate": 4.532188154078377e-05, |
|
"loss": 0.82, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.4771513353115727, |
|
"grad_norm": 0.9453259706497192, |
|
"learning_rate": 4.5293266363279084e-05, |
|
"loss": 0.9316, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.47833827893175074, |
|
"grad_norm": 0.8510525226593018, |
|
"learning_rate": 4.526457302705939e-05, |
|
"loss": 0.8287, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.4795252225519288, |
|
"grad_norm": 0.757831871509552, |
|
"learning_rate": 4.5235801642635956e-05, |
|
"loss": 0.8773, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.4807121661721068, |
|
"grad_norm": 0.649082362651825, |
|
"learning_rate": 4.520695232082062e-05, |
|
"loss": 0.8056, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.48189910979228484, |
|
"grad_norm": 0.7211092710494995, |
|
"learning_rate": 4.517802517272542e-05, |
|
"loss": 0.8503, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.4830860534124629, |
|
"grad_norm": 1.1383498907089233, |
|
"learning_rate": 4.514902030976212e-05, |
|
"loss": 0.9434, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.48427299703264093, |
|
"grad_norm": 0.8772423267364502, |
|
"learning_rate": 4.5119937843641816e-05, |
|
"loss": 0.8708, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.485459940652819, |
|
"grad_norm": 0.8966817259788513, |
|
"learning_rate": 4.509077788637446e-05, |
|
"loss": 0.881, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.486646884272997, |
|
"grad_norm": 0.7666980624198914, |
|
"learning_rate": 4.506154055026849e-05, |
|
"loss": 0.8542, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.4878338278931751, |
|
"grad_norm": 0.8315165042877197, |
|
"learning_rate": 4.5032225947930354e-05, |
|
"loss": 0.8535, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.4890207715133531, |
|
"grad_norm": 0.9881699681282043, |
|
"learning_rate": 4.5002834192264074e-05, |
|
"loss": 0.9076, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.4902077151335312, |
|
"grad_norm": 0.7229058146476746, |
|
"learning_rate": 4.497336539647086e-05, |
|
"loss": 0.8915, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.4913946587537092, |
|
"grad_norm": 0.8700774312019348, |
|
"learning_rate": 4.494381967404859e-05, |
|
"loss": 0.8986, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.49258160237388726, |
|
"grad_norm": 0.8586352467536926, |
|
"learning_rate": 4.491419713879147e-05, |
|
"loss": 0.886, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.4937685459940653, |
|
"grad_norm": 0.9397504329681396, |
|
"learning_rate": 4.488449790478952e-05, |
|
"loss": 0.8476, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.49495548961424335, |
|
"grad_norm": 0.9243069887161255, |
|
"learning_rate": 4.4854722086428155e-05, |
|
"loss": 0.8302, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.49614243323442137, |
|
"grad_norm": 0.9511041641235352, |
|
"learning_rate": 4.482486979838779e-05, |
|
"loss": 0.8928, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.4973293768545994, |
|
"grad_norm": 0.7937717437744141, |
|
"learning_rate": 4.479494115564332e-05, |
|
"loss": 0.8834, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.49851632047477745, |
|
"grad_norm": 0.7729777693748474, |
|
"learning_rate": 4.4764936273463734e-05, |
|
"loss": 0.8346, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.49970326409495547, |
|
"grad_norm": 0.8554561734199524, |
|
"learning_rate": 4.473485526741164e-05, |
|
"loss": 0.9021, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.5008902077151335, |
|
"grad_norm": 1.142722725868225, |
|
"learning_rate": 4.470469825334287e-05, |
|
"loss": 0.8927, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.5020771513353116, |
|
"grad_norm": 0.8637948632240295, |
|
"learning_rate": 4.467446534740596e-05, |
|
"loss": 0.9063, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.5032640949554896, |
|
"grad_norm": 0.7488009929656982, |
|
"learning_rate": 4.464415666604176e-05, |
|
"loss": 0.8494, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.5044510385756676, |
|
"grad_norm": 1.1592786312103271, |
|
"learning_rate": 4.4613772325982954e-05, |
|
"loss": 0.8737, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.5056379821958457, |
|
"grad_norm": 0.8699279427528381, |
|
"learning_rate": 4.458331244425364e-05, |
|
"loss": 0.8825, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.5068249258160238, |
|
"grad_norm": 0.8104872703552246, |
|
"learning_rate": 4.4552777138168836e-05, |
|
"loss": 0.8296, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.5080118694362018, |
|
"grad_norm": 0.9313340187072754, |
|
"learning_rate": 4.452216652533409e-05, |
|
"loss": 0.92, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.5091988130563798, |
|
"grad_norm": 1.2348231077194214, |
|
"learning_rate": 4.4491480723644944e-05, |
|
"loss": 0.9548, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.5103857566765578, |
|
"grad_norm": 0.7835610508918762, |
|
"learning_rate": 4.446071985128657e-05, |
|
"loss": 0.8719, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.511572700296736, |
|
"grad_norm": 0.7401361465454102, |
|
"learning_rate": 4.442988402673326e-05, |
|
"loss": 0.9493, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.512759643916914, |
|
"grad_norm": 0.9936283826828003, |
|
"learning_rate": 4.439897336874796e-05, |
|
"loss": 0.849, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.513946587537092, |
|
"grad_norm": 0.7580452561378479, |
|
"learning_rate": 4.436798799638186e-05, |
|
"loss": 0.8755, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.51513353115727, |
|
"grad_norm": 0.6950749754905701, |
|
"learning_rate": 4.433692802897389e-05, |
|
"loss": 0.8511, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.516320474777448, |
|
"grad_norm": 0.9436466693878174, |
|
"learning_rate": 4.43057935861503e-05, |
|
"loss": 0.8442, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.5175074183976262, |
|
"grad_norm": 0.6432386636734009, |
|
"learning_rate": 4.4274584787824166e-05, |
|
"loss": 0.8074, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.5186943620178042, |
|
"grad_norm": 0.6607010364532471, |
|
"learning_rate": 4.424330175419495e-05, |
|
"loss": 0.8124, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.5198813056379822, |
|
"grad_norm": 0.7476988434791565, |
|
"learning_rate": 4.421194460574801e-05, |
|
"loss": 0.9036, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.5210682492581602, |
|
"grad_norm": 0.6674066781997681, |
|
"learning_rate": 4.418051346325417e-05, |
|
"loss": 0.8132, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.5222551928783383, |
|
"grad_norm": 1.0428473949432373, |
|
"learning_rate": 4.414900844776924e-05, |
|
"loss": 0.897, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.5234421364985163, |
|
"grad_norm": 1.014649510383606, |
|
"learning_rate": 4.411742968063354e-05, |
|
"loss": 0.8464, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.5246290801186944, |
|
"grad_norm": 0.7525945901870728, |
|
"learning_rate": 4.408577728347144e-05, |
|
"loss": 0.8195, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.5258160237388724, |
|
"grad_norm": 0.8915858268737793, |
|
"learning_rate": 4.4054051378190915e-05, |
|
"loss": 0.8397, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.5270029673590505, |
|
"grad_norm": 0.9980171918869019, |
|
"learning_rate": 4.402225208698303e-05, |
|
"loss": 0.8515, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.5281899109792285, |
|
"grad_norm": 0.862018883228302, |
|
"learning_rate": 4.3990379532321495e-05, |
|
"loss": 0.9129, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.5293768545994065, |
|
"grad_norm": 0.6026172041893005, |
|
"learning_rate": 4.395843383696221e-05, |
|
"loss": 0.9185, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.5305637982195845, |
|
"grad_norm": 0.8191978335380554, |
|
"learning_rate": 4.392641512394274e-05, |
|
"loss": 0.8652, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.5317507418397626, |
|
"grad_norm": 0.7325008511543274, |
|
"learning_rate": 4.389432351658193e-05, |
|
"loss": 0.8111, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.5329376854599407, |
|
"grad_norm": 0.7030380368232727, |
|
"learning_rate": 4.3862159138479305e-05, |
|
"loss": 0.8795, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.5341246290801187, |
|
"grad_norm": 1.045393466949463, |
|
"learning_rate": 4.382992211351471e-05, |
|
"loss": 0.9116, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.5341246290801187, |
|
"eval_loss": 0.7951666116714478, |
|
"eval_runtime": 772.1963, |
|
"eval_samples_per_second": 2.297, |
|
"eval_steps_per_second": 0.575, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.5353115727002967, |
|
"grad_norm": 0.9076895713806152, |
|
"learning_rate": 4.3797612565847785e-05, |
|
"loss": 0.8539, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.5364985163204747, |
|
"grad_norm": 0.7308244109153748, |
|
"learning_rate": 4.376523061991747e-05, |
|
"loss": 0.8028, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.5376854599406529, |
|
"grad_norm": 0.8944220542907715, |
|
"learning_rate": 4.373277640044156e-05, |
|
"loss": 0.8532, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.5388724035608309, |
|
"grad_norm": 0.7320210337638855, |
|
"learning_rate": 4.370025003241618e-05, |
|
"loss": 0.8444, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.5400593471810089, |
|
"grad_norm": 0.7950882315635681, |
|
"learning_rate": 4.366765164111538e-05, |
|
"loss": 0.8683, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.5412462908011869, |
|
"grad_norm": 0.9182088375091553, |
|
"learning_rate": 4.3634981352090555e-05, |
|
"loss": 0.8671, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.542433234421365, |
|
"grad_norm": 0.943240761756897, |
|
"learning_rate": 4.3602239291170056e-05, |
|
"loss": 0.8846, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.5436201780415431, |
|
"grad_norm": 0.8208020925521851, |
|
"learning_rate": 4.356942558445862e-05, |
|
"loss": 0.8565, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.5448071216617211, |
|
"grad_norm": 1.0908632278442383, |
|
"learning_rate": 4.353654035833697e-05, |
|
"loss": 0.9188, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.5459940652818991, |
|
"grad_norm": 0.796347439289093, |
|
"learning_rate": 4.3503583739461235e-05, |
|
"loss": 0.8693, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.5471810089020771, |
|
"grad_norm": 0.9802513718605042, |
|
"learning_rate": 4.347055585476254e-05, |
|
"loss": 0.8915, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.5483679525222552, |
|
"grad_norm": 0.9925609827041626, |
|
"learning_rate": 4.343745683144647e-05, |
|
"loss": 0.9135, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.5495548961424332, |
|
"grad_norm": 0.9672980904579163, |
|
"learning_rate": 4.340428679699262e-05, |
|
"loss": 0.8645, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.5507418397626113, |
|
"grad_norm": 0.7970063090324402, |
|
"learning_rate": 4.337104587915407e-05, |
|
"loss": 0.8438, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.5519287833827893, |
|
"grad_norm": 0.8159870505332947, |
|
"learning_rate": 4.3337734205956905e-05, |
|
"loss": 0.8973, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.5531157270029674, |
|
"grad_norm": 0.7052998542785645, |
|
"learning_rate": 4.3304351905699714e-05, |
|
"loss": 0.9022, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.5543026706231454, |
|
"grad_norm": 1.0930620431900024, |
|
"learning_rate": 4.3270899106953105e-05, |
|
"loss": 0.8904, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.5554896142433234, |
|
"grad_norm": 0.8277913928031921, |
|
"learning_rate": 4.323737593855922e-05, |
|
"loss": 0.9505, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.5566765578635015, |
|
"grad_norm": 1.1288316249847412, |
|
"learning_rate": 4.320378252963123e-05, |
|
"loss": 0.8257, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.5578635014836796, |
|
"grad_norm": 0.7659019231796265, |
|
"learning_rate": 4.31701190095528e-05, |
|
"loss": 0.8508, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.5590504451038576, |
|
"grad_norm": 0.7803889513015747, |
|
"learning_rate": 4.3136385507977674e-05, |
|
"loss": 0.9115, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.5602373887240356, |
|
"grad_norm": 1.101162075996399, |
|
"learning_rate": 4.310258215482909e-05, |
|
"loss": 0.8446, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.5614243323442136, |
|
"grad_norm": 0.9544945955276489, |
|
"learning_rate": 4.306870908029933e-05, |
|
"loss": 0.9256, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.5626112759643916, |
|
"grad_norm": 0.6556288003921509, |
|
"learning_rate": 4.303476641484921e-05, |
|
"loss": 0.8348, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.5637982195845698, |
|
"grad_norm": 0.7247066497802734, |
|
"learning_rate": 4.300075428920756e-05, |
|
"loss": 0.8539, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.5649851632047478, |
|
"grad_norm": 0.8974981904029846, |
|
"learning_rate": 4.2966672834370746e-05, |
|
"loss": 0.8447, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.5661721068249258, |
|
"grad_norm": 0.7167539596557617, |
|
"learning_rate": 4.2932522181602146e-05, |
|
"loss": 0.881, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.5673590504451038, |
|
"grad_norm": 0.8274498581886292, |
|
"learning_rate": 4.289830246243165e-05, |
|
"loss": 0.834, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.568545994065282, |
|
"grad_norm": 0.9641168117523193, |
|
"learning_rate": 4.2864013808655155e-05, |
|
"loss": 0.7953, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.56973293768546, |
|
"grad_norm": 1.0160974264144897, |
|
"learning_rate": 4.282965635233407e-05, |
|
"loss": 0.9052, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.570919881305638, |
|
"grad_norm": 0.7646157145500183, |
|
"learning_rate": 4.279523022579477e-05, |
|
"loss": 0.8616, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.572106824925816, |
|
"grad_norm": 0.6680961847305298, |
|
"learning_rate": 4.276073556162813e-05, |
|
"loss": 0.8366, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.5732937685459941, |
|
"grad_norm": 0.5522508025169373, |
|
"learning_rate": 4.272617249268899e-05, |
|
"loss": 0.7855, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.5744807121661721, |
|
"grad_norm": 0.989126443862915, |
|
"learning_rate": 4.2691541152095636e-05, |
|
"loss": 0.8128, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.5756676557863502, |
|
"grad_norm": 0.8510265946388245, |
|
"learning_rate": 4.265684167322932e-05, |
|
"loss": 0.8525, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.5768545994065282, |
|
"grad_norm": 0.9399095177650452, |
|
"learning_rate": 4.2622074189733715e-05, |
|
"loss": 0.801, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.5780415430267062, |
|
"grad_norm": 0.8262656331062317, |
|
"learning_rate": 4.258723883551441e-05, |
|
"loss": 0.8555, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.5792284866468843, |
|
"grad_norm": 0.9158207178115845, |
|
"learning_rate": 4.25523357447384e-05, |
|
"loss": 0.8475, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.5804154302670623, |
|
"grad_norm": 0.8734104633331299, |
|
"learning_rate": 4.251736505183356e-05, |
|
"loss": 0.8769, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.5816023738872403, |
|
"grad_norm": 0.6260775923728943, |
|
"learning_rate": 4.2482326891488146e-05, |
|
"loss": 0.9077, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.5827893175074184, |
|
"grad_norm": 0.7941166758537292, |
|
"learning_rate": 4.244722139865023e-05, |
|
"loss": 0.8298, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.5839762611275965, |
|
"grad_norm": 1.005319356918335, |
|
"learning_rate": 4.241204870852725e-05, |
|
"loss": 0.8613, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.5851632047477745, |
|
"grad_norm": 0.7308863401412964, |
|
"learning_rate": 4.237680895658543e-05, |
|
"loss": 0.8962, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.5863501483679525, |
|
"grad_norm": 0.7635941505432129, |
|
"learning_rate": 4.2341502278549285e-05, |
|
"loss": 0.7996, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.5875370919881305, |
|
"grad_norm": 0.8385136723518372, |
|
"learning_rate": 4.230612881040108e-05, |
|
"loss": 0.8976, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.5887240356083087, |
|
"grad_norm": 1.0231964588165283, |
|
"learning_rate": 4.227068868838035e-05, |
|
"loss": 0.8559, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.5899109792284867, |
|
"grad_norm": 1.0560635328292847, |
|
"learning_rate": 4.223518204898332e-05, |
|
"loss": 0.8956, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.5910979228486647, |
|
"grad_norm": 0.8823438286781311, |
|
"learning_rate": 4.2199609028962414e-05, |
|
"loss": 0.8867, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.5922848664688427, |
|
"grad_norm": 0.8164929747581482, |
|
"learning_rate": 4.216396976532571e-05, |
|
"loss": 0.8373, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.5934718100890207, |
|
"grad_norm": 1.1652288436889648, |
|
"learning_rate": 4.2128264395336436e-05, |
|
"loss": 0.8498, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.5934718100890207, |
|
"eval_loss": 0.7921267747879028, |
|
"eval_runtime": 774.3302, |
|
"eval_samples_per_second": 2.291, |
|
"eval_steps_per_second": 0.573, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.5946587537091989, |
|
"grad_norm": 0.7231582999229431, |
|
"learning_rate": 4.20924930565124e-05, |
|
"loss": 0.8121, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.5958456973293769, |
|
"grad_norm": 0.8947294354438782, |
|
"learning_rate": 4.205665588662553e-05, |
|
"loss": 0.8989, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.5970326409495549, |
|
"grad_norm": 0.9510688781738281, |
|
"learning_rate": 4.202075302370124e-05, |
|
"loss": 0.8759, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.5982195845697329, |
|
"grad_norm": 0.892690896987915, |
|
"learning_rate": 4.198478460601801e-05, |
|
"loss": 0.8538, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.599406528189911, |
|
"grad_norm": 0.8187076449394226, |
|
"learning_rate": 4.194875077210677e-05, |
|
"loss": 0.852, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.600593471810089, |
|
"grad_norm": 0.9023217558860779, |
|
"learning_rate": 4.191265166075043e-05, |
|
"loss": 0.8713, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.6017804154302671, |
|
"grad_norm": 0.797012209892273, |
|
"learning_rate": 4.1876487410983275e-05, |
|
"loss": 0.8255, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.6029673590504451, |
|
"grad_norm": 0.7908094525337219, |
|
"learning_rate": 4.184025816209049e-05, |
|
"loss": 0.8534, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.6041543026706232, |
|
"grad_norm": 0.8755646347999573, |
|
"learning_rate": 4.180396405360761e-05, |
|
"loss": 0.8395, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.6053412462908012, |
|
"grad_norm": 1.0383625030517578, |
|
"learning_rate": 4.176760522531995e-05, |
|
"loss": 0.8838, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.6065281899109792, |
|
"grad_norm": 0.937275767326355, |
|
"learning_rate": 4.17311818172621e-05, |
|
"loss": 0.8722, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.6077151335311572, |
|
"grad_norm": 0.9110156297683716, |
|
"learning_rate": 4.169469396971739e-05, |
|
"loss": 0.8799, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.6089020771513353, |
|
"grad_norm": 1.0142481327056885, |
|
"learning_rate": 4.165814182321732e-05, |
|
"loss": 0.8696, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.6100890207715134, |
|
"grad_norm": 1.2037073373794556, |
|
"learning_rate": 4.162152551854105e-05, |
|
"loss": 0.8787, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.6112759643916914, |
|
"grad_norm": 1.0719590187072754, |
|
"learning_rate": 4.1584845196714815e-05, |
|
"loss": 0.9276, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.6124629080118694, |
|
"grad_norm": 0.6941603422164917, |
|
"learning_rate": 4.1548100999011444e-05, |
|
"loss": 0.8573, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.6136498516320474, |
|
"grad_norm": 1.162614107131958, |
|
"learning_rate": 4.1511293066949766e-05, |
|
"loss": 0.852, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.6148367952522256, |
|
"grad_norm": 0.8986325860023499, |
|
"learning_rate": 4.147442154229406e-05, |
|
"loss": 0.8469, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.6160237388724036, |
|
"grad_norm": 0.7604609131813049, |
|
"learning_rate": 4.1437486567053565e-05, |
|
"loss": 0.7998, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.6172106824925816, |
|
"grad_norm": 0.7332816123962402, |
|
"learning_rate": 4.140048828348188e-05, |
|
"loss": 0.8393, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.6183976261127596, |
|
"grad_norm": 1.0732022523880005, |
|
"learning_rate": 4.1363426834076435e-05, |
|
"loss": 0.8923, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.6195845697329377, |
|
"grad_norm": 0.7702149748802185, |
|
"learning_rate": 4.132630236157793e-05, |
|
"loss": 0.8341, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.6207715133531158, |
|
"grad_norm": 0.7818199992179871, |
|
"learning_rate": 4.128911500896983e-05, |
|
"loss": 0.8694, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.6219584569732938, |
|
"grad_norm": 1.0020356178283691, |
|
"learning_rate": 4.1251864919477736e-05, |
|
"loss": 0.7892, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.6231454005934718, |
|
"grad_norm": 0.595758855342865, |
|
"learning_rate": 4.12145522365689e-05, |
|
"loss": 0.7445, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.6243323442136498, |
|
"grad_norm": 0.8680073618888855, |
|
"learning_rate": 4.117717710395166e-05, |
|
"loss": 0.8066, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.6255192878338279, |
|
"grad_norm": 1.1112210750579834, |
|
"learning_rate": 4.1139739665574856e-05, |
|
"loss": 0.8548, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.626706231454006, |
|
"grad_norm": 0.8211454153060913, |
|
"learning_rate": 4.11022400656273e-05, |
|
"loss": 0.8481, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.627893175074184, |
|
"grad_norm": 0.9181323051452637, |
|
"learning_rate": 4.1064678448537244e-05, |
|
"loss": 0.8785, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.629080118694362, |
|
"grad_norm": 1.1044420003890991, |
|
"learning_rate": 4.1027054958971746e-05, |
|
"loss": 0.8726, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.6302670623145401, |
|
"grad_norm": 0.8787449598312378, |
|
"learning_rate": 4.098936974183621e-05, |
|
"loss": 0.8111, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.6314540059347181, |
|
"grad_norm": 0.7336760759353638, |
|
"learning_rate": 4.095162294227377e-05, |
|
"loss": 0.8404, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.6326409495548961, |
|
"grad_norm": 0.8776949048042297, |
|
"learning_rate": 4.091381470566472e-05, |
|
"loss": 0.8317, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.6338278931750742, |
|
"grad_norm": 1.099990963935852, |
|
"learning_rate": 4.0875945177625996e-05, |
|
"loss": 0.8799, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.6350148367952523, |
|
"grad_norm": 0.7875162959098816, |
|
"learning_rate": 4.08380145040106e-05, |
|
"loss": 0.8337, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.6362017804154303, |
|
"grad_norm": 0.8717107772827148, |
|
"learning_rate": 4.0800022830907015e-05, |
|
"loss": 0.8433, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.6373887240356083, |
|
"grad_norm": 0.9052039980888367, |
|
"learning_rate": 4.076197030463868e-05, |
|
"loss": 0.9926, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.6385756676557863, |
|
"grad_norm": 0.9107869863510132, |
|
"learning_rate": 4.072385707176338e-05, |
|
"loss": 0.8785, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.6397626112759643, |
|
"grad_norm": 0.8352899551391602, |
|
"learning_rate": 4.068568327907275e-05, |
|
"loss": 0.8804, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.6409495548961425, |
|
"grad_norm": 1.1323997974395752, |
|
"learning_rate": 4.064744907359164e-05, |
|
"loss": 0.8881, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.6421364985163205, |
|
"grad_norm": 0.8301699161529541, |
|
"learning_rate": 4.060915460257759e-05, |
|
"loss": 0.8865, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.6433234421364985, |
|
"grad_norm": 0.9869147539138794, |
|
"learning_rate": 4.057080001352023e-05, |
|
"loss": 0.8698, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.6445103857566765, |
|
"grad_norm": 0.8932684063911438, |
|
"learning_rate": 4.0532385454140764e-05, |
|
"loss": 0.8295, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.6456973293768546, |
|
"grad_norm": 0.9118059277534485, |
|
"learning_rate": 4.0493911072391344e-05, |
|
"loss": 0.8094, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.6468842729970327, |
|
"grad_norm": 0.9855166673660278, |
|
"learning_rate": 4.0455377016454526e-05, |
|
"loss": 0.864, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.6480712166172107, |
|
"grad_norm": 0.7110412120819092, |
|
"learning_rate": 4.041678343474271e-05, |
|
"loss": 0.887, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.6492581602373887, |
|
"grad_norm": 0.886139988899231, |
|
"learning_rate": 4.037813047589757e-05, |
|
"loss": 0.8107, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.6504451038575668, |
|
"grad_norm": 0.9182664155960083, |
|
"learning_rate": 4.0339418288789444e-05, |
|
"loss": 0.8721, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.6516320474777448, |
|
"grad_norm": 0.9091856479644775, |
|
"learning_rate": 4.030064702251678e-05, |
|
"loss": 0.8063, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.6528189910979229, |
|
"grad_norm": 0.971271812915802, |
|
"learning_rate": 4.02618168264056e-05, |
|
"loss": 0.934, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.6528189910979229, |
|
"eval_loss": 0.7894634008407593, |
|
"eval_runtime": 773.6717, |
|
"eval_samples_per_second": 2.293, |
|
"eval_steps_per_second": 0.574, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.6540059347181009, |
|
"grad_norm": 1.1243486404418945, |
|
"learning_rate": 4.022292785000885e-05, |
|
"loss": 0.8822, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.6551928783382789, |
|
"grad_norm": 0.9398186206817627, |
|
"learning_rate": 4.0183980243105915e-05, |
|
"loss": 0.8621, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.656379821958457, |
|
"grad_norm": 0.7023178935050964, |
|
"learning_rate": 4.0144974155701954e-05, |
|
"loss": 0.8111, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.657566765578635, |
|
"grad_norm": 0.939687192440033, |
|
"learning_rate": 4.0105909738027365e-05, |
|
"loss": 0.8812, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.658753709198813, |
|
"grad_norm": 1.0677947998046875, |
|
"learning_rate": 4.006678714053723e-05, |
|
"loss": 0.8459, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.6599406528189911, |
|
"grad_norm": 0.9122028350830078, |
|
"learning_rate": 4.002760651391068e-05, |
|
"loss": 0.8306, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.6611275964391692, |
|
"grad_norm": 0.8712452054023743, |
|
"learning_rate": 3.9988368009050355e-05, |
|
"loss": 0.8285, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.6623145400593472, |
|
"grad_norm": 0.7736614942550659, |
|
"learning_rate": 3.994907177708181e-05, |
|
"loss": 0.8848, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.6635014836795252, |
|
"grad_norm": 0.9067423343658447, |
|
"learning_rate": 3.990971796935293e-05, |
|
"loss": 0.9422, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.6646884272997032, |
|
"grad_norm": 0.8553429841995239, |
|
"learning_rate": 3.987030673743335e-05, |
|
"loss": 0.8574, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.6658753709198814, |
|
"grad_norm": 0.8687973618507385, |
|
"learning_rate": 3.9830838233113884e-05, |
|
"loss": 0.8979, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.6670623145400594, |
|
"grad_norm": 0.9838027954101562, |
|
"learning_rate": 3.9791312608405916e-05, |
|
"loss": 0.8126, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.6682492581602374, |
|
"grad_norm": 0.9024927020072937, |
|
"learning_rate": 3.9751730015540835e-05, |
|
"loss": 0.8863, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.6694362017804154, |
|
"grad_norm": 0.7636504769325256, |
|
"learning_rate": 3.9712090606969425e-05, |
|
"loss": 0.881, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.6706231454005934, |
|
"grad_norm": 1.080702543258667, |
|
"learning_rate": 3.967239453536131e-05, |
|
"loss": 0.8608, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.6718100890207716, |
|
"grad_norm": 0.9500882029533386, |
|
"learning_rate": 3.963264195360435e-05, |
|
"loss": 0.8783, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.6729970326409496, |
|
"grad_norm": 0.8997413516044617, |
|
"learning_rate": 3.9592833014804044e-05, |
|
"loss": 0.7662, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.6741839762611276, |
|
"grad_norm": 1.0616391897201538, |
|
"learning_rate": 3.955296787228294e-05, |
|
"loss": 0.9328, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.6753709198813056, |
|
"grad_norm": 0.9198963642120361, |
|
"learning_rate": 3.9513046679580075e-05, |
|
"loss": 0.8603, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.6765578635014837, |
|
"grad_norm": 0.8361203670501709, |
|
"learning_rate": 3.947306959045034e-05, |
|
"loss": 0.8299, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.6777448071216617, |
|
"grad_norm": 0.8562357425689697, |
|
"learning_rate": 3.943303675886391e-05, |
|
"loss": 0.8098, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.6789317507418398, |
|
"grad_norm": 0.8105937242507935, |
|
"learning_rate": 3.939294833900568e-05, |
|
"loss": 0.8345, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.6801186943620178, |
|
"grad_norm": 1.072594165802002, |
|
"learning_rate": 3.9352804485274586e-05, |
|
"loss": 0.8251, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.6813056379821959, |
|
"grad_norm": 0.7309070229530334, |
|
"learning_rate": 3.9312605352283124e-05, |
|
"loss": 0.8493, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.6824925816023739, |
|
"grad_norm": 0.9707894921302795, |
|
"learning_rate": 3.9272351094856665e-05, |
|
"loss": 0.9283, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.6836795252225519, |
|
"grad_norm": 0.742894172668457, |
|
"learning_rate": 3.9232041868032885e-05, |
|
"loss": 0.8594, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.68486646884273, |
|
"grad_norm": 0.8216899633407593, |
|
"learning_rate": 3.919167782706119e-05, |
|
"loss": 0.8136, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.686053412462908, |
|
"grad_norm": 0.9572916626930237, |
|
"learning_rate": 3.915125912740208e-05, |
|
"loss": 0.8865, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.6872403560830861, |
|
"grad_norm": 0.8739311695098877, |
|
"learning_rate": 3.911078592472659e-05, |
|
"loss": 0.9605, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.6884272997032641, |
|
"grad_norm": 0.8961655497550964, |
|
"learning_rate": 3.907025837491566e-05, |
|
"loss": 0.7693, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.6896142433234421, |
|
"grad_norm": 0.6542685627937317, |
|
"learning_rate": 3.902967663405956e-05, |
|
"loss": 0.7998, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.6908011869436201, |
|
"grad_norm": 0.7300246953964233, |
|
"learning_rate": 3.898904085845726e-05, |
|
"loss": 0.8037, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.6919881305637983, |
|
"grad_norm": 0.778400719165802, |
|
"learning_rate": 3.894835120461584e-05, |
|
"loss": 0.8407, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.6931750741839763, |
|
"grad_norm": 0.7665027379989624, |
|
"learning_rate": 3.8907607829249896e-05, |
|
"loss": 0.8662, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.6943620178041543, |
|
"grad_norm": 0.7500550150871277, |
|
"learning_rate": 3.8866810889280944e-05, |
|
"loss": 0.7828, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.6955489614243323, |
|
"grad_norm": 1.1609930992126465, |
|
"learning_rate": 3.882596054183678e-05, |
|
"loss": 0.8885, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.6967359050445104, |
|
"grad_norm": 0.8244234323501587, |
|
"learning_rate": 3.8785056944250905e-05, |
|
"loss": 0.8757, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.6979228486646885, |
|
"grad_norm": 0.9945170879364014, |
|
"learning_rate": 3.874410025406191e-05, |
|
"loss": 0.8839, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.6991097922848665, |
|
"grad_norm": 0.9832311868667603, |
|
"learning_rate": 3.8703090629012885e-05, |
|
"loss": 0.906, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.7002967359050445, |
|
"grad_norm": 0.8854712843894958, |
|
"learning_rate": 3.866202822705077e-05, |
|
"loss": 0.8103, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.7014836795252225, |
|
"grad_norm": 0.8822460770606995, |
|
"learning_rate": 3.862091320632579e-05, |
|
"loss": 0.8785, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.7026706231454006, |
|
"grad_norm": 0.9770189523696899, |
|
"learning_rate": 3.857974572519083e-05, |
|
"loss": 0.8647, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.7038575667655786, |
|
"grad_norm": 1.002086877822876, |
|
"learning_rate": 3.853852594220082e-05, |
|
"loss": 0.8382, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.7050445103857567, |
|
"grad_norm": 1.0117483139038086, |
|
"learning_rate": 3.849725401611212e-05, |
|
"loss": 0.8456, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.7062314540059347, |
|
"grad_norm": 0.8960864543914795, |
|
"learning_rate": 3.845593010588193e-05, |
|
"loss": 0.8474, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.7074183976261128, |
|
"grad_norm": 0.9335463047027588, |
|
"learning_rate": 3.8414554370667666e-05, |
|
"loss": 0.7904, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.7086053412462908, |
|
"grad_norm": 0.8023741841316223, |
|
"learning_rate": 3.837312696982633e-05, |
|
"loss": 0.9063, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.7097922848664688, |
|
"grad_norm": 0.9262803792953491, |
|
"learning_rate": 3.833164806291393e-05, |
|
"loss": 0.8648, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 0.7109792284866469, |
|
"grad_norm": 0.8695958852767944, |
|
"learning_rate": 3.829011780968483e-05, |
|
"loss": 0.7609, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 0.712166172106825, |
|
"grad_norm": 0.7884494662284851, |
|
"learning_rate": 3.824853637009118e-05, |
|
"loss": 0.8551, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.712166172106825, |
|
"eval_loss": 0.7863911390304565, |
|
"eval_runtime": 771.9015, |
|
"eval_samples_per_second": 2.298, |
|
"eval_steps_per_second": 0.575, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.713353115727003, |
|
"grad_norm": 0.6585323810577393, |
|
"learning_rate": 3.820690390428224e-05, |
|
"loss": 0.8449, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 0.714540059347181, |
|
"grad_norm": 1.00165855884552, |
|
"learning_rate": 3.816522057260382e-05, |
|
"loss": 0.8509, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 0.715727002967359, |
|
"grad_norm": 1.0901848077774048, |
|
"learning_rate": 3.812348653559762e-05, |
|
"loss": 0.9183, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.716913946587537, |
|
"grad_norm": 1.2764759063720703, |
|
"learning_rate": 3.808170195400064e-05, |
|
"loss": 0.8629, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 0.7181008902077152, |
|
"grad_norm": 1.0480196475982666, |
|
"learning_rate": 3.803986698874454e-05, |
|
"loss": 0.8116, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.7192878338278932, |
|
"grad_norm": 0.7966551184654236, |
|
"learning_rate": 3.7997981800955066e-05, |
|
"loss": 0.7801, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.7204747774480712, |
|
"grad_norm": 1.2469865083694458, |
|
"learning_rate": 3.795604655195133e-05, |
|
"loss": 0.8411, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 0.7216617210682492, |
|
"grad_norm": 0.8085722923278809, |
|
"learning_rate": 3.7914061403245305e-05, |
|
"loss": 0.8729, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 0.7228486646884273, |
|
"grad_norm": 0.9539546370506287, |
|
"learning_rate": 3.7872026516541146e-05, |
|
"loss": 0.8266, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.7240356083086054, |
|
"grad_norm": 0.8947449922561646, |
|
"learning_rate": 3.782994205373455e-05, |
|
"loss": 0.8971, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.7252225519287834, |
|
"grad_norm": 0.654983401298523, |
|
"learning_rate": 3.778780817691217e-05, |
|
"loss": 0.8984, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 0.7264094955489614, |
|
"grad_norm": 1.3490341901779175, |
|
"learning_rate": 3.7745625048350967e-05, |
|
"loss": 0.9364, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.7275964391691395, |
|
"grad_norm": 0.7951292395591736, |
|
"learning_rate": 3.7703392830517594e-05, |
|
"loss": 0.8804, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 0.7287833827893175, |
|
"grad_norm": 0.8905290961265564, |
|
"learning_rate": 3.766111168606778e-05, |
|
"loss": 0.7922, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 0.7299703264094956, |
|
"grad_norm": 0.976385235786438, |
|
"learning_rate": 3.761878177784568e-05, |
|
"loss": 0.7535, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.7311572700296736, |
|
"grad_norm": 0.865348219871521, |
|
"learning_rate": 3.757640326888327e-05, |
|
"loss": 0.7965, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 0.7323442136498516, |
|
"grad_norm": 1.9425514936447144, |
|
"learning_rate": 3.7533976322399704e-05, |
|
"loss": 0.9106, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 0.7335311572700297, |
|
"grad_norm": 0.8154645562171936, |
|
"learning_rate": 3.749150110180069e-05, |
|
"loss": 0.8822, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.7347181008902077, |
|
"grad_norm": 1.1397662162780762, |
|
"learning_rate": 3.744897777067787e-05, |
|
"loss": 0.9469, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 0.7359050445103857, |
|
"grad_norm": 0.8680721521377563, |
|
"learning_rate": 3.740640649280817e-05, |
|
"loss": 0.8945, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.7370919881305638, |
|
"grad_norm": 0.7108725309371948, |
|
"learning_rate": 3.7363787432153195e-05, |
|
"loss": 0.788, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.7382789317507419, |
|
"grad_norm": 0.7222614884376526, |
|
"learning_rate": 3.7321120752858576e-05, |
|
"loss": 0.8395, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 0.7394658753709199, |
|
"grad_norm": 0.888178288936615, |
|
"learning_rate": 3.727840661925334e-05, |
|
"loss": 0.7956, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 0.7406528189910979, |
|
"grad_norm": 0.8803139328956604, |
|
"learning_rate": 3.72356451958493e-05, |
|
"loss": 0.865, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.7418397626112759, |
|
"grad_norm": 1.3589036464691162, |
|
"learning_rate": 3.719283664734037e-05, |
|
"loss": 0.8589, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 0.7430267062314541, |
|
"grad_norm": 0.8697090148925781, |
|
"learning_rate": 3.7149981138602e-05, |
|
"loss": 0.8484, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 0.7442136498516321, |
|
"grad_norm": 0.9363373517990112, |
|
"learning_rate": 3.710707883469049e-05, |
|
"loss": 0.8558, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 0.7454005934718101, |
|
"grad_norm": 0.9355902075767517, |
|
"learning_rate": 3.706412990084237e-05, |
|
"loss": 0.7856, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 0.7465875370919881, |
|
"grad_norm": 0.9179161190986633, |
|
"learning_rate": 3.7021134502473765e-05, |
|
"loss": 0.8404, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 0.7477744807121661, |
|
"grad_norm": 1.0483746528625488, |
|
"learning_rate": 3.6978092805179764e-05, |
|
"loss": 0.9132, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.7489614243323442, |
|
"grad_norm": 0.6334797739982605, |
|
"learning_rate": 3.693500497473376e-05, |
|
"loss": 0.8527, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 0.7501483679525223, |
|
"grad_norm": 0.9985843300819397, |
|
"learning_rate": 3.6891871177086826e-05, |
|
"loss": 0.869, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 0.7513353115727003, |
|
"grad_norm": 0.7376993298530579, |
|
"learning_rate": 3.684869157836709e-05, |
|
"loss": 0.8631, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 0.7525222551928783, |
|
"grad_norm": 1.340173363685608, |
|
"learning_rate": 3.6805466344879065e-05, |
|
"loss": 0.9274, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 0.7537091988130564, |
|
"grad_norm": 0.8143832087516785, |
|
"learning_rate": 3.676219564310305e-05, |
|
"loss": 0.8822, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 0.7548961424332344, |
|
"grad_norm": 0.7084901928901672, |
|
"learning_rate": 3.671887963969443e-05, |
|
"loss": 0.7689, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 0.7560830860534125, |
|
"grad_norm": 0.8772728443145752, |
|
"learning_rate": 3.6675518501483086e-05, |
|
"loss": 0.8648, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 0.7572700296735905, |
|
"grad_norm": 0.8810312747955322, |
|
"learning_rate": 3.663211239547274e-05, |
|
"loss": 0.864, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 0.7584569732937686, |
|
"grad_norm": 0.7641283869743347, |
|
"learning_rate": 3.658866148884029e-05, |
|
"loss": 0.8561, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 0.7596439169139466, |
|
"grad_norm": 0.7614161372184753, |
|
"learning_rate": 3.654516594893519e-05, |
|
"loss": 0.8465, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 0.7608308605341246, |
|
"grad_norm": 0.8121643662452698, |
|
"learning_rate": 3.6501625943278805e-05, |
|
"loss": 0.8615, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 0.7620178041543026, |
|
"grad_norm": 0.7303301095962524, |
|
"learning_rate": 3.645804163956374e-05, |
|
"loss": 0.8287, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 0.7632047477744807, |
|
"grad_norm": 0.8093650937080383, |
|
"learning_rate": 3.641441320565322e-05, |
|
"loss": 0.7774, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 0.7643916913946588, |
|
"grad_norm": 0.9081480503082275, |
|
"learning_rate": 3.637074080958044e-05, |
|
"loss": 0.8813, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 0.7655786350148368, |
|
"grad_norm": 0.9593875408172607, |
|
"learning_rate": 3.632702461954792e-05, |
|
"loss": 0.8371, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.7667655786350148, |
|
"grad_norm": 0.862206757068634, |
|
"learning_rate": 3.628326480392683e-05, |
|
"loss": 0.8444, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 0.7679525222551928, |
|
"grad_norm": 0.7164747714996338, |
|
"learning_rate": 3.623946153125638e-05, |
|
"loss": 0.9414, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 0.769139465875371, |
|
"grad_norm": 0.964017391204834, |
|
"learning_rate": 3.619561497024315e-05, |
|
"loss": 0.8678, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 0.770326409495549, |
|
"grad_norm": 0.7740942239761353, |
|
"learning_rate": 3.615172528976043e-05, |
|
"loss": 0.8334, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 0.771513353115727, |
|
"grad_norm": 0.9031283855438232, |
|
"learning_rate": 3.6107792658847595e-05, |
|
"loss": 0.8742, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.771513353115727, |
|
"eval_loss": 0.7851033806800842, |
|
"eval_runtime": 773.4625, |
|
"eval_samples_per_second": 2.294, |
|
"eval_steps_per_second": 0.574, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.772700296735905, |
|
"grad_norm": 0.9258557558059692, |
|
"learning_rate": 3.6063817246709444e-05, |
|
"loss": 0.8296, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.7738872403560831, |
|
"grad_norm": 0.8308103084564209, |
|
"learning_rate": 3.6019799222715544e-05, |
|
"loss": 0.8445, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 0.7750741839762612, |
|
"grad_norm": 0.8182960748672485, |
|
"learning_rate": 3.5975738756399574e-05, |
|
"loss": 0.8563, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 0.7762611275964392, |
|
"grad_norm": 0.9615590572357178, |
|
"learning_rate": 3.593163601745869e-05, |
|
"loss": 0.9132, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 0.7774480712166172, |
|
"grad_norm": 1.027933955192566, |
|
"learning_rate": 3.588749117575284e-05, |
|
"loss": 0.8199, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 0.7786350148367952, |
|
"grad_norm": 0.9510583877563477, |
|
"learning_rate": 3.584330440130415e-05, |
|
"loss": 0.8841, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 0.7798219584569733, |
|
"grad_norm": 1.3776129484176636, |
|
"learning_rate": 3.5799075864296244e-05, |
|
"loss": 0.8644, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 0.7810089020771513, |
|
"grad_norm": 0.9228107333183289, |
|
"learning_rate": 3.5754805735073575e-05, |
|
"loss": 0.8239, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 0.7821958456973294, |
|
"grad_norm": 0.9446859359741211, |
|
"learning_rate": 3.571049418414081e-05, |
|
"loss": 0.8789, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 0.7833827893175074, |
|
"grad_norm": 0.8383123278617859, |
|
"learning_rate": 3.566614138216212e-05, |
|
"loss": 0.8505, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.7845697329376855, |
|
"grad_norm": 0.9476184248924255, |
|
"learning_rate": 3.562174749996059e-05, |
|
"loss": 0.8603, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 0.7857566765578635, |
|
"grad_norm": 0.8598759770393372, |
|
"learning_rate": 3.557731270851751e-05, |
|
"loss": 0.8384, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 0.7869436201780415, |
|
"grad_norm": 0.7761598825454712, |
|
"learning_rate": 3.553283717897171e-05, |
|
"loss": 0.8635, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 0.7881305637982196, |
|
"grad_norm": 0.7737132906913757, |
|
"learning_rate": 3.548832108261896e-05, |
|
"loss": 0.8172, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 0.7893175074183977, |
|
"grad_norm": 0.8764254450798035, |
|
"learning_rate": 3.5443764590911234e-05, |
|
"loss": 0.8453, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 0.7905044510385757, |
|
"grad_norm": 0.9228744506835938, |
|
"learning_rate": 3.5399167875456094e-05, |
|
"loss": 0.8332, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 0.7916913946587537, |
|
"grad_norm": 0.9316686987876892, |
|
"learning_rate": 3.5354531108016054e-05, |
|
"loss": 0.8833, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 0.7928783382789317, |
|
"grad_norm": 0.9275509119033813, |
|
"learning_rate": 3.5309854460507845e-05, |
|
"loss": 0.8096, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 0.7940652818991097, |
|
"grad_norm": 0.7147977352142334, |
|
"learning_rate": 3.526513810500182e-05, |
|
"loss": 0.8531, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 0.7952522255192879, |
|
"grad_norm": 0.8468539118766785, |
|
"learning_rate": 3.5220382213721256e-05, |
|
"loss": 0.8305, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 0.7964391691394659, |
|
"grad_norm": 0.9143972992897034, |
|
"learning_rate": 3.517558695904171e-05, |
|
"loss": 0.8462, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 0.7976261127596439, |
|
"grad_norm": 0.8958293199539185, |
|
"learning_rate": 3.513075251349033e-05, |
|
"loss": 0.8374, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 0.7988130563798219, |
|
"grad_norm": 0.9110985398292542, |
|
"learning_rate": 3.508587904974522e-05, |
|
"loss": 0.8081, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.0743153095245361, |
|
"learning_rate": 3.5040966740634746e-05, |
|
"loss": 0.8702, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 0.8011869436201781, |
|
"grad_norm": 0.9236382842063904, |
|
"learning_rate": 3.499601575913691e-05, |
|
"loss": 0.8654, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.8023738872403561, |
|
"grad_norm": 0.7747888565063477, |
|
"learning_rate": 3.495102627837863e-05, |
|
"loss": 0.8405, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 0.8035608308605341, |
|
"grad_norm": 1.18403959274292, |
|
"learning_rate": 3.490599847163512e-05, |
|
"loss": 0.9657, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 0.8047477744807122, |
|
"grad_norm": 0.8413063287734985, |
|
"learning_rate": 3.486093251232921e-05, |
|
"loss": 0.816, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 0.8059347181008902, |
|
"grad_norm": 1.244125485420227, |
|
"learning_rate": 3.4815828574030674e-05, |
|
"loss": 0.8854, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 0.8071216617210683, |
|
"grad_norm": 0.6361657977104187, |
|
"learning_rate": 3.477068683045552e-05, |
|
"loss": 0.8278, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 0.8083086053412463, |
|
"grad_norm": 0.6628708243370056, |
|
"learning_rate": 3.472550745546542e-05, |
|
"loss": 0.8873, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 0.8094955489614243, |
|
"grad_norm": 1.2260278463363647, |
|
"learning_rate": 3.468029062306694e-05, |
|
"loss": 0.8603, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.8106824925816024, |
|
"grad_norm": 0.9426025748252869, |
|
"learning_rate": 3.4635036507410935e-05, |
|
"loss": 0.8679, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 0.8118694362017804, |
|
"grad_norm": 0.6960638761520386, |
|
"learning_rate": 3.4589745282791844e-05, |
|
"loss": 0.7996, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 0.8130563798219584, |
|
"grad_norm": 0.8688523173332214, |
|
"learning_rate": 3.4544417123647024e-05, |
|
"loss": 0.8351, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 0.8142433234421365, |
|
"grad_norm": 0.9678128957748413, |
|
"learning_rate": 3.4499052204556104e-05, |
|
"loss": 0.9089, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 0.8154302670623146, |
|
"grad_norm": 0.9642847180366516, |
|
"learning_rate": 3.4453650700240276e-05, |
|
"loss": 0.8045, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 0.8166172106824926, |
|
"grad_norm": 0.7829703688621521, |
|
"learning_rate": 3.4408212785561645e-05, |
|
"loss": 0.8584, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 0.8178041543026706, |
|
"grad_norm": 1.0218199491500854, |
|
"learning_rate": 3.436273863552254e-05, |
|
"loss": 0.8441, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 0.8189910979228486, |
|
"grad_norm": 0.9257336854934692, |
|
"learning_rate": 3.431722842526487e-05, |
|
"loss": 0.8729, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.8201780415430268, |
|
"grad_norm": 0.9871886372566223, |
|
"learning_rate": 3.427168233006941e-05, |
|
"loss": 0.8093, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 0.8213649851632048, |
|
"grad_norm": 1.0205531120300293, |
|
"learning_rate": 3.422610052535514e-05, |
|
"loss": 0.9016, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 0.8225519287833828, |
|
"grad_norm": 0.848477303981781, |
|
"learning_rate": 3.418048318667858e-05, |
|
"loss": 0.8633, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 0.8237388724035608, |
|
"grad_norm": 1.0038056373596191, |
|
"learning_rate": 3.4134830489733124e-05, |
|
"loss": 0.9181, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 0.8249258160237388, |
|
"grad_norm": 0.6654286980628967, |
|
"learning_rate": 3.408914261034834e-05, |
|
"loss": 0.819, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 0.826112759643917, |
|
"grad_norm": 0.9518380165100098, |
|
"learning_rate": 3.404341972448928e-05, |
|
"loss": 0.8409, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 0.827299703264095, |
|
"grad_norm": 0.8517202734947205, |
|
"learning_rate": 3.399766200825583e-05, |
|
"loss": 0.8002, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 0.828486646884273, |
|
"grad_norm": 0.872999906539917, |
|
"learning_rate": 3.3951869637882045e-05, |
|
"loss": 0.8235, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 0.829673590504451, |
|
"grad_norm": 0.9970819354057312, |
|
"learning_rate": 3.390604278973543e-05, |
|
"loss": 0.8936, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 0.8308605341246291, |
|
"grad_norm": 0.9367091059684753, |
|
"learning_rate": 3.386018164031627e-05, |
|
"loss": 0.8477, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.8308605341246291, |
|
"eval_loss": 0.7821624875068665, |
|
"eval_runtime": 773.75, |
|
"eval_samples_per_second": 2.293, |
|
"eval_steps_per_second": 0.574, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.8320474777448071, |
|
"grad_norm": 1.0959240198135376, |
|
"learning_rate": 3.381428636625698e-05, |
|
"loss": 0.8012, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 0.8332344213649852, |
|
"grad_norm": 0.7259630560874939, |
|
"learning_rate": 3.3768357144321406e-05, |
|
"loss": 0.823, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 0.8344213649851632, |
|
"grad_norm": 0.8429070711135864, |
|
"learning_rate": 3.372239415140413e-05, |
|
"loss": 0.9024, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 0.8356083086053413, |
|
"grad_norm": 0.859829306602478, |
|
"learning_rate": 3.367639756452981e-05, |
|
"loss": 0.8471, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 0.8367952522255193, |
|
"grad_norm": 0.8430136442184448, |
|
"learning_rate": 3.363036756085247e-05, |
|
"loss": 0.7838, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.8379821958456973, |
|
"grad_norm": 0.7361006140708923, |
|
"learning_rate": 3.3584304317654866e-05, |
|
"loss": 0.8315, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 0.8391691394658753, |
|
"grad_norm": 0.9041669368743896, |
|
"learning_rate": 3.3538208012347774e-05, |
|
"loss": 0.8307, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 0.8403560830860534, |
|
"grad_norm": 0.8590983152389526, |
|
"learning_rate": 3.349207882246927e-05, |
|
"loss": 0.8197, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.8415430267062315, |
|
"grad_norm": 0.8333597779273987, |
|
"learning_rate": 3.3445916925684125e-05, |
|
"loss": 0.8802, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 0.8427299703264095, |
|
"grad_norm": 0.8488454818725586, |
|
"learning_rate": 3.339972249978306e-05, |
|
"loss": 0.8412, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.8439169139465875, |
|
"grad_norm": 0.990714430809021, |
|
"learning_rate": 3.335349572268209e-05, |
|
"loss": 0.9064, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.8451038575667655, |
|
"grad_norm": 0.9207063317298889, |
|
"learning_rate": 3.3307236772421823e-05, |
|
"loss": 0.8309, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 0.8462908011869437, |
|
"grad_norm": 0.9114146828651428, |
|
"learning_rate": 3.326094582716678e-05, |
|
"loss": 0.8841, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.8474777448071217, |
|
"grad_norm": 1.2737470865249634, |
|
"learning_rate": 3.32146230652047e-05, |
|
"loss": 0.8373, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.8486646884272997, |
|
"grad_norm": 0.9316775798797607, |
|
"learning_rate": 3.3168268664945886e-05, |
|
"loss": 0.8177, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.8498516320474777, |
|
"grad_norm": 0.9175634980201721, |
|
"learning_rate": 3.3121882804922484e-05, |
|
"loss": 0.8933, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 0.8510385756676558, |
|
"grad_norm": 0.9439007639884949, |
|
"learning_rate": 3.3075465663787794e-05, |
|
"loss": 0.7867, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.8522255192878339, |
|
"grad_norm": 0.893551230430603, |
|
"learning_rate": 3.30290174203156e-05, |
|
"loss": 0.9118, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 0.8534124629080119, |
|
"grad_norm": 1.0411717891693115, |
|
"learning_rate": 3.29825382533995e-05, |
|
"loss": 0.8291, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 0.8545994065281899, |
|
"grad_norm": 0.8265506029129028, |
|
"learning_rate": 3.293602834205216e-05, |
|
"loss": 0.8228, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.8557863501483679, |
|
"grad_norm": 0.9281073808670044, |
|
"learning_rate": 3.2889487865404674e-05, |
|
"loss": 0.8694, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 0.856973293768546, |
|
"grad_norm": 0.7977082133293152, |
|
"learning_rate": 3.284291700270584e-05, |
|
"loss": 0.8242, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 0.858160237388724, |
|
"grad_norm": 0.8856098055839539, |
|
"learning_rate": 3.279631593332151e-05, |
|
"loss": 0.8507, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.8593471810089021, |
|
"grad_norm": 1.0277818441390991, |
|
"learning_rate": 3.2749684836733866e-05, |
|
"loss": 0.8775, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 0.8605341246290801, |
|
"grad_norm": 1.0487391948699951, |
|
"learning_rate": 3.270302389254074e-05, |
|
"loss": 0.8404, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.8617210682492582, |
|
"grad_norm": 0.8497503995895386, |
|
"learning_rate": 3.26563332804549e-05, |
|
"loss": 0.9356, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.8629080118694362, |
|
"grad_norm": 0.7366178631782532, |
|
"learning_rate": 3.2609613180303413e-05, |
|
"loss": 0.8132, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 0.8640949554896142, |
|
"grad_norm": 0.8012394905090332, |
|
"learning_rate": 3.25628637720269e-05, |
|
"loss": 0.7763, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 0.8652818991097923, |
|
"grad_norm": 1.029145359992981, |
|
"learning_rate": 3.251608523567886e-05, |
|
"loss": 0.8116, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 0.8664688427299704, |
|
"grad_norm": 0.9467004537582397, |
|
"learning_rate": 3.246927775142498e-05, |
|
"loss": 0.8444, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 0.8676557863501484, |
|
"grad_norm": 0.8625933527946472, |
|
"learning_rate": 3.242244149954243e-05, |
|
"loss": 0.9053, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 0.8688427299703264, |
|
"grad_norm": 1.0368592739105225, |
|
"learning_rate": 3.237557666041922e-05, |
|
"loss": 0.9067, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 0.8700296735905044, |
|
"grad_norm": 0.760718822479248, |
|
"learning_rate": 3.232868341455339e-05, |
|
"loss": 0.8853, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 0.8712166172106824, |
|
"grad_norm": 0.7736045718193054, |
|
"learning_rate": 3.2281761942552446e-05, |
|
"loss": 0.9119, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 0.8724035608308606, |
|
"grad_norm": 0.9275757670402527, |
|
"learning_rate": 3.223481242513259e-05, |
|
"loss": 0.7932, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 0.8735905044510386, |
|
"grad_norm": 0.8196761012077332, |
|
"learning_rate": 3.218783504311803e-05, |
|
"loss": 0.8893, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 0.8747774480712166, |
|
"grad_norm": 0.8558308482170105, |
|
"learning_rate": 3.21408299774403e-05, |
|
"loss": 0.8948, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 0.8759643916913946, |
|
"grad_norm": 0.7309697270393372, |
|
"learning_rate": 3.209379740913756e-05, |
|
"loss": 0.8142, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.8771513353115727, |
|
"grad_norm": 0.7917837500572205, |
|
"learning_rate": 3.204673751935389e-05, |
|
"loss": 0.8557, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 0.8783382789317508, |
|
"grad_norm": 1.0087717771530151, |
|
"learning_rate": 3.199965048933859e-05, |
|
"loss": 0.9215, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 0.8795252225519288, |
|
"grad_norm": 0.9754852056503296, |
|
"learning_rate": 3.195253650044552e-05, |
|
"loss": 0.8229, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 0.8807121661721068, |
|
"grad_norm": 0.8485432863235474, |
|
"learning_rate": 3.1905395734132326e-05, |
|
"loss": 0.8653, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 0.8818991097922849, |
|
"grad_norm": 0.8773938417434692, |
|
"learning_rate": 3.185822837195983e-05, |
|
"loss": 0.9774, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 0.8830860534124629, |
|
"grad_norm": 0.7707668542861938, |
|
"learning_rate": 3.181103459559126e-05, |
|
"loss": 0.8606, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.884272997032641, |
|
"grad_norm": 0.7578542828559875, |
|
"learning_rate": 3.176381458679158e-05, |
|
"loss": 0.8229, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 0.885459940652819, |
|
"grad_norm": 0.9707187414169312, |
|
"learning_rate": 3.17165685274268e-05, |
|
"loss": 0.8259, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 0.886646884272997, |
|
"grad_norm": 1.3090546131134033, |
|
"learning_rate": 3.166929659946325e-05, |
|
"loss": 0.8843, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 0.8878338278931751, |
|
"grad_norm": 0.7714403867721558, |
|
"learning_rate": 3.1621998984966894e-05, |
|
"loss": 0.8204, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 0.8890207715133531, |
|
"grad_norm": 1.0389307737350464, |
|
"learning_rate": 3.157467586610262e-05, |
|
"loss": 0.8493, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 0.8902077151335311, |
|
"grad_norm": 0.8754733204841614, |
|
"learning_rate": 3.152732742513357e-05, |
|
"loss": 0.8492, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.8902077151335311, |
|
"eval_loss": 0.7797255516052246, |
|
"eval_runtime": 769.2673, |
|
"eval_samples_per_second": 2.306, |
|
"eval_steps_per_second": 0.577, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.8913946587537092, |
|
"grad_norm": 0.5790079832077026, |
|
"learning_rate": 3.147995384442039e-05, |
|
"loss": 0.7781, |
|
"step": 7510 |
|
}, |
|
{ |
|
"epoch": 0.8925816023738873, |
|
"grad_norm": 0.8539468050003052, |
|
"learning_rate": 3.143255530642056e-05, |
|
"loss": 0.8033, |
|
"step": 7520 |
|
}, |
|
{ |
|
"epoch": 0.8937685459940653, |
|
"grad_norm": 0.7788002490997314, |
|
"learning_rate": 3.138513199368768e-05, |
|
"loss": 0.8488, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 0.8949554896142433, |
|
"grad_norm": 0.6952248215675354, |
|
"learning_rate": 3.133768408887076e-05, |
|
"loss": 0.8358, |
|
"step": 7540 |
|
}, |
|
{ |
|
"epoch": 0.8961424332344213, |
|
"grad_norm": 0.7432988882064819, |
|
"learning_rate": 3.129021177471354e-05, |
|
"loss": 0.8785, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 0.8973293768545995, |
|
"grad_norm": 0.8068860173225403, |
|
"learning_rate": 3.124271523405377e-05, |
|
"loss": 0.8018, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 0.8985163204747775, |
|
"grad_norm": 0.8224064707756042, |
|
"learning_rate": 3.11951946498225e-05, |
|
"loss": 0.7825, |
|
"step": 7570 |
|
}, |
|
{ |
|
"epoch": 0.8997032640949555, |
|
"grad_norm": 1.1241661310195923, |
|
"learning_rate": 3.114765020504339e-05, |
|
"loss": 0.9344, |
|
"step": 7580 |
|
}, |
|
{ |
|
"epoch": 0.9008902077151335, |
|
"grad_norm": 1.0378000736236572, |
|
"learning_rate": 3.110008208283197e-05, |
|
"loss": 0.8613, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 0.9020771513353115, |
|
"grad_norm": 0.9253892302513123, |
|
"learning_rate": 3.1052490466395023e-05, |
|
"loss": 0.8725, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 0.9032640949554896, |
|
"grad_norm": 0.9954454898834229, |
|
"learning_rate": 3.1004875539029755e-05, |
|
"loss": 0.7652, |
|
"step": 7610 |
|
}, |
|
{ |
|
"epoch": 0.9044510385756677, |
|
"grad_norm": 0.9441783428192139, |
|
"learning_rate": 3.0957237484123196e-05, |
|
"loss": 0.9363, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 0.9056379821958457, |
|
"grad_norm": 0.8821224570274353, |
|
"learning_rate": 3.090957648515142e-05, |
|
"loss": 0.9007, |
|
"step": 7630 |
|
}, |
|
{ |
|
"epoch": 0.9068249258160237, |
|
"grad_norm": 0.8489567041397095, |
|
"learning_rate": 3.086189272567891e-05, |
|
"loss": 0.9078, |
|
"step": 7640 |
|
}, |
|
{ |
|
"epoch": 0.9080118694362018, |
|
"grad_norm": 0.894049882888794, |
|
"learning_rate": 3.0814186389357765e-05, |
|
"loss": 0.8681, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 0.9091988130563798, |
|
"grad_norm": 1.0218708515167236, |
|
"learning_rate": 3.076645765992706e-05, |
|
"loss": 0.8021, |
|
"step": 7660 |
|
}, |
|
{ |
|
"epoch": 0.9103857566765579, |
|
"grad_norm": 1.236533761024475, |
|
"learning_rate": 3.0718706721212116e-05, |
|
"loss": 0.8619, |
|
"step": 7670 |
|
}, |
|
{ |
|
"epoch": 0.9115727002967359, |
|
"grad_norm": 0.8600239157676697, |
|
"learning_rate": 3.067093375712379e-05, |
|
"loss": 0.7805, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.912759643916914, |
|
"grad_norm": 0.8407261371612549, |
|
"learning_rate": 3.062313895165777e-05, |
|
"loss": 0.834, |
|
"step": 7690 |
|
}, |
|
{ |
|
"epoch": 0.913946587537092, |
|
"grad_norm": 1.0202083587646484, |
|
"learning_rate": 3.057532248889386e-05, |
|
"loss": 0.8123, |
|
"step": 7700 |
|
}, |
|
{ |
|
"epoch": 0.91513353115727, |
|
"grad_norm": 1.0518484115600586, |
|
"learning_rate": 3.052748455299529e-05, |
|
"loss": 0.7799, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 0.916320474777448, |
|
"grad_norm": 0.6867684721946716, |
|
"learning_rate": 3.047962532820797e-05, |
|
"loss": 0.7433, |
|
"step": 7720 |
|
}, |
|
{ |
|
"epoch": 0.9175074183976261, |
|
"grad_norm": 0.8702245354652405, |
|
"learning_rate": 3.0431744998859834e-05, |
|
"loss": 0.9001, |
|
"step": 7730 |
|
}, |
|
{ |
|
"epoch": 0.9186943620178042, |
|
"grad_norm": 0.7238080501556396, |
|
"learning_rate": 3.0383843749360063e-05, |
|
"loss": 0.8513, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 0.9198813056379822, |
|
"grad_norm": 0.8329240083694458, |
|
"learning_rate": 3.0335921764198437e-05, |
|
"loss": 0.8731, |
|
"step": 7750 |
|
}, |
|
{ |
|
"epoch": 0.9210682492581602, |
|
"grad_norm": 0.6718214750289917, |
|
"learning_rate": 3.02879792279446e-05, |
|
"loss": 0.796, |
|
"step": 7760 |
|
}, |
|
{ |
|
"epoch": 0.9222551928783382, |
|
"grad_norm": 0.8724163770675659, |
|
"learning_rate": 3.0240016325247332e-05, |
|
"loss": 0.812, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 0.9234421364985164, |
|
"grad_norm": 0.820673942565918, |
|
"learning_rate": 3.019203324083386e-05, |
|
"loss": 0.8487, |
|
"step": 7780 |
|
}, |
|
{ |
|
"epoch": 0.9246290801186944, |
|
"grad_norm": 0.8288638591766357, |
|
"learning_rate": 3.0144030159509145e-05, |
|
"loss": 0.9421, |
|
"step": 7790 |
|
}, |
|
{ |
|
"epoch": 0.9258160237388724, |
|
"grad_norm": 0.8184947371482849, |
|
"learning_rate": 3.0096007266155167e-05, |
|
"loss": 0.8175, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 0.9270029673590504, |
|
"grad_norm": 0.7271890044212341, |
|
"learning_rate": 3.0047964745730188e-05, |
|
"loss": 0.7866, |
|
"step": 7810 |
|
}, |
|
{ |
|
"epoch": 0.9281899109792285, |
|
"grad_norm": 0.8679256439208984, |
|
"learning_rate": 2.9999902783268096e-05, |
|
"loss": 0.9335, |
|
"step": 7820 |
|
}, |
|
{ |
|
"epoch": 0.9293768545994066, |
|
"grad_norm": 0.9273269772529602, |
|
"learning_rate": 2.995182156387763e-05, |
|
"loss": 0.8438, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 0.9305637982195846, |
|
"grad_norm": 1.0963612794876099, |
|
"learning_rate": 2.9903721272741726e-05, |
|
"loss": 0.8234, |
|
"step": 7840 |
|
}, |
|
{ |
|
"epoch": 0.9317507418397626, |
|
"grad_norm": 0.8465256094932556, |
|
"learning_rate": 2.9855602095116746e-05, |
|
"loss": 0.9186, |
|
"step": 7850 |
|
}, |
|
{ |
|
"epoch": 0.9329376854599406, |
|
"grad_norm": 0.6978071331977844, |
|
"learning_rate": 2.9807464216331815e-05, |
|
"loss": 0.7885, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 0.9341246290801187, |
|
"grad_norm": 0.7609380483627319, |
|
"learning_rate": 2.975930782178807e-05, |
|
"loss": 0.8534, |
|
"step": 7870 |
|
}, |
|
{ |
|
"epoch": 0.9353115727002967, |
|
"grad_norm": 1.0055066347122192, |
|
"learning_rate": 2.9711133096957962e-05, |
|
"loss": 0.886, |
|
"step": 7880 |
|
}, |
|
{ |
|
"epoch": 0.9364985163204748, |
|
"grad_norm": 0.7272253036499023, |
|
"learning_rate": 2.9662940227384544e-05, |
|
"loss": 0.8306, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 0.9376854599406528, |
|
"grad_norm": 0.877644419670105, |
|
"learning_rate": 2.961472939868075e-05, |
|
"loss": 0.8807, |
|
"step": 7900 |
|
}, |
|
{ |
|
"epoch": 0.9388724035608309, |
|
"grad_norm": 0.8707640171051025, |
|
"learning_rate": 2.95665007965287e-05, |
|
"loss": 0.9264, |
|
"step": 7910 |
|
}, |
|
{ |
|
"epoch": 0.9400593471810089, |
|
"grad_norm": 0.9951992034912109, |
|
"learning_rate": 2.9518254606678935e-05, |
|
"loss": 0.8345, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 0.9412462908011869, |
|
"grad_norm": 0.9738534092903137, |
|
"learning_rate": 2.946999101494976e-05, |
|
"loss": 0.8475, |
|
"step": 7930 |
|
}, |
|
{ |
|
"epoch": 0.942433234421365, |
|
"grad_norm": 0.8127856254577637, |
|
"learning_rate": 2.94217102072265e-05, |
|
"loss": 0.7886, |
|
"step": 7940 |
|
}, |
|
{ |
|
"epoch": 0.9436201780415431, |
|
"grad_norm": 0.8734619617462158, |
|
"learning_rate": 2.9373412369460774e-05, |
|
"loss": 0.8797, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 0.9448071216617211, |
|
"grad_norm": 0.7642955780029297, |
|
"learning_rate": 2.93250976876698e-05, |
|
"loss": 0.782, |
|
"step": 7960 |
|
}, |
|
{ |
|
"epoch": 0.9459940652818991, |
|
"grad_norm": 1.211449146270752, |
|
"learning_rate": 2.927676634793567e-05, |
|
"loss": 0.8433, |
|
"step": 7970 |
|
}, |
|
{ |
|
"epoch": 0.9471810089020771, |
|
"grad_norm": 1.0548808574676514, |
|
"learning_rate": 2.9228418536404633e-05, |
|
"loss": 0.8425, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 0.9483679525222551, |
|
"grad_norm": 0.7786169052124023, |
|
"learning_rate": 2.9180054439286377e-05, |
|
"loss": 0.7831, |
|
"step": 7990 |
|
}, |
|
{ |
|
"epoch": 0.9495548961424333, |
|
"grad_norm": 0.7001704573631287, |
|
"learning_rate": 2.9131674242853318e-05, |
|
"loss": 0.8113, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.9495548961424333, |
|
"eval_loss": 0.7776389718055725, |
|
"eval_runtime": 768.6108, |
|
"eval_samples_per_second": 2.308, |
|
"eval_steps_per_second": 0.578, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.9507418397626113, |
|
"grad_norm": 0.7324917316436768, |
|
"learning_rate": 2.9083278133439883e-05, |
|
"loss": 0.8663, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 0.9519287833827893, |
|
"grad_norm": 1.0254888534545898, |
|
"learning_rate": 2.903486629744176e-05, |
|
"loss": 0.8569, |
|
"step": 8020 |
|
}, |
|
{ |
|
"epoch": 0.9531157270029673, |
|
"grad_norm": 1.0356833934783936, |
|
"learning_rate": 2.8986438921315234e-05, |
|
"loss": 0.8032, |
|
"step": 8030 |
|
}, |
|
{ |
|
"epoch": 0.9543026706231454, |
|
"grad_norm": 0.8793463706970215, |
|
"learning_rate": 2.8937996191576432e-05, |
|
"loss": 0.841, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 0.9554896142433235, |
|
"grad_norm": 0.891936182975769, |
|
"learning_rate": 2.888953829480062e-05, |
|
"loss": 0.7662, |
|
"step": 8050 |
|
}, |
|
{ |
|
"epoch": 0.9566765578635015, |
|
"grad_norm": 1.5397156476974487, |
|
"learning_rate": 2.884106541762148e-05, |
|
"loss": 0.8108, |
|
"step": 8060 |
|
}, |
|
{ |
|
"epoch": 0.9578635014836795, |
|
"grad_norm": 0.9605924487113953, |
|
"learning_rate": 2.8792577746730375e-05, |
|
"loss": 0.8661, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 0.9590504451038576, |
|
"grad_norm": 0.9698401093482971, |
|
"learning_rate": 2.8744075468875664e-05, |
|
"loss": 0.8318, |
|
"step": 8080 |
|
}, |
|
{ |
|
"epoch": 0.9602373887240356, |
|
"grad_norm": 0.7636862993240356, |
|
"learning_rate": 2.8695558770861956e-05, |
|
"loss": 0.8516, |
|
"step": 8090 |
|
}, |
|
{ |
|
"epoch": 0.9614243323442137, |
|
"grad_norm": 0.8955727815628052, |
|
"learning_rate": 2.8647027839549407e-05, |
|
"loss": 0.755, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 0.9626112759643917, |
|
"grad_norm": 1.0949968099594116, |
|
"learning_rate": 2.8598482861852976e-05, |
|
"loss": 0.8672, |
|
"step": 8110 |
|
}, |
|
{ |
|
"epoch": 0.9637982195845697, |
|
"grad_norm": 0.9382548928260803, |
|
"learning_rate": 2.8549924024741736e-05, |
|
"loss": 0.811, |
|
"step": 8120 |
|
}, |
|
{ |
|
"epoch": 0.9649851632047478, |
|
"grad_norm": 0.6319847106933594, |
|
"learning_rate": 2.8501351515238135e-05, |
|
"loss": 0.826, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 0.9661721068249258, |
|
"grad_norm": 0.8856794834136963, |
|
"learning_rate": 2.8452765520417264e-05, |
|
"loss": 0.8287, |
|
"step": 8140 |
|
}, |
|
{ |
|
"epoch": 0.9673590504451038, |
|
"grad_norm": 0.9224793314933777, |
|
"learning_rate": 2.840416622740617e-05, |
|
"loss": 0.834, |
|
"step": 8150 |
|
}, |
|
{ |
|
"epoch": 0.9685459940652819, |
|
"grad_norm": 0.8026999831199646, |
|
"learning_rate": 2.8355553823383118e-05, |
|
"loss": 0.9407, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 0.96973293768546, |
|
"grad_norm": 0.9832804203033447, |
|
"learning_rate": 2.8306928495576868e-05, |
|
"loss": 0.8656, |
|
"step": 8170 |
|
}, |
|
{ |
|
"epoch": 0.970919881305638, |
|
"grad_norm": 0.9716234803199768, |
|
"learning_rate": 2.8258290431265944e-05, |
|
"loss": 0.9096, |
|
"step": 8180 |
|
}, |
|
{ |
|
"epoch": 0.972106824925816, |
|
"grad_norm": 0.8760083317756653, |
|
"learning_rate": 2.820963981777794e-05, |
|
"loss": 0.8766, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 0.973293768545994, |
|
"grad_norm": 1.0046391487121582, |
|
"learning_rate": 2.816097684248877e-05, |
|
"loss": 0.895, |
|
"step": 8200 |
|
}, |
|
{ |
|
"epoch": 0.9744807121661722, |
|
"grad_norm": 0.6522348523139954, |
|
"learning_rate": 2.8112301692821964e-05, |
|
"loss": 0.7833, |
|
"step": 8210 |
|
}, |
|
{ |
|
"epoch": 0.9756676557863502, |
|
"grad_norm": 1.0381343364715576, |
|
"learning_rate": 2.8063614556247948e-05, |
|
"loss": 0.8459, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 0.9768545994065282, |
|
"grad_norm": 0.6645428538322449, |
|
"learning_rate": 2.8014915620283294e-05, |
|
"loss": 0.9006, |
|
"step": 8230 |
|
}, |
|
{ |
|
"epoch": 0.9780415430267062, |
|
"grad_norm": 0.9998354315757751, |
|
"learning_rate": 2.796620507249005e-05, |
|
"loss": 0.7815, |
|
"step": 8240 |
|
}, |
|
{ |
|
"epoch": 0.9792284866468842, |
|
"grad_norm": 0.9861043095588684, |
|
"learning_rate": 2.7917483100474955e-05, |
|
"loss": 0.9115, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 0.9804154302670623, |
|
"grad_norm": 1.03464937210083, |
|
"learning_rate": 2.7868749891888767e-05, |
|
"loss": 0.7882, |
|
"step": 8260 |
|
}, |
|
{ |
|
"epoch": 0.9816023738872404, |
|
"grad_norm": 0.8017896413803101, |
|
"learning_rate": 2.7820005634425528e-05, |
|
"loss": 0.8348, |
|
"step": 8270 |
|
}, |
|
{ |
|
"epoch": 0.9827893175074184, |
|
"grad_norm": 1.063607931137085, |
|
"learning_rate": 2.7771250515821807e-05, |
|
"loss": 0.9464, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 0.9839762611275964, |
|
"grad_norm": 1.0467710494995117, |
|
"learning_rate": 2.772248472385603e-05, |
|
"loss": 0.8656, |
|
"step": 8290 |
|
}, |
|
{ |
|
"epoch": 0.9851632047477745, |
|
"grad_norm": 0.7622478604316711, |
|
"learning_rate": 2.7673708446347712e-05, |
|
"loss": 0.8404, |
|
"step": 8300 |
|
}, |
|
{ |
|
"epoch": 0.9863501483679525, |
|
"grad_norm": 0.7604171633720398, |
|
"learning_rate": 2.7624921871156772e-05, |
|
"loss": 0.8404, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 0.9875370919881306, |
|
"grad_norm": 1.0093992948532104, |
|
"learning_rate": 2.7576125186182772e-05, |
|
"loss": 0.8871, |
|
"step": 8320 |
|
}, |
|
{ |
|
"epoch": 0.9887240356083086, |
|
"grad_norm": 0.5812623500823975, |
|
"learning_rate": 2.752731857936423e-05, |
|
"loss": 0.8223, |
|
"step": 8330 |
|
}, |
|
{ |
|
"epoch": 0.9899109792284867, |
|
"grad_norm": 0.5302821397781372, |
|
"learning_rate": 2.7478502238677862e-05, |
|
"loss": 0.8357, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 0.9910979228486647, |
|
"grad_norm": 0.8963286876678467, |
|
"learning_rate": 2.7429676352137885e-05, |
|
"loss": 0.9103, |
|
"step": 8350 |
|
}, |
|
{ |
|
"epoch": 0.9922848664688427, |
|
"grad_norm": 0.8985591530799866, |
|
"learning_rate": 2.738084110779526e-05, |
|
"loss": 0.9101, |
|
"step": 8360 |
|
}, |
|
{ |
|
"epoch": 0.9934718100890207, |
|
"grad_norm": 1.0268195867538452, |
|
"learning_rate": 2.733199669373701e-05, |
|
"loss": 0.8353, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 0.9946587537091988, |
|
"grad_norm": 0.897013783454895, |
|
"learning_rate": 2.7283143298085474e-05, |
|
"loss": 0.8526, |
|
"step": 8380 |
|
}, |
|
{ |
|
"epoch": 0.9958456973293769, |
|
"grad_norm": 1.0083746910095215, |
|
"learning_rate": 2.723428110899757e-05, |
|
"loss": 0.8349, |
|
"step": 8390 |
|
}, |
|
{ |
|
"epoch": 0.9970326409495549, |
|
"grad_norm": 0.9338353276252747, |
|
"learning_rate": 2.7185410314664107e-05, |
|
"loss": 0.8304, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 0.9982195845697329, |
|
"grad_norm": 1.0432991981506348, |
|
"learning_rate": 2.713653110330901e-05, |
|
"loss": 0.8171, |
|
"step": 8410 |
|
}, |
|
{ |
|
"epoch": 0.9994065281899109, |
|
"grad_norm": 1.0495964288711548, |
|
"learning_rate": 2.7087643663188633e-05, |
|
"loss": 0.8842, |
|
"step": 8420 |
|
}, |
|
{ |
|
"epoch": 1.000593471810089, |
|
"grad_norm": 0.9517408609390259, |
|
"learning_rate": 2.7038748182591034e-05, |
|
"loss": 0.9287, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 1.001780415430267, |
|
"grad_norm": 0.7760940790176392, |
|
"learning_rate": 2.698984484983522e-05, |
|
"loss": 0.783, |
|
"step": 8440 |
|
}, |
|
{ |
|
"epoch": 1.002967359050445, |
|
"grad_norm": 0.9729003310203552, |
|
"learning_rate": 2.694093385327045e-05, |
|
"loss": 0.8654, |
|
"step": 8450 |
|
}, |
|
{ |
|
"epoch": 1.0041543026706232, |
|
"grad_norm": 1.066876769065857, |
|
"learning_rate": 2.6892015381275493e-05, |
|
"loss": 0.8803, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 1.0053412462908011, |
|
"grad_norm": 0.8508303165435791, |
|
"learning_rate": 2.684308962225793e-05, |
|
"loss": 0.8473, |
|
"step": 8470 |
|
}, |
|
{ |
|
"epoch": 1.0065281899109793, |
|
"grad_norm": 0.95997154712677, |
|
"learning_rate": 2.6794156764653367e-05, |
|
"loss": 0.8053, |
|
"step": 8480 |
|
}, |
|
{ |
|
"epoch": 1.0077151335311574, |
|
"grad_norm": 0.8591273427009583, |
|
"learning_rate": 2.67452169969248e-05, |
|
"loss": 0.8858, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 1.0089020771513353, |
|
"grad_norm": 1.0032804012298584, |
|
"learning_rate": 2.66962705075618e-05, |
|
"loss": 0.7683, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 1.0089020771513353, |
|
"eval_loss": 0.7766696810722351, |
|
"eval_runtime": 769.9177, |
|
"eval_samples_per_second": 2.304, |
|
"eval_steps_per_second": 0.577, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 1.0100890207715134, |
|
"grad_norm": 1.2956509590148926, |
|
"learning_rate": 2.6647317485079843e-05, |
|
"loss": 0.8622, |
|
"step": 8510 |
|
}, |
|
{ |
|
"epoch": 1.0112759643916913, |
|
"grad_norm": 0.9024373292922974, |
|
"learning_rate": 2.659835811801956e-05, |
|
"loss": 0.8628, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 1.0124629080118694, |
|
"grad_norm": 1.0521224737167358, |
|
"learning_rate": 2.6549392594946037e-05, |
|
"loss": 0.8375, |
|
"step": 8530 |
|
}, |
|
{ |
|
"epoch": 1.0136498516320476, |
|
"grad_norm": 0.8524640202522278, |
|
"learning_rate": 2.650042110444804e-05, |
|
"loss": 0.8161, |
|
"step": 8540 |
|
}, |
|
{ |
|
"epoch": 1.0148367952522255, |
|
"grad_norm": 0.8786371350288391, |
|
"learning_rate": 2.6451443835137342e-05, |
|
"loss": 0.8241, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 1.0160237388724036, |
|
"grad_norm": 0.993325412273407, |
|
"learning_rate": 2.640246097564796e-05, |
|
"loss": 0.827, |
|
"step": 8560 |
|
}, |
|
{ |
|
"epoch": 1.0172106824925815, |
|
"grad_norm": 0.9485884308815002, |
|
"learning_rate": 2.635347271463544e-05, |
|
"loss": 0.8, |
|
"step": 8570 |
|
}, |
|
{ |
|
"epoch": 1.0183976261127596, |
|
"grad_norm": 0.879743218421936, |
|
"learning_rate": 2.6304479240776147e-05, |
|
"loss": 0.7966, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 1.0195845697329378, |
|
"grad_norm": 0.9597845673561096, |
|
"learning_rate": 2.625548074276651e-05, |
|
"loss": 0.9103, |
|
"step": 8590 |
|
}, |
|
{ |
|
"epoch": 1.0207715133531157, |
|
"grad_norm": 0.7718109488487244, |
|
"learning_rate": 2.6206477409322306e-05, |
|
"loss": 0.8311, |
|
"step": 8600 |
|
}, |
|
{ |
|
"epoch": 1.0219584569732938, |
|
"grad_norm": 0.8141760230064392, |
|
"learning_rate": 2.615746942917795e-05, |
|
"loss": 0.8162, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 1.0231454005934717, |
|
"grad_norm": 1.0272105932235718, |
|
"learning_rate": 2.610845699108574e-05, |
|
"loss": 0.8109, |
|
"step": 8620 |
|
}, |
|
{ |
|
"epoch": 1.0243323442136498, |
|
"grad_norm": 1.010335922241211, |
|
"learning_rate": 2.6059440283815146e-05, |
|
"loss": 0.786, |
|
"step": 8630 |
|
}, |
|
{ |
|
"epoch": 1.025519287833828, |
|
"grad_norm": 0.9021561741828918, |
|
"learning_rate": 2.6010419496152083e-05, |
|
"loss": 0.8407, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 1.0267062314540059, |
|
"grad_norm": 0.8104283809661865, |
|
"learning_rate": 2.5961394816898184e-05, |
|
"loss": 0.8011, |
|
"step": 8650 |
|
}, |
|
{ |
|
"epoch": 1.027893175074184, |
|
"grad_norm": 0.7947595715522766, |
|
"learning_rate": 2.5912366434870066e-05, |
|
"loss": 0.7901, |
|
"step": 8660 |
|
}, |
|
{ |
|
"epoch": 1.0290801186943621, |
|
"grad_norm": 0.8429394960403442, |
|
"learning_rate": 2.586333453889861e-05, |
|
"loss": 0.8268, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 1.03026706231454, |
|
"grad_norm": 1.1205742359161377, |
|
"learning_rate": 2.5814299317828234e-05, |
|
"loss": 0.8265, |
|
"step": 8680 |
|
}, |
|
{ |
|
"epoch": 1.0314540059347181, |
|
"grad_norm": 0.9771138429641724, |
|
"learning_rate": 2.576526096051615e-05, |
|
"loss": 0.8365, |
|
"step": 8690 |
|
}, |
|
{ |
|
"epoch": 1.032640949554896, |
|
"grad_norm": 1.1407651901245117, |
|
"learning_rate": 2.571621965583166e-05, |
|
"loss": 0.8202, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 1.0338278931750742, |
|
"grad_norm": 0.9523341059684753, |
|
"learning_rate": 2.5667175592655413e-05, |
|
"loss": 0.8774, |
|
"step": 8710 |
|
}, |
|
{ |
|
"epoch": 1.0350148367952523, |
|
"grad_norm": 1.2493401765823364, |
|
"learning_rate": 2.5618128959878686e-05, |
|
"loss": 0.8825, |
|
"step": 8720 |
|
}, |
|
{ |
|
"epoch": 1.0362017804154302, |
|
"grad_norm": 1.1929856538772583, |
|
"learning_rate": 2.556907994640264e-05, |
|
"loss": 0.8126, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 1.0373887240356083, |
|
"grad_norm": 0.6640522480010986, |
|
"learning_rate": 2.5520028741137636e-05, |
|
"loss": 0.7949, |
|
"step": 8740 |
|
}, |
|
{ |
|
"epoch": 1.0385756676557865, |
|
"grad_norm": 0.9167020320892334, |
|
"learning_rate": 2.547097553300245e-05, |
|
"loss": 0.7853, |
|
"step": 8750 |
|
}, |
|
{ |
|
"epoch": 1.0397626112759644, |
|
"grad_norm": 1.055112361907959, |
|
"learning_rate": 2.5421920510923568e-05, |
|
"loss": 0.8087, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 1.0409495548961425, |
|
"grad_norm": 0.9777671098709106, |
|
"learning_rate": 2.5372863863834484e-05, |
|
"loss": 0.7782, |
|
"step": 8770 |
|
}, |
|
{ |
|
"epoch": 1.0421364985163204, |
|
"grad_norm": 0.8514682054519653, |
|
"learning_rate": 2.532380578067493e-05, |
|
"loss": 0.8282, |
|
"step": 8780 |
|
}, |
|
{ |
|
"epoch": 1.0433234421364985, |
|
"grad_norm": 1.0609797239303589, |
|
"learning_rate": 2.5274746450390186e-05, |
|
"loss": 0.8202, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 1.0445103857566767, |
|
"grad_norm": 0.8998866081237793, |
|
"learning_rate": 2.5225686061930326e-05, |
|
"loss": 0.8358, |
|
"step": 8800 |
|
}, |
|
{ |
|
"epoch": 1.0456973293768546, |
|
"grad_norm": 0.9781126379966736, |
|
"learning_rate": 2.51766248042495e-05, |
|
"loss": 0.8162, |
|
"step": 8810 |
|
}, |
|
{ |
|
"epoch": 1.0468842729970327, |
|
"grad_norm": 0.8800467252731323, |
|
"learning_rate": 2.5127562866305206e-05, |
|
"loss": 0.8072, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 1.0480712166172106, |
|
"grad_norm": 1.0159127712249756, |
|
"learning_rate": 2.5078500437057556e-05, |
|
"loss": 0.7879, |
|
"step": 8830 |
|
}, |
|
{ |
|
"epoch": 1.0492581602373887, |
|
"grad_norm": 0.8794515132904053, |
|
"learning_rate": 2.5029437705468577e-05, |
|
"loss": 0.7577, |
|
"step": 8840 |
|
}, |
|
{ |
|
"epoch": 1.0504451038575668, |
|
"grad_norm": 1.0653094053268433, |
|
"learning_rate": 2.4980374860501436e-05, |
|
"loss": 0.8209, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 1.0516320474777447, |
|
"grad_norm": 0.8193674683570862, |
|
"learning_rate": 2.493131209111974e-05, |
|
"loss": 0.796, |
|
"step": 8860 |
|
}, |
|
{ |
|
"epoch": 1.0528189910979229, |
|
"grad_norm": 0.8971123099327087, |
|
"learning_rate": 2.4882249586286825e-05, |
|
"loss": 0.8305, |
|
"step": 8870 |
|
}, |
|
{ |
|
"epoch": 1.0540059347181008, |
|
"grad_norm": 1.074666976928711, |
|
"learning_rate": 2.4833187534964975e-05, |
|
"loss": 0.8023, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 1.055192878338279, |
|
"grad_norm": 0.8462083339691162, |
|
"learning_rate": 2.4784126126114766e-05, |
|
"loss": 0.6995, |
|
"step": 8890 |
|
}, |
|
{ |
|
"epoch": 1.056379821958457, |
|
"grad_norm": 0.9592199921607971, |
|
"learning_rate": 2.4735065548694263e-05, |
|
"loss": 0.8354, |
|
"step": 8900 |
|
}, |
|
{ |
|
"epoch": 1.057566765578635, |
|
"grad_norm": 1.0060994625091553, |
|
"learning_rate": 2.4686005991658345e-05, |
|
"loss": 0.8547, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 1.058753709198813, |
|
"grad_norm": 1.0199803113937378, |
|
"learning_rate": 2.463694764395797e-05, |
|
"loss": 0.7854, |
|
"step": 8920 |
|
}, |
|
{ |
|
"epoch": 1.0599406528189912, |
|
"grad_norm": 1.029325008392334, |
|
"learning_rate": 2.458789069453942e-05, |
|
"loss": 0.8594, |
|
"step": 8930 |
|
}, |
|
{ |
|
"epoch": 1.061127596439169, |
|
"grad_norm": 0.9073032140731812, |
|
"learning_rate": 2.453883533234361e-05, |
|
"loss": 0.8842, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 1.0623145400593472, |
|
"grad_norm": 0.9671692252159119, |
|
"learning_rate": 2.4489781746305326e-05, |
|
"loss": 0.7664, |
|
"step": 8950 |
|
}, |
|
{ |
|
"epoch": 1.0635014836795251, |
|
"grad_norm": 0.9562916159629822, |
|
"learning_rate": 2.4440730125352528e-05, |
|
"loss": 0.7997, |
|
"step": 8960 |
|
}, |
|
{ |
|
"epoch": 1.0646884272997033, |
|
"grad_norm": 1.353378176689148, |
|
"learning_rate": 2.4391680658405585e-05, |
|
"loss": 0.7817, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 1.0658753709198814, |
|
"grad_norm": 1.0125224590301514, |
|
"learning_rate": 2.4342633534376606e-05, |
|
"loss": 0.7787, |
|
"step": 8980 |
|
}, |
|
{ |
|
"epoch": 1.0670623145400593, |
|
"grad_norm": 0.8546326160430908, |
|
"learning_rate": 2.4293588942168626e-05, |
|
"loss": 0.7713, |
|
"step": 8990 |
|
}, |
|
{ |
|
"epoch": 1.0682492581602374, |
|
"grad_norm": 1.0358268022537231, |
|
"learning_rate": 2.424454707067499e-05, |
|
"loss": 0.7992, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 1.0682492581602374, |
|
"eval_loss": 0.7761093974113464, |
|
"eval_runtime": 766.9914, |
|
"eval_samples_per_second": 2.313, |
|
"eval_steps_per_second": 0.579, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 1.0694362017804155, |
|
"grad_norm": 1.4168592691421509, |
|
"learning_rate": 2.4195508108778496e-05, |
|
"loss": 0.8385, |
|
"step": 9010 |
|
}, |
|
{ |
|
"epoch": 1.0706231454005934, |
|
"grad_norm": 0.9119871854782104, |
|
"learning_rate": 2.4146472245350805e-05, |
|
"loss": 0.7911, |
|
"step": 9020 |
|
}, |
|
{ |
|
"epoch": 1.0718100890207716, |
|
"grad_norm": 1.2158715724945068, |
|
"learning_rate": 2.4097439669251595e-05, |
|
"loss": 0.7497, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 1.0729970326409495, |
|
"grad_norm": 0.9663563966751099, |
|
"learning_rate": 2.4048410569327892e-05, |
|
"loss": 0.7418, |
|
"step": 9040 |
|
}, |
|
{ |
|
"epoch": 1.0741839762611276, |
|
"grad_norm": 0.8806156516075134, |
|
"learning_rate": 2.399938513441336e-05, |
|
"loss": 0.7516, |
|
"step": 9050 |
|
}, |
|
{ |
|
"epoch": 1.0753709198813057, |
|
"grad_norm": 1.1792296171188354, |
|
"learning_rate": 2.3950363553327507e-05, |
|
"loss": 0.8281, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 1.0765578635014836, |
|
"grad_norm": 0.8951212763786316, |
|
"learning_rate": 2.3901346014875037e-05, |
|
"loss": 0.8098, |
|
"step": 9070 |
|
}, |
|
{ |
|
"epoch": 1.0777448071216618, |
|
"grad_norm": 0.8044575452804565, |
|
"learning_rate": 2.3852332707845047e-05, |
|
"loss": 0.779, |
|
"step": 9080 |
|
}, |
|
{ |
|
"epoch": 1.0789317507418397, |
|
"grad_norm": 1.0501245260238647, |
|
"learning_rate": 2.380332382101038e-05, |
|
"loss": 0.8397, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 1.0801186943620178, |
|
"grad_norm": 1.2533349990844727, |
|
"learning_rate": 2.375431954312681e-05, |
|
"loss": 0.8961, |
|
"step": 9100 |
|
}, |
|
{ |
|
"epoch": 1.081305637982196, |
|
"grad_norm": 1.1930317878723145, |
|
"learning_rate": 2.370532006293239e-05, |
|
"loss": 0.8086, |
|
"step": 9110 |
|
}, |
|
{ |
|
"epoch": 1.0824925816023738, |
|
"grad_norm": 0.9481077790260315, |
|
"learning_rate": 2.365632556914668e-05, |
|
"loss": 0.7704, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 1.083679525222552, |
|
"grad_norm": 0.9799014925956726, |
|
"learning_rate": 2.360733625047005e-05, |
|
"loss": 0.8051, |
|
"step": 9130 |
|
}, |
|
{ |
|
"epoch": 1.0848664688427299, |
|
"grad_norm": 0.669889509677887, |
|
"learning_rate": 2.355835229558292e-05, |
|
"loss": 0.829, |
|
"step": 9140 |
|
}, |
|
{ |
|
"epoch": 1.086053412462908, |
|
"grad_norm": 0.7075235843658447, |
|
"learning_rate": 2.350937389314506e-05, |
|
"loss": 0.7304, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 1.0872403560830861, |
|
"grad_norm": 0.8748484253883362, |
|
"learning_rate": 2.3460401231794865e-05, |
|
"loss": 0.7795, |
|
"step": 9160 |
|
}, |
|
{ |
|
"epoch": 1.088427299703264, |
|
"grad_norm": 1.0108648538589478, |
|
"learning_rate": 2.3411434500148592e-05, |
|
"loss": 0.8265, |
|
"step": 9170 |
|
}, |
|
{ |
|
"epoch": 1.0896142433234421, |
|
"grad_norm": 1.1676548719406128, |
|
"learning_rate": 2.3362473886799682e-05, |
|
"loss": 0.8811, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 1.0908011869436203, |
|
"grad_norm": 0.9329530000686646, |
|
"learning_rate": 2.3313519580318025e-05, |
|
"loss": 0.795, |
|
"step": 9190 |
|
}, |
|
{ |
|
"epoch": 1.0919881305637982, |
|
"grad_norm": 1.159879207611084, |
|
"learning_rate": 2.3264571769249175e-05, |
|
"loss": 0.7857, |
|
"step": 9200 |
|
}, |
|
{ |
|
"epoch": 1.0931750741839763, |
|
"grad_norm": 0.8164434432983398, |
|
"learning_rate": 2.3215630642113716e-05, |
|
"loss": 0.7752, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 1.0943620178041542, |
|
"grad_norm": 0.9076749682426453, |
|
"learning_rate": 2.316669638740645e-05, |
|
"loss": 0.827, |
|
"step": 9220 |
|
}, |
|
{ |
|
"epoch": 1.0955489614243323, |
|
"grad_norm": 0.9610769748687744, |
|
"learning_rate": 2.3117769193595756e-05, |
|
"loss": 0.8259, |
|
"step": 9230 |
|
}, |
|
{ |
|
"epoch": 1.0967359050445105, |
|
"grad_norm": 1.0237845182418823, |
|
"learning_rate": 2.3068849249122763e-05, |
|
"loss": 0.7899, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 1.0979228486646884, |
|
"grad_norm": 0.9556646347045898, |
|
"learning_rate": 2.3019936742400724e-05, |
|
"loss": 0.8264, |
|
"step": 9250 |
|
}, |
|
{ |
|
"epoch": 1.0991097922848665, |
|
"grad_norm": 0.9002740383148193, |
|
"learning_rate": 2.2971031861814223e-05, |
|
"loss": 0.8421, |
|
"step": 9260 |
|
}, |
|
{ |
|
"epoch": 1.1002967359050446, |
|
"grad_norm": 1.3685026168823242, |
|
"learning_rate": 2.2922134795718477e-05, |
|
"loss": 0.8126, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 1.1014836795252225, |
|
"grad_norm": 0.9254816174507141, |
|
"learning_rate": 2.287324573243862e-05, |
|
"loss": 0.8157, |
|
"step": 9280 |
|
}, |
|
{ |
|
"epoch": 1.1026706231454007, |
|
"grad_norm": 1.0241316556930542, |
|
"learning_rate": 2.2824364860268927e-05, |
|
"loss": 0.8039, |
|
"step": 9290 |
|
}, |
|
{ |
|
"epoch": 1.1038575667655786, |
|
"grad_norm": 0.941983699798584, |
|
"learning_rate": 2.277549236747218e-05, |
|
"loss": 0.8532, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 1.1050445103857567, |
|
"grad_norm": 0.837225615978241, |
|
"learning_rate": 2.2726628442278826e-05, |
|
"loss": 0.7953, |
|
"step": 9310 |
|
}, |
|
{ |
|
"epoch": 1.1062314540059348, |
|
"grad_norm": 1.406443476676941, |
|
"learning_rate": 2.2677773272886373e-05, |
|
"loss": 0.8247, |
|
"step": 9320 |
|
}, |
|
{ |
|
"epoch": 1.1074183976261127, |
|
"grad_norm": 0.8839634656906128, |
|
"learning_rate": 2.2628927047458558e-05, |
|
"loss": 0.8739, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 1.1086053412462908, |
|
"grad_norm": 1.0190342664718628, |
|
"learning_rate": 2.258008995412471e-05, |
|
"loss": 0.7675, |
|
"step": 9340 |
|
}, |
|
{ |
|
"epoch": 1.1097922848664687, |
|
"grad_norm": 1.0243916511535645, |
|
"learning_rate": 2.2531262180978952e-05, |
|
"loss": 0.7931, |
|
"step": 9350 |
|
}, |
|
{ |
|
"epoch": 1.1109792284866469, |
|
"grad_norm": 1.0709168910980225, |
|
"learning_rate": 2.2482443916079534e-05, |
|
"loss": 0.8604, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 1.112166172106825, |
|
"grad_norm": 0.733767569065094, |
|
"learning_rate": 2.2433635347448084e-05, |
|
"loss": 0.7005, |
|
"step": 9370 |
|
}, |
|
{ |
|
"epoch": 1.113353115727003, |
|
"grad_norm": 1.1727817058563232, |
|
"learning_rate": 2.238483666306886e-05, |
|
"loss": 0.7749, |
|
"step": 9380 |
|
}, |
|
{ |
|
"epoch": 1.114540059347181, |
|
"grad_norm": 1.2899572849273682, |
|
"learning_rate": 2.2336048050888095e-05, |
|
"loss": 0.7893, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 1.115727002967359, |
|
"grad_norm": 1.053328037261963, |
|
"learning_rate": 2.228726969881318e-05, |
|
"loss": 0.7456, |
|
"step": 9400 |
|
}, |
|
{ |
|
"epoch": 1.116913946587537, |
|
"grad_norm": 1.1463031768798828, |
|
"learning_rate": 2.2238501794712034e-05, |
|
"loss": 0.8456, |
|
"step": 9410 |
|
}, |
|
{ |
|
"epoch": 1.1181008902077152, |
|
"grad_norm": 1.030958652496338, |
|
"learning_rate": 2.2189744526412297e-05, |
|
"loss": 0.8649, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 1.119287833827893, |
|
"grad_norm": 1.2256028652191162, |
|
"learning_rate": 2.2140998081700676e-05, |
|
"loss": 0.8161, |
|
"step": 9430 |
|
}, |
|
{ |
|
"epoch": 1.1204747774480712, |
|
"grad_norm": 1.2485847473144531, |
|
"learning_rate": 2.209226264832219e-05, |
|
"loss": 0.8105, |
|
"step": 9440 |
|
}, |
|
{ |
|
"epoch": 1.1216617210682494, |
|
"grad_norm": 1.0359870195388794, |
|
"learning_rate": 2.2043538413979425e-05, |
|
"loss": 0.8313, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 1.1228486646884273, |
|
"grad_norm": 1.0015512704849243, |
|
"learning_rate": 2.199482556633185e-05, |
|
"loss": 0.8401, |
|
"step": 9460 |
|
}, |
|
{ |
|
"epoch": 1.1240356083086054, |
|
"grad_norm": 1.3324792385101318, |
|
"learning_rate": 2.1946124292995075e-05, |
|
"loss": 0.8473, |
|
"step": 9470 |
|
}, |
|
{ |
|
"epoch": 1.1252225519287833, |
|
"grad_norm": 0.9533900618553162, |
|
"learning_rate": 2.1897434781540144e-05, |
|
"loss": 0.7981, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 1.1264094955489614, |
|
"grad_norm": 1.0634273290634155, |
|
"learning_rate": 2.1848757219492768e-05, |
|
"loss": 0.7351, |
|
"step": 9490 |
|
}, |
|
{ |
|
"epoch": 1.1275964391691395, |
|
"grad_norm": 1.1996541023254395, |
|
"learning_rate": 2.1800091794332672e-05, |
|
"loss": 0.8202, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 1.1275964391691395, |
|
"eval_loss": 0.7751370072364807, |
|
"eval_runtime": 772.0681, |
|
"eval_samples_per_second": 2.298, |
|
"eval_steps_per_second": 0.575, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 1.1287833827893174, |
|
"grad_norm": 0.9720987677574158, |
|
"learning_rate": 2.1751438693492834e-05, |
|
"loss": 0.7615, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 1.1299703264094956, |
|
"grad_norm": 1.2057288885116577, |
|
"learning_rate": 2.1702798104358728e-05, |
|
"loss": 0.8152, |
|
"step": 9520 |
|
}, |
|
{ |
|
"epoch": 1.1311572700296737, |
|
"grad_norm": 1.2114310264587402, |
|
"learning_rate": 2.1654170214267682e-05, |
|
"loss": 0.7721, |
|
"step": 9530 |
|
}, |
|
{ |
|
"epoch": 1.1323442136498516, |
|
"grad_norm": 0.9816735982894897, |
|
"learning_rate": 2.1605555210508086e-05, |
|
"loss": 0.8552, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 1.1335311572700297, |
|
"grad_norm": 1.4814951419830322, |
|
"learning_rate": 2.155695328031872e-05, |
|
"loss": 0.9111, |
|
"step": 9550 |
|
}, |
|
{ |
|
"epoch": 1.1347181008902076, |
|
"grad_norm": 1.1997096538543701, |
|
"learning_rate": 2.150836461088799e-05, |
|
"loss": 0.8304, |
|
"step": 9560 |
|
}, |
|
{ |
|
"epoch": 1.1359050445103858, |
|
"grad_norm": 0.9619604349136353, |
|
"learning_rate": 2.1459789389353254e-05, |
|
"loss": 0.8256, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 1.137091988130564, |
|
"grad_norm": 0.8761364221572876, |
|
"learning_rate": 2.1411227802800043e-05, |
|
"loss": 0.832, |
|
"step": 9580 |
|
}, |
|
{ |
|
"epoch": 1.1382789317507418, |
|
"grad_norm": 1.1116739511489868, |
|
"learning_rate": 2.13626800382614e-05, |
|
"loss": 0.81, |
|
"step": 9590 |
|
}, |
|
{ |
|
"epoch": 1.13946587537092, |
|
"grad_norm": 1.316489577293396, |
|
"learning_rate": 2.1314146282717144e-05, |
|
"loss": 0.7807, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 1.1406528189910978, |
|
"grad_norm": 0.8193405866622925, |
|
"learning_rate": 2.1265626723093087e-05, |
|
"loss": 0.7839, |
|
"step": 9610 |
|
}, |
|
{ |
|
"epoch": 1.141839762611276, |
|
"grad_norm": 1.4620572328567505, |
|
"learning_rate": 2.121712154626043e-05, |
|
"loss": 0.7752, |
|
"step": 9620 |
|
}, |
|
{ |
|
"epoch": 1.143026706231454, |
|
"grad_norm": 1.1185797452926636, |
|
"learning_rate": 2.1168630939034923e-05, |
|
"loss": 0.8407, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 1.144213649851632, |
|
"grad_norm": 1.177851676940918, |
|
"learning_rate": 2.112015508817625e-05, |
|
"loss": 0.8171, |
|
"step": 9640 |
|
}, |
|
{ |
|
"epoch": 1.1454005934718101, |
|
"grad_norm": 1.0961945056915283, |
|
"learning_rate": 2.1071694180387217e-05, |
|
"loss": 0.7837, |
|
"step": 9650 |
|
}, |
|
{ |
|
"epoch": 1.146587537091988, |
|
"grad_norm": 0.984928548336029, |
|
"learning_rate": 2.1023248402313118e-05, |
|
"loss": 0.7953, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 1.1477744807121661, |
|
"grad_norm": 1.216512680053711, |
|
"learning_rate": 2.097481794054094e-05, |
|
"loss": 0.8274, |
|
"step": 9670 |
|
}, |
|
{ |
|
"epoch": 1.1489614243323443, |
|
"grad_norm": 1.20542573928833, |
|
"learning_rate": 2.092640298159871e-05, |
|
"loss": 0.8014, |
|
"step": 9680 |
|
}, |
|
{ |
|
"epoch": 1.1501483679525222, |
|
"grad_norm": 1.1971958875656128, |
|
"learning_rate": 2.0878003711954727e-05, |
|
"loss": 0.8743, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 1.1513353115727003, |
|
"grad_norm": 0.7235596776008606, |
|
"learning_rate": 2.0829620318016863e-05, |
|
"loss": 0.7358, |
|
"step": 9700 |
|
}, |
|
{ |
|
"epoch": 1.1525222551928784, |
|
"grad_norm": 0.8866942524909973, |
|
"learning_rate": 2.0781252986131867e-05, |
|
"loss": 0.7841, |
|
"step": 9710 |
|
}, |
|
{ |
|
"epoch": 1.1537091988130563, |
|
"grad_norm": 0.8466529846191406, |
|
"learning_rate": 2.073290190258459e-05, |
|
"loss": 0.7781, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 1.1548961424332345, |
|
"grad_norm": 1.2863926887512207, |
|
"learning_rate": 2.068456725359735e-05, |
|
"loss": 0.7886, |
|
"step": 9730 |
|
}, |
|
{ |
|
"epoch": 1.1560830860534124, |
|
"grad_norm": 1.4690605401992798, |
|
"learning_rate": 2.0636249225329105e-05, |
|
"loss": 0.8238, |
|
"step": 9740 |
|
}, |
|
{ |
|
"epoch": 1.1572700296735905, |
|
"grad_norm": 1.0647927522659302, |
|
"learning_rate": 2.058794800387486e-05, |
|
"loss": 0.8475, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 1.1584569732937686, |
|
"grad_norm": 0.860795795917511, |
|
"learning_rate": 2.053966377526487e-05, |
|
"loss": 0.7295, |
|
"step": 9760 |
|
}, |
|
{ |
|
"epoch": 1.1596439169139465, |
|
"grad_norm": 1.1758381128311157, |
|
"learning_rate": 2.0491396725463916e-05, |
|
"loss": 0.7597, |
|
"step": 9770 |
|
}, |
|
{ |
|
"epoch": 1.1608308605341247, |
|
"grad_norm": 1.068555235862732, |
|
"learning_rate": 2.0443147040370657e-05, |
|
"loss": 0.7811, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 1.1620178041543028, |
|
"grad_norm": 0.9324743151664734, |
|
"learning_rate": 2.0394914905816836e-05, |
|
"loss": 0.8155, |
|
"step": 9790 |
|
}, |
|
{ |
|
"epoch": 1.1632047477744807, |
|
"grad_norm": 1.0542433261871338, |
|
"learning_rate": 2.0346700507566628e-05, |
|
"loss": 0.8496, |
|
"step": 9800 |
|
}, |
|
{ |
|
"epoch": 1.1643916913946588, |
|
"grad_norm": 1.0209101438522339, |
|
"learning_rate": 2.0298504031315875e-05, |
|
"loss": 0.8186, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 1.1655786350148367, |
|
"grad_norm": 1.0995596647262573, |
|
"learning_rate": 2.0250325662691422e-05, |
|
"loss": 0.8151, |
|
"step": 9820 |
|
}, |
|
{ |
|
"epoch": 1.1667655786350148, |
|
"grad_norm": 1.0881950855255127, |
|
"learning_rate": 2.020216558725033e-05, |
|
"loss": 0.7281, |
|
"step": 9830 |
|
}, |
|
{ |
|
"epoch": 1.167952522255193, |
|
"grad_norm": 0.8314598202705383, |
|
"learning_rate": 2.0154023990479244e-05, |
|
"loss": 0.7935, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 1.1691394658753709, |
|
"grad_norm": 0.8414393067359924, |
|
"learning_rate": 2.010590105779364e-05, |
|
"loss": 0.7565, |
|
"step": 9850 |
|
}, |
|
{ |
|
"epoch": 1.170326409495549, |
|
"grad_norm": 1.2225565910339355, |
|
"learning_rate": 2.0057796974537066e-05, |
|
"loss": 0.8241, |
|
"step": 9860 |
|
}, |
|
{ |
|
"epoch": 1.171513353115727, |
|
"grad_norm": 1.2027302980422974, |
|
"learning_rate": 2.000971192598053e-05, |
|
"loss": 0.8656, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 1.172700296735905, |
|
"grad_norm": 0.9175437688827515, |
|
"learning_rate": 1.996164609732167e-05, |
|
"loss": 0.7888, |
|
"step": 9880 |
|
}, |
|
{ |
|
"epoch": 1.1738872403560832, |
|
"grad_norm": 1.1863752603530884, |
|
"learning_rate": 1.991359967368416e-05, |
|
"loss": 0.8331, |
|
"step": 9890 |
|
}, |
|
{ |
|
"epoch": 1.175074183976261, |
|
"grad_norm": 0.907460629940033, |
|
"learning_rate": 1.9865572840116888e-05, |
|
"loss": 0.8613, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 1.1762611275964392, |
|
"grad_norm": 1.053999423980713, |
|
"learning_rate": 1.9817565781593302e-05, |
|
"loss": 0.806, |
|
"step": 9910 |
|
}, |
|
{ |
|
"epoch": 1.177448071216617, |
|
"grad_norm": 0.7389574646949768, |
|
"learning_rate": 1.9769578683010714e-05, |
|
"loss": 0.8112, |
|
"step": 9920 |
|
}, |
|
{ |
|
"epoch": 1.1786350148367952, |
|
"grad_norm": 1.1020927429199219, |
|
"learning_rate": 1.9721611729189527e-05, |
|
"loss": 0.8336, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 1.1798219584569734, |
|
"grad_norm": 1.0626370906829834, |
|
"learning_rate": 1.967366510487258e-05, |
|
"loss": 0.8218, |
|
"step": 9940 |
|
}, |
|
{ |
|
"epoch": 1.1810089020771513, |
|
"grad_norm": 1.0998564958572388, |
|
"learning_rate": 1.96257389947244e-05, |
|
"loss": 0.7585, |
|
"step": 9950 |
|
}, |
|
{ |
|
"epoch": 1.1821958456973294, |
|
"grad_norm": 1.141060471534729, |
|
"learning_rate": 1.9577833583330518e-05, |
|
"loss": 0.8047, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 1.1833827893175075, |
|
"grad_norm": 1.1182451248168945, |
|
"learning_rate": 1.9529949055196724e-05, |
|
"loss": 0.7609, |
|
"step": 9970 |
|
}, |
|
{ |
|
"epoch": 1.1845697329376854, |
|
"grad_norm": 0.8619179129600525, |
|
"learning_rate": 1.94820855947484e-05, |
|
"loss": 0.7636, |
|
"step": 9980 |
|
}, |
|
{ |
|
"epoch": 1.1857566765578635, |
|
"grad_norm": 1.414411187171936, |
|
"learning_rate": 1.9434243386329764e-05, |
|
"loss": 0.7444, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 1.1869436201780414, |
|
"grad_norm": 1.1006312370300293, |
|
"learning_rate": 1.9386422614203212e-05, |
|
"loss": 0.8047, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 1.1869436201780414, |
|
"eval_loss": 0.7749348282814026, |
|
"eval_runtime": 772.3216, |
|
"eval_samples_per_second": 2.297, |
|
"eval_steps_per_second": 0.575, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 1.1881305637982196, |
|
"grad_norm": 0.8358052968978882, |
|
"learning_rate": 1.933862346254855e-05, |
|
"loss": 0.793, |
|
"step": 10010 |
|
}, |
|
{ |
|
"epoch": 1.1893175074183977, |
|
"grad_norm": 0.9573304653167725, |
|
"learning_rate": 1.929084611546233e-05, |
|
"loss": 0.8799, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 1.1905044510385756, |
|
"grad_norm": 1.1877682209014893, |
|
"learning_rate": 1.924309075695712e-05, |
|
"loss": 0.8131, |
|
"step": 10030 |
|
}, |
|
{ |
|
"epoch": 1.1916913946587537, |
|
"grad_norm": 1.4036526679992676, |
|
"learning_rate": 1.919535757096079e-05, |
|
"loss": 0.8705, |
|
"step": 10040 |
|
}, |
|
{ |
|
"epoch": 1.1928783382789319, |
|
"grad_norm": 1.054739236831665, |
|
"learning_rate": 1.914764674131584e-05, |
|
"loss": 0.8792, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 1.1940652818991098, |
|
"grad_norm": 1.0724046230316162, |
|
"learning_rate": 1.909995845177863e-05, |
|
"loss": 0.7757, |
|
"step": 10060 |
|
}, |
|
{ |
|
"epoch": 1.195252225519288, |
|
"grad_norm": 1.3081406354904175, |
|
"learning_rate": 1.9052292886018745e-05, |
|
"loss": 0.7989, |
|
"step": 10070 |
|
}, |
|
{ |
|
"epoch": 1.1964391691394658, |
|
"grad_norm": 1.3429408073425293, |
|
"learning_rate": 1.9004650227618215e-05, |
|
"loss": 0.8631, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 1.197626112759644, |
|
"grad_norm": 1.1177902221679688, |
|
"learning_rate": 1.8957030660070862e-05, |
|
"loss": 0.7897, |
|
"step": 10090 |
|
}, |
|
{ |
|
"epoch": 1.198813056379822, |
|
"grad_norm": 1.1529994010925293, |
|
"learning_rate": 1.8909434366781592e-05, |
|
"loss": 0.8375, |
|
"step": 10100 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"grad_norm": 1.2478564977645874, |
|
"learning_rate": 1.8861861531065622e-05, |
|
"loss": 0.7458, |
|
"step": 10110 |
|
}, |
|
{ |
|
"epoch": 1.201186943620178, |
|
"grad_norm": 0.9045754671096802, |
|
"learning_rate": 1.881431233614788e-05, |
|
"loss": 0.8016, |
|
"step": 10120 |
|
}, |
|
{ |
|
"epoch": 1.202373887240356, |
|
"grad_norm": 1.0951228141784668, |
|
"learning_rate": 1.8766786965162198e-05, |
|
"loss": 0.7926, |
|
"step": 10130 |
|
}, |
|
{ |
|
"epoch": 1.2035608308605341, |
|
"grad_norm": 1.0310611724853516, |
|
"learning_rate": 1.8719285601150665e-05, |
|
"loss": 0.859, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 1.2047477744807122, |
|
"grad_norm": 1.3347513675689697, |
|
"learning_rate": 1.8671808427062924e-05, |
|
"loss": 0.8695, |
|
"step": 10150 |
|
}, |
|
{ |
|
"epoch": 1.2059347181008901, |
|
"grad_norm": 1.2841068506240845, |
|
"learning_rate": 1.8624355625755423e-05, |
|
"loss": 0.7943, |
|
"step": 10160 |
|
}, |
|
{ |
|
"epoch": 1.2071216617210683, |
|
"grad_norm": 1.022180199623108, |
|
"learning_rate": 1.8576927379990776e-05, |
|
"loss": 0.8996, |
|
"step": 10170 |
|
}, |
|
{ |
|
"epoch": 1.2083086053412462, |
|
"grad_norm": 0.9655975103378296, |
|
"learning_rate": 1.852952387243698e-05, |
|
"loss": 0.7883, |
|
"step": 10180 |
|
}, |
|
{ |
|
"epoch": 1.2094955489614243, |
|
"grad_norm": 1.4115021228790283, |
|
"learning_rate": 1.848214528566679e-05, |
|
"loss": 0.7879, |
|
"step": 10190 |
|
}, |
|
{ |
|
"epoch": 1.2106824925816024, |
|
"grad_norm": 1.3277826309204102, |
|
"learning_rate": 1.843479180215695e-05, |
|
"loss": 0.8263, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 1.2118694362017803, |
|
"grad_norm": 1.1939499378204346, |
|
"learning_rate": 1.8387463604287554e-05, |
|
"loss": 0.8068, |
|
"step": 10210 |
|
}, |
|
{ |
|
"epoch": 1.2130563798219585, |
|
"grad_norm": 1.1342905759811401, |
|
"learning_rate": 1.834016087434126e-05, |
|
"loss": 0.7618, |
|
"step": 10220 |
|
}, |
|
{ |
|
"epoch": 1.2142433234421366, |
|
"grad_norm": 1.5079452991485596, |
|
"learning_rate": 1.8292883794502697e-05, |
|
"loss": 0.7817, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 1.2154302670623145, |
|
"grad_norm": 0.8599739074707031, |
|
"learning_rate": 1.8245632546857655e-05, |
|
"loss": 0.7645, |
|
"step": 10240 |
|
}, |
|
{ |
|
"epoch": 1.2166172106824926, |
|
"grad_norm": 1.26143217086792, |
|
"learning_rate": 1.8198407313392447e-05, |
|
"loss": 0.8963, |
|
"step": 10250 |
|
}, |
|
{ |
|
"epoch": 1.2178041543026705, |
|
"grad_norm": 1.018254041671753, |
|
"learning_rate": 1.815120827599322e-05, |
|
"loss": 0.8858, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 1.2189910979228487, |
|
"grad_norm": 1.004217505455017, |
|
"learning_rate": 1.8104035616445182e-05, |
|
"loss": 0.8171, |
|
"step": 10270 |
|
}, |
|
{ |
|
"epoch": 1.2201780415430268, |
|
"grad_norm": 1.0886305570602417, |
|
"learning_rate": 1.8056889516431986e-05, |
|
"loss": 0.7771, |
|
"step": 10280 |
|
}, |
|
{ |
|
"epoch": 1.2213649851632047, |
|
"grad_norm": 1.4587886333465576, |
|
"learning_rate": 1.8009770157534955e-05, |
|
"loss": 0.8639, |
|
"step": 10290 |
|
}, |
|
{ |
|
"epoch": 1.2225519287833828, |
|
"grad_norm": 1.419061303138733, |
|
"learning_rate": 1.7962677721232475e-05, |
|
"loss": 0.7838, |
|
"step": 10300 |
|
}, |
|
{ |
|
"epoch": 1.223738872403561, |
|
"grad_norm": 0.8688335418701172, |
|
"learning_rate": 1.7915612388899173e-05, |
|
"loss": 0.7259, |
|
"step": 10310 |
|
}, |
|
{ |
|
"epoch": 1.2249258160237388, |
|
"grad_norm": 1.0464247465133667, |
|
"learning_rate": 1.786857434180534e-05, |
|
"loss": 0.7191, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 1.226112759643917, |
|
"grad_norm": 1.0211148262023926, |
|
"learning_rate": 1.7821563761116157e-05, |
|
"loss": 0.7761, |
|
"step": 10330 |
|
}, |
|
{ |
|
"epoch": 1.2272997032640949, |
|
"grad_norm": 1.3132156133651733, |
|
"learning_rate": 1.777458082789102e-05, |
|
"loss": 0.8556, |
|
"step": 10340 |
|
}, |
|
{ |
|
"epoch": 1.228486646884273, |
|
"grad_norm": 1.148878812789917, |
|
"learning_rate": 1.7727625723082843e-05, |
|
"loss": 0.8134, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 1.2296735905044511, |
|
"grad_norm": 1.038454294204712, |
|
"learning_rate": 1.7680698627537355e-05, |
|
"loss": 0.8661, |
|
"step": 10360 |
|
}, |
|
{ |
|
"epoch": 1.230860534124629, |
|
"grad_norm": 1.1032830476760864, |
|
"learning_rate": 1.7633799721992423e-05, |
|
"loss": 0.8575, |
|
"step": 10370 |
|
}, |
|
{ |
|
"epoch": 1.2320474777448072, |
|
"grad_norm": 0.923642635345459, |
|
"learning_rate": 1.7586929187077318e-05, |
|
"loss": 0.8379, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 1.233234421364985, |
|
"grad_norm": 1.3686367273330688, |
|
"learning_rate": 1.7540087203312074e-05, |
|
"loss": 0.8902, |
|
"step": 10390 |
|
}, |
|
{ |
|
"epoch": 1.2344213649851632, |
|
"grad_norm": 1.2714051008224487, |
|
"learning_rate": 1.7493273951106727e-05, |
|
"loss": 0.9215, |
|
"step": 10400 |
|
}, |
|
{ |
|
"epoch": 1.2356083086053413, |
|
"grad_norm": 1.0131839513778687, |
|
"learning_rate": 1.744648961076068e-05, |
|
"loss": 0.8196, |
|
"step": 10410 |
|
}, |
|
{ |
|
"epoch": 1.2367952522255192, |
|
"grad_norm": 1.1317576169967651, |
|
"learning_rate": 1.7399734362461985e-05, |
|
"loss": 0.816, |
|
"step": 10420 |
|
}, |
|
{ |
|
"epoch": 1.2379821958456974, |
|
"grad_norm": 0.8517586588859558, |
|
"learning_rate": 1.7353008386286614e-05, |
|
"loss": 0.8241, |
|
"step": 10430 |
|
}, |
|
{ |
|
"epoch": 1.2391691394658753, |
|
"grad_norm": 1.0312011241912842, |
|
"learning_rate": 1.7306311862197843e-05, |
|
"loss": 0.8058, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 1.2403560830860534, |
|
"grad_norm": 0.7310718894004822, |
|
"learning_rate": 1.725964497004548e-05, |
|
"loss": 0.7813, |
|
"step": 10450 |
|
}, |
|
{ |
|
"epoch": 1.2415430267062315, |
|
"grad_norm": 0.9712517857551575, |
|
"learning_rate": 1.7213007889565225e-05, |
|
"loss": 0.8308, |
|
"step": 10460 |
|
}, |
|
{ |
|
"epoch": 1.2427299703264094, |
|
"grad_norm": 1.0188968181610107, |
|
"learning_rate": 1.7166400800377948e-05, |
|
"loss": 0.7791, |
|
"step": 10470 |
|
}, |
|
{ |
|
"epoch": 1.2439169139465875, |
|
"grad_norm": 1.0058274269104004, |
|
"learning_rate": 1.7119823881989012e-05, |
|
"loss": 0.8117, |
|
"step": 10480 |
|
}, |
|
{ |
|
"epoch": 1.2451038575667657, |
|
"grad_norm": 1.0918079614639282, |
|
"learning_rate": 1.7073277313787602e-05, |
|
"loss": 0.7714, |
|
"step": 10490 |
|
}, |
|
{ |
|
"epoch": 1.2462908011869436, |
|
"grad_norm": 0.7621090412139893, |
|
"learning_rate": 1.7026761275045965e-05, |
|
"loss": 0.7538, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 1.2462908011869436, |
|
"eval_loss": 0.7732818722724915, |
|
"eval_runtime": 804.3434, |
|
"eval_samples_per_second": 2.206, |
|
"eval_steps_per_second": 0.552, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 1.2474777448071217, |
|
"grad_norm": 1.076611876487732, |
|
"learning_rate": 1.6980275944918804e-05, |
|
"loss": 0.7582, |
|
"step": 10510 |
|
}, |
|
{ |
|
"epoch": 1.2486646884272996, |
|
"grad_norm": 0.965393602848053, |
|
"learning_rate": 1.6933821502442527e-05, |
|
"loss": 0.7812, |
|
"step": 10520 |
|
}, |
|
{ |
|
"epoch": 1.2498516320474777, |
|
"grad_norm": 1.0871965885162354, |
|
"learning_rate": 1.6887398126534594e-05, |
|
"loss": 0.7601, |
|
"step": 10530 |
|
}, |
|
{ |
|
"epoch": 1.2510385756676559, |
|
"grad_norm": 0.9238999485969543, |
|
"learning_rate": 1.68410059959928e-05, |
|
"loss": 0.8194, |
|
"step": 10540 |
|
}, |
|
{ |
|
"epoch": 1.2522255192878338, |
|
"grad_norm": 1.1522690057754517, |
|
"learning_rate": 1.679464528949461e-05, |
|
"loss": 0.8596, |
|
"step": 10550 |
|
}, |
|
{ |
|
"epoch": 1.253412462908012, |
|
"grad_norm": 1.2266411781311035, |
|
"learning_rate": 1.674831618559645e-05, |
|
"loss": 0.7715, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 1.25459940652819, |
|
"grad_norm": 1.0644117593765259, |
|
"learning_rate": 1.6702018862733045e-05, |
|
"loss": 0.7796, |
|
"step": 10570 |
|
}, |
|
{ |
|
"epoch": 1.255786350148368, |
|
"grad_norm": 1.1088664531707764, |
|
"learning_rate": 1.6655753499216708e-05, |
|
"loss": 0.8424, |
|
"step": 10580 |
|
}, |
|
{ |
|
"epoch": 1.256973293768546, |
|
"grad_norm": 1.0059804916381836, |
|
"learning_rate": 1.660952027323665e-05, |
|
"loss": 0.8276, |
|
"step": 10590 |
|
}, |
|
{ |
|
"epoch": 1.258160237388724, |
|
"grad_norm": 0.947927713394165, |
|
"learning_rate": 1.6563319362858333e-05, |
|
"loss": 0.8039, |
|
"step": 10600 |
|
}, |
|
{ |
|
"epoch": 1.259347181008902, |
|
"grad_norm": 1.1396689414978027, |
|
"learning_rate": 1.6517150946022725e-05, |
|
"loss": 0.8359, |
|
"step": 10610 |
|
}, |
|
{ |
|
"epoch": 1.2605341246290802, |
|
"grad_norm": 1.1726751327514648, |
|
"learning_rate": 1.6471015200545682e-05, |
|
"loss": 0.7481, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 1.2617210682492581, |
|
"grad_norm": 0.9562637805938721, |
|
"learning_rate": 1.6424912304117183e-05, |
|
"loss": 0.7441, |
|
"step": 10630 |
|
}, |
|
{ |
|
"epoch": 1.2629080118694362, |
|
"grad_norm": 1.0816010236740112, |
|
"learning_rate": 1.6378842434300746e-05, |
|
"loss": 0.8136, |
|
"step": 10640 |
|
}, |
|
{ |
|
"epoch": 1.2640949554896141, |
|
"grad_norm": 1.0869776010513306, |
|
"learning_rate": 1.6332805768532628e-05, |
|
"loss": 0.8231, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 1.2652818991097923, |
|
"grad_norm": 0.6635779142379761, |
|
"learning_rate": 1.6286802484121244e-05, |
|
"loss": 0.7143, |
|
"step": 10660 |
|
}, |
|
{ |
|
"epoch": 1.2664688427299704, |
|
"grad_norm": 1.0061339139938354, |
|
"learning_rate": 1.624083275824644e-05, |
|
"loss": 0.7553, |
|
"step": 10670 |
|
}, |
|
{ |
|
"epoch": 1.2676557863501483, |
|
"grad_norm": 1.0020318031311035, |
|
"learning_rate": 1.6194896767958785e-05, |
|
"loss": 0.7943, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 1.2688427299703264, |
|
"grad_norm": 0.9754176735877991, |
|
"learning_rate": 1.614899469017894e-05, |
|
"loss": 0.7514, |
|
"step": 10690 |
|
}, |
|
{ |
|
"epoch": 1.2700296735905043, |
|
"grad_norm": 0.9844280481338501, |
|
"learning_rate": 1.610312670169695e-05, |
|
"loss": 0.8493, |
|
"step": 10700 |
|
}, |
|
{ |
|
"epoch": 1.2712166172106825, |
|
"grad_norm": 1.2735682725906372, |
|
"learning_rate": 1.6057292979171565e-05, |
|
"loss": 0.8771, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 1.2724035608308606, |
|
"grad_norm": 1.0701887607574463, |
|
"learning_rate": 1.6011493699129544e-05, |
|
"loss": 0.8163, |
|
"step": 10720 |
|
}, |
|
{ |
|
"epoch": 1.2735905044510385, |
|
"grad_norm": 1.1788859367370605, |
|
"learning_rate": 1.596572903796502e-05, |
|
"loss": 0.8017, |
|
"step": 10730 |
|
}, |
|
{ |
|
"epoch": 1.2747774480712166, |
|
"grad_norm": 1.3015997409820557, |
|
"learning_rate": 1.5919999171938792e-05, |
|
"loss": 0.8182, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 1.2759643916913945, |
|
"grad_norm": 0.8140105605125427, |
|
"learning_rate": 1.5874304277177616e-05, |
|
"loss": 0.8411, |
|
"step": 10750 |
|
}, |
|
{ |
|
"epoch": 1.2771513353115727, |
|
"grad_norm": 1.0661457777023315, |
|
"learning_rate": 1.582864452967359e-05, |
|
"loss": 0.7887, |
|
"step": 10760 |
|
}, |
|
{ |
|
"epoch": 1.2783382789317508, |
|
"grad_norm": 1.1822696924209595, |
|
"learning_rate": 1.5783020105283416e-05, |
|
"loss": 0.878, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 1.279525222551929, |
|
"grad_norm": 1.4426151514053345, |
|
"learning_rate": 1.5737431179727785e-05, |
|
"loss": 0.7839, |
|
"step": 10780 |
|
}, |
|
{ |
|
"epoch": 1.2807121661721068, |
|
"grad_norm": 1.173456072807312, |
|
"learning_rate": 1.5691877928590624e-05, |
|
"loss": 0.7866, |
|
"step": 10790 |
|
}, |
|
{ |
|
"epoch": 1.281899109792285, |
|
"grad_norm": 1.166433334350586, |
|
"learning_rate": 1.5646360527318498e-05, |
|
"loss": 0.7891, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 1.2830860534124628, |
|
"grad_norm": 0.8934957385063171, |
|
"learning_rate": 1.5600879151219878e-05, |
|
"loss": 0.8032, |
|
"step": 10810 |
|
}, |
|
{ |
|
"epoch": 1.284272997032641, |
|
"grad_norm": 0.9699043035507202, |
|
"learning_rate": 1.5555433975464488e-05, |
|
"loss": 0.8116, |
|
"step": 10820 |
|
}, |
|
{ |
|
"epoch": 1.285459940652819, |
|
"grad_norm": 1.186395287513733, |
|
"learning_rate": 1.551002517508264e-05, |
|
"loss": 0.7575, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 1.286646884272997, |
|
"grad_norm": 1.1024322509765625, |
|
"learning_rate": 1.546465292496452e-05, |
|
"loss": 0.8077, |
|
"step": 10840 |
|
}, |
|
{ |
|
"epoch": 1.2878338278931751, |
|
"grad_norm": 1.213882565498352, |
|
"learning_rate": 1.541931739985958e-05, |
|
"loss": 0.7592, |
|
"step": 10850 |
|
}, |
|
{ |
|
"epoch": 1.289020771513353, |
|
"grad_norm": 1.1226390600204468, |
|
"learning_rate": 1.537401877437578e-05, |
|
"loss": 0.7934, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 1.2902077151335312, |
|
"grad_norm": 0.8497867584228516, |
|
"learning_rate": 1.5328757222979025e-05, |
|
"loss": 0.787, |
|
"step": 10870 |
|
}, |
|
{ |
|
"epoch": 1.2913946587537093, |
|
"grad_norm": 1.041912317276001, |
|
"learning_rate": 1.5283532919992362e-05, |
|
"loss": 0.8114, |
|
"step": 10880 |
|
}, |
|
{ |
|
"epoch": 1.2925816023738872, |
|
"grad_norm": 1.0151110887527466, |
|
"learning_rate": 1.5238346039595433e-05, |
|
"loss": 0.8071, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 1.2937685459940653, |
|
"grad_norm": 1.183481216430664, |
|
"learning_rate": 1.5193196755823714e-05, |
|
"loss": 0.7622, |
|
"step": 10900 |
|
}, |
|
{ |
|
"epoch": 1.2949554896142432, |
|
"grad_norm": 1.1755239963531494, |
|
"learning_rate": 1.5148085242567894e-05, |
|
"loss": 0.8257, |
|
"step": 10910 |
|
}, |
|
{ |
|
"epoch": 1.2961424332344214, |
|
"grad_norm": 1.057356834411621, |
|
"learning_rate": 1.5103011673573193e-05, |
|
"loss": 0.8093, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 1.2973293768545995, |
|
"grad_norm": 0.8958760499954224, |
|
"learning_rate": 1.5057976222438667e-05, |
|
"loss": 0.722, |
|
"step": 10930 |
|
}, |
|
{ |
|
"epoch": 1.2985163204747774, |
|
"grad_norm": 1.42214834690094, |
|
"learning_rate": 1.50129790626166e-05, |
|
"loss": 0.8296, |
|
"step": 10940 |
|
}, |
|
{ |
|
"epoch": 1.2997032640949555, |
|
"grad_norm": 1.1652711629867554, |
|
"learning_rate": 1.4968020367411758e-05, |
|
"loss": 0.8211, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 1.3008902077151334, |
|
"grad_norm": 0.9645259380340576, |
|
"learning_rate": 1.4923100309980805e-05, |
|
"loss": 0.7363, |
|
"step": 10960 |
|
}, |
|
{ |
|
"epoch": 1.3020771513353115, |
|
"grad_norm": 1.1167161464691162, |
|
"learning_rate": 1.487821906333155e-05, |
|
"loss": 0.8433, |
|
"step": 10970 |
|
}, |
|
{ |
|
"epoch": 1.3032640949554897, |
|
"grad_norm": 0.841799795627594, |
|
"learning_rate": 1.4833376800322352e-05, |
|
"loss": 0.8135, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 1.3044510385756676, |
|
"grad_norm": 1.0974364280700684, |
|
"learning_rate": 1.4788573693661429e-05, |
|
"loss": 0.8523, |
|
"step": 10990 |
|
}, |
|
{ |
|
"epoch": 1.3056379821958457, |
|
"grad_norm": 1.2325725555419922, |
|
"learning_rate": 1.4743809915906162e-05, |
|
"loss": 0.8301, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 1.3056379821958457, |
|
"eval_loss": 0.7716161012649536, |
|
"eval_runtime": 773.4944, |
|
"eval_samples_per_second": 2.293, |
|
"eval_steps_per_second": 0.574, |
|
"step": 11000 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 16850, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 4, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 7.667075811983229e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|