{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 100,
  "global_step": 3897,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "grad_norm": 45.099327087402344, "learning_rate": 0.00019948678470618425, "loss": 2.0895, "step": 10},
    {"epoch": 0.01, "grad_norm": 34.96247100830078, "learning_rate": 0.0001989735694123685, "loss": 2.3707, "step": 20},
    {"epoch": 0.01, "grad_norm": 16.490638732910156, "learning_rate": 0.00019846035411855275, "loss": 1.3701, "step": 30},
    {"epoch": 0.01, "grad_norm": 0.11159859597682953, "learning_rate": 0.000197947138824737, "loss": 1.0582, "step": 40},
    {"epoch": 0.01, "grad_norm": 0.12606161832809448, "learning_rate": 0.00019743392353092123, "loss": 1.1387, "step": 50},
    {"epoch": 0.02, "grad_norm": 4.384986400604248, "learning_rate": 0.00019692070823710547, "loss": 2.2113, "step": 60},
    {"epoch": 0.02, "grad_norm": 0.7131040692329407, "learning_rate": 0.00019640749294328974, "loss": 0.6345, "step": 70},
    {"epoch": 0.02, "grad_norm": 5.308964252471924, "learning_rate": 0.00019589427764947397, "loss": 1.0728, "step": 80},
    {"epoch": 0.02, "grad_norm": 0.04876876622438431, "learning_rate": 0.0001953810623556582, "loss": 1.6609, "step": 90},
    {"epoch": 0.03, "grad_norm": 8.89411735534668, "learning_rate": 0.00019486784706184245, "loss": 1.8662, "step": 100},
    {"epoch": 0.03, "grad_norm": 0.05204994976520538, "learning_rate": 0.00019435463176802672, "loss": 0.5977, "step": 110},
    {"epoch": 0.03, "grad_norm": 34.89448165893555, "learning_rate": 0.00019384141647421096, "loss": 2.2815, "step": 120},
    {"epoch": 0.03, "grad_norm": 15.786956787109375, "learning_rate": 0.0001933282011803952, "loss": 1.6381, "step": 130},
    {"epoch": 0.04, "grad_norm": 29.863065719604492, "learning_rate": 0.00019281498588657943, "loss": 0.7649, "step": 140},
    {"epoch": 0.04, "grad_norm": 16.698570251464844, "learning_rate": 0.00019230177059276367, "loss": 1.1147, "step": 150},
    {"epoch": 0.04, "grad_norm": 34.869384765625, "learning_rate": 0.00019178855529894794, "loss": 1.4108, "step": 160},
    {"epoch": 0.04, "grad_norm": 0.3193562924861908, "learning_rate": 0.00019127534000513218, "loss": 1.0166, "step": 170},
    {"epoch": 0.05, "grad_norm": 23.633769989013672, "learning_rate": 0.00019076212471131642, "loss": 1.0537, "step": 180},
    {"epoch": 0.05, "grad_norm": 0.03772430121898651, "learning_rate": 0.00019024890941750065, "loss": 0.6254, "step": 190},
    {"epoch": 0.05, "grad_norm": 2.246229648590088, "learning_rate": 0.0001897356941236849, "loss": 2.0714, "step": 200},
    {"epoch": 0.05, "grad_norm": 2.4526054859161377, "learning_rate": 0.00018922247882986913, "loss": 1.0681, "step": 210},
    {"epoch": 0.06, "grad_norm": 1.3229526281356812, "learning_rate": 0.00018870926353605337, "loss": 0.6116, "step": 220},
    {"epoch": 0.06, "grad_norm": 0.042341507971286774, "learning_rate": 0.0001881960482422376, "loss": 1.4669, "step": 230},
    {"epoch": 0.06, "grad_norm": 35.0599479675293, "learning_rate": 0.00018768283294842188, "loss": 0.6897, "step": 240},
    {"epoch": 0.06, "grad_norm": 0.21465550363063812, "learning_rate": 0.00018716961765460611, "loss": 1.8696, "step": 250},
    {"epoch": 0.07, "grad_norm": 34.034175872802734, "learning_rate": 0.00018665640236079035, "loss": 1.5576, "step": 260},
    {"epoch": 0.07, "grad_norm": 34.895172119140625, "learning_rate": 0.0001861431870669746, "loss": 1.6542, "step": 270},
    {"epoch": 0.07, "grad_norm": 0.30317962169647217, "learning_rate": 0.00018562997177315883, "loss": 0.9893, "step": 280},
    {"epoch": 0.07, "grad_norm": 31.76571273803711, "learning_rate": 0.0001851167564793431, "loss": 2.43, "step": 290},
    {"epoch": 0.08, "grad_norm": 3.9265520572662354, "learning_rate": 0.00018460354118552733, "loss": 0.6246, "step": 300},
    {"epoch": 0.08, "grad_norm": 2.8710033893585205, "learning_rate": 0.00018409032589171157, "loss": 1.2509, "step": 310},
    {"epoch": 0.08, "grad_norm": 28.637104034423828, "learning_rate": 0.0001835771105978958, "loss": 1.4902, "step": 320},
    {"epoch": 0.08, "grad_norm": 4.821115970611572, "learning_rate": 0.00018306389530408008, "loss": 0.7693, "step": 330},
    {"epoch": 0.09, "grad_norm": 0.466738224029541, "learning_rate": 0.00018255068001026432, "loss": 1.9563, "step": 340},
    {"epoch": 0.09, "grad_norm": 34.78805160522461, "learning_rate": 0.00018203746471644856, "loss": 1.4862, "step": 350},
    {"epoch": 0.09, "grad_norm": 5.344199180603027, "learning_rate": 0.0001815242494226328, "loss": 1.1602, "step": 360},
    {"epoch": 0.09, "grad_norm": 0.4355538785457611, "learning_rate": 0.00018101103412881703, "loss": 1.2546, "step": 370},
    {"epoch": 0.1, "grad_norm": 0.7162923216819763, "learning_rate": 0.0001804978188350013, "loss": 3.0478, "step": 380},
    {"epoch": 0.1, "grad_norm": 0.09367480874061584, "learning_rate": 0.00017998460354118554, "loss": 1.0921, "step": 390},
    {"epoch": 0.1, "grad_norm": 0.20555052161216736, "learning_rate": 0.00017947138824736978, "loss": 0.0523, "step": 400},
    {"epoch": 0.11, "grad_norm": 0.08092090487480164, "learning_rate": 0.00017895817295355402, "loss": 1.5604, "step": 410},
    {"epoch": 0.11, "grad_norm": 0.32685595750808716, "learning_rate": 0.00017844495765973828, "loss": 0.5272, "step": 420},
    {"epoch": 0.11, "grad_norm": 0.5422254800796509, "learning_rate": 0.00017793174236592252, "loss": 3.4396, "step": 430},
    {"epoch": 0.11, "grad_norm": 35.279212951660156, "learning_rate": 0.00017741852707210676, "loss": 0.8554, "step": 440},
    {"epoch": 0.12, "grad_norm": 26.898597717285156, "learning_rate": 0.000176905311778291, "loss": 0.6776, "step": 450},
    {"epoch": 0.12, "grad_norm": 0.39926862716674805, "learning_rate": 0.00017639209648447526, "loss": 2.0551, "step": 460},
    {"epoch": 0.12, "grad_norm": 0.39992421865463257, "learning_rate": 0.0001758788811906595, "loss": 0.6316, "step": 470},
    {"epoch": 0.12, "grad_norm": 0.15786024928092957, "learning_rate": 0.00017536566589684374, "loss": 2.5885, "step": 480},
    {"epoch": 0.13, "grad_norm": 35.12531280517578, "learning_rate": 0.00017485245060302798, "loss": 1.5028, "step": 490},
    {"epoch": 0.13, "grad_norm": 1.712053656578064, "learning_rate": 0.00017433923530921222, "loss": 0.7324, "step": 500},
    {"epoch": 0.13, "grad_norm": 28.602834701538086, "learning_rate": 0.00017382602001539648, "loss": 2.7516, "step": 510},
    {"epoch": 0.13, "grad_norm": 8.03342342376709, "learning_rate": 0.00017331280472158072, "loss": 0.3827, "step": 520},
    {"epoch": 0.14, "grad_norm": 33.861934661865234, "learning_rate": 0.00017279958942776496, "loss": 1.1201, "step": 530},
    {"epoch": 0.14, "grad_norm": 0.0627857968211174, "learning_rate": 0.0001722863741339492, "loss": 0.6096, "step": 540},
    {"epoch": 0.14, "grad_norm": 34.3585319519043, "learning_rate": 0.00017177315884013344, "loss": 1.4903, "step": 550},
    {"epoch": 0.14, "grad_norm": 33.19193649291992, "learning_rate": 0.00017125994354631768, "loss": 1.4301, "step": 560},
    {"epoch": 0.15, "grad_norm": 34.12609100341797, "learning_rate": 0.00017074672825250194, "loss": 1.1228, "step": 570},
    {"epoch": 0.15, "grad_norm": 0.07128182798624039, "learning_rate": 0.00017023351295868618, "loss": 0.8195, "step": 580},
    {"epoch": 0.15, "grad_norm": 34.57550048828125, "learning_rate": 0.00016972029766487042, "loss": 1.9291, "step": 590},
    {"epoch": 0.15, "grad_norm": 60.265777587890625, "learning_rate": 0.00016920708237105466, "loss": 2.2235, "step": 600},
    {"epoch": 0.16, "grad_norm": 33.86371612548828, "learning_rate": 0.0001686938670772389, "loss": 1.101, "step": 610},
    {"epoch": 0.16, "grad_norm": 1.018540620803833, "learning_rate": 0.00016818065178342314, "loss": 1.0946, "step": 620},
    {"epoch": 0.16, "grad_norm": 0.05546221882104874, "learning_rate": 0.00016766743648960738, "loss": 1.3699, "step": 630},
    {"epoch": 0.16, "grad_norm": 0.4712983965873718, "learning_rate": 0.00016715422119579164, "loss": 2.366, "step": 640},
    {"epoch": 0.17, "grad_norm": 0.1462753266096115, "learning_rate": 0.00016664100590197588, "loss": 1.1087, "step": 650},
    {"epoch": 0.17, "grad_norm": 0.0955154225230217, "learning_rate": 0.00016612779060816012, "loss": 0.0171, "step": 660},
    {"epoch": 0.17, "grad_norm": 35.19109344482422, "learning_rate": 0.00016561457531434436, "loss": 1.6907, "step": 670},
    {"epoch": 0.17, "grad_norm": 0.16912657022476196, "learning_rate": 0.00016510136002052862, "loss": 0.0055, "step": 680},
    {"epoch": 0.18, "grad_norm": 34.529685974121094, "learning_rate": 0.00016458814472671286, "loss": 2.8261, "step": 690},
    {"epoch": 0.18, "grad_norm": 13.726387023925781, "learning_rate": 0.0001640749294328971, "loss": 1.7874, "step": 700},
    {"epoch": 0.18, "grad_norm": 29.94759750366211, "learning_rate": 0.00016356171413908134, "loss": 1.3416, "step": 710},
    {"epoch": 0.18, "grad_norm": 0.5144930481910706, "learning_rate": 0.0001630484988452656, "loss": 0.7381, "step": 720},
    {"epoch": 0.19, "grad_norm": 0.4536816477775574, "learning_rate": 0.00016253528355144985, "loss": 1.7744, "step": 730},
    {"epoch": 0.19, "grad_norm": 34.2490119934082, "learning_rate": 0.00016202206825763408, "loss": 1.6362, "step": 740},
    {"epoch": 0.19, "grad_norm": 0.7601956725120544, "learning_rate": 0.00016150885296381832, "loss": 1.0899, "step": 750},
    {"epoch": 0.2, "grad_norm": 0.16101330518722534, "learning_rate": 0.00016099563767000256, "loss": 1.4228, "step": 760},
    {"epoch": 0.2, "grad_norm": 2.7051339149475098, "learning_rate": 0.00016048242237618683, "loss": 1.2515, "step": 770},
    {"epoch": 0.2, "grad_norm": 34.69645690917969, "learning_rate": 0.00015996920708237107, "loss": 1.1568, "step": 780},
    {"epoch": 0.2, "grad_norm": 0.5005641579627991, "learning_rate": 0.0001594559917885553, "loss": 1.8697, "step": 790},
    {"epoch": 0.21, "grad_norm": 0.9808595776557922, "learning_rate": 0.00015894277649473954, "loss": 1.867, "step": 800},
    {"epoch": 0.21, "grad_norm": 34.12071228027344, "learning_rate": 0.0001584295612009238, "loss": 1.2028, "step": 810},
    {"epoch": 0.21, "grad_norm": 1.4215242862701416, "learning_rate": 0.00015791634590710805, "loss": 1.5028, "step": 820},
    {"epoch": 0.21, "grad_norm": 32.7039794921875, "learning_rate": 0.0001574031306132923, "loss": 1.851, "step": 830},
    {"epoch": 0.22, "grad_norm": 11.424134254455566, "learning_rate": 0.00015688991531947653, "loss": 1.1995, "step": 840},
    {"epoch": 0.22, "grad_norm": 18.550819396972656, "learning_rate": 0.0001563767000256608, "loss": 0.5795, "step": 850},
    {"epoch": 0.22, "grad_norm": 25.513269424438477, "learning_rate": 0.00015586348473184503, "loss": 0.6537, "step": 860},
    {"epoch": 0.22, "grad_norm": 0.043114595115184784, "learning_rate": 0.00015535026943802927, "loss": 1.4115, "step": 870},
    {"epoch": 0.23, "grad_norm": 0.06105157732963562, "learning_rate": 0.0001548370541442135, "loss": 0.5029, "step": 880},
    {"epoch": 0.23, "grad_norm": 6.027977466583252, "learning_rate": 0.00015432383885039775, "loss": 1.1377, "step": 890},
    {"epoch": 0.23, "grad_norm": 9.911833763122559, "learning_rate": 0.000153810623556582, "loss": 0.6785, "step": 900},
    {"epoch": 0.23, "grad_norm": 0.08647739142179489, "learning_rate": 0.00015329740826276625, "loss": 0.3428, "step": 910},
    {"epoch": 0.24, "grad_norm": 0.06115918606519699, "learning_rate": 0.0001527841929689505, "loss": 0.5422, "step": 920},
    {"epoch": 0.24, "grad_norm": 33.05713653564453, "learning_rate": 0.00015227097767513473, "loss": 0.9532, "step": 930},
    {"epoch": 0.24, "grad_norm": 2.755101442337036, "learning_rate": 0.00015175776238131897, "loss": 0.8933, "step": 940},
    {"epoch": 0.24, "grad_norm": 1.09266197681427, "learning_rate": 0.0001512445470875032, "loss": 1.3081, "step": 950},
    {"epoch": 0.25, "grad_norm": 1.8350268602371216, "learning_rate": 0.00015073133179368745, "loss": 1.6349, "step": 960},
    {"epoch": 0.25, "grad_norm": 28.465572357177734, "learning_rate": 0.00015021811649987168, "loss": 1.4432, "step": 970},
    {"epoch": 0.25, "grad_norm": 24.18708038330078, "learning_rate": 0.00014970490120605595, "loss": 0.8608, "step": 980},
    {"epoch": 0.25, "grad_norm": 0.06248585879802704, "learning_rate": 0.0001491916859122402, "loss": 0.7754, "step": 990},
    {"epoch": 0.26, "grad_norm": 0.02755439281463623, "learning_rate": 0.00014867847061842443, "loss": 0.3644, "step": 1000},
    {"epoch": 0.26, "grad_norm": 0.03344785049557686, "learning_rate": 0.00014816525532460867, "loss": 2.7486, "step": 1010},
    {"epoch": 0.26, "grad_norm": 4.339983940124512, "learning_rate": 0.0001476520400307929, "loss": 0.8171, "step": 1020},
    {"epoch": 0.26, "grad_norm": 4.660573959350586, "learning_rate": 0.00014713882473697717, "loss": 2.1821, "step": 1030},
    {"epoch": 0.27, "grad_norm": 21.810422897338867, "learning_rate": 0.0001466256094431614, "loss": 1.0611, "step": 1040},
    {"epoch": 0.27, "grad_norm": 33.206607818603516, "learning_rate": 0.00014611239414934565, "loss": 1.5671, "step": 1050},
    {"epoch": 0.27, "grad_norm": 0.678447425365448, "learning_rate": 0.0001455991788555299, "loss": 1.0065, "step": 1060},
    {"epoch": 0.27, "grad_norm": 32.59206771850586, "learning_rate": 0.00014508596356171415, "loss": 1.8422, "step": 1070},
    {"epoch": 0.28, "grad_norm": 28.002819061279297, "learning_rate": 0.0001445727482678984, "loss": 1.1739, "step": 1080},
    {"epoch": 0.28, "grad_norm": 4.193221092224121, "learning_rate": 0.00014405953297408263, "loss": 0.431, "step": 1090},
    {"epoch": 0.28, "grad_norm": 34.987388610839844, "learning_rate": 0.00014354631768026687, "loss": 1.1974, "step": 1100},
    {"epoch": 0.28, "grad_norm": 0.1439794898033142, "learning_rate": 0.0001430331023864511, "loss": 2.1824, "step": 1110},
    {"epoch": 0.29, "grad_norm": 0.07825344055891037, "learning_rate": 0.00014251988709263537, "loss": 1.103, "step": 1120},
    {"epoch": 0.29, "grad_norm": 34.14964294433594, "learning_rate": 0.0001420066717988196, "loss": 2.1521, "step": 1130},
    {"epoch": 0.29, "grad_norm": 1.839199185371399, "learning_rate": 0.00014149345650500385, "loss": 0.0536, "step": 1140},
    {"epoch": 0.3, "grad_norm": 34.573585510253906, "learning_rate": 0.0001409802412111881, "loss": 1.5661, "step": 1150},
    {"epoch": 0.3, "grad_norm": 0.41036948561668396, "learning_rate": 0.00014046702591737236, "loss": 1.1817, "step": 1160},
    {"epoch": 0.3, "grad_norm": 0.08495642244815826, "learning_rate": 0.0001399538106235566, "loss": 1.5704, "step": 1170},
    {"epoch": 0.3, "grad_norm": 3.0547990798950195, "learning_rate": 0.00013944059532974083, "loss": 1.5402, "step": 1180},
    {"epoch": 0.31, "grad_norm": 7.8949713706970215, "learning_rate": 0.00013892738003592507, "loss": 0.4595, "step": 1190},
    {"epoch": 0.31, "grad_norm": 24.375001907348633, "learning_rate": 0.00013841416474210934, "loss": 0.7511, "step": 1200},
    {"epoch": 0.31, "grad_norm": 12.163422584533691, "learning_rate": 0.00013790094944829358, "loss": 0.5726, "step": 1210},
    {"epoch": 0.31, "grad_norm": 34.362632751464844, "learning_rate": 0.00013738773415447782, "loss": 1.3018, "step": 1220},
    {"epoch": 0.32, "grad_norm": 21.605195999145508, "learning_rate": 0.00013687451886066205, "loss": 1.2047, "step": 1230},
    {"epoch": 0.32, "grad_norm": 4.356873035430908, "learning_rate": 0.0001363613035668463, "loss": 1.3787, "step": 1240},
    {"epoch": 0.32, "grad_norm": 12.701129913330078, "learning_rate": 0.00013584808827303056, "loss": 1.0334, "step": 1250},
    {"epoch": 0.32, "grad_norm": 7.2957892417907715, "learning_rate": 0.0001353348729792148, "loss": 0.2939, "step": 1260},
    {"epoch": 0.33, "grad_norm": 0.25156155228614807, "learning_rate": 0.00013482165768539904, "loss": 0.7811, "step": 1270},
    {"epoch": 0.33, "grad_norm": 0.8677279353141785, "learning_rate": 0.00013430844239158328, "loss": 1.5475, "step": 1280},
    {"epoch": 0.33, "grad_norm": 0.13093720376491547, "learning_rate": 0.00013379522709776754, "loss": 0.7902, "step": 1290},
    {"epoch": 0.33, "grad_norm": 0.2733534276485443, "learning_rate": 0.00013328201180395178, "loss": 0.9059, "step": 1300},
    {"epoch": 0.34, "grad_norm": 34.48268508911133, "learning_rate": 0.00013276879651013602, "loss": 1.5932, "step": 1310},
    {"epoch": 0.34, "grad_norm": 33.39665603637695, "learning_rate": 0.00013225558121632026, "loss": 2.4436, "step": 1320},
    {"epoch": 0.34, "grad_norm": 0.6858634948730469, "learning_rate": 0.0001317423659225045, "loss": 0.8113, "step": 1330},
    {"epoch": 0.34, "grad_norm": 32.40642166137695, "learning_rate": 0.00013122915062868873, "loss": 2.1392, "step": 1340},
    {"epoch": 0.35, "grad_norm": 0.05664900690317154, "learning_rate": 0.00013071593533487297, "loss": 0.8724, "step": 1350},
    {"epoch": 0.35, "grad_norm": 30.9477481842041, "learning_rate": 0.0001302027200410572, "loss": 1.2746, "step": 1360},
    {"epoch": 0.35, "grad_norm": 2.9625468254089355, "learning_rate": 0.00012968950474724145, "loss": 2.0471, "step": 1370},
    {"epoch": 0.35, "grad_norm": 6.3701629638671875, "learning_rate": 0.00012917628945342572, "loss": 0.9569, "step": 1380},
    {"epoch": 0.36, "grad_norm": 5.0901007652282715, "learning_rate": 0.00012866307415960996, "loss": 0.8391, "step": 1390},
    {"epoch": 0.36, "grad_norm": 2.827472448348999, "learning_rate": 0.0001281498588657942, "loss": 0.5071, "step": 1400},
    {"epoch": 0.36, "grad_norm": 0.24257418513298035, "learning_rate": 0.00012763664357197843, "loss": 0.0258, "step": 1410},
    {"epoch": 0.36, "grad_norm": 34.39240264892578, "learning_rate": 0.0001271234282781627, "loss": 1.6826, "step": 1420},
    {"epoch": 0.37, "grad_norm": 0.06999631226062775, "learning_rate": 0.00012661021298434694, "loss": 2.854, "step": 1430},
    {"epoch": 0.37, "grad_norm": 0.7731397151947021, "learning_rate": 0.00012609699769053118, "loss": 1.5991, "step": 1440},
    {"epoch": 0.37, "grad_norm": 0.7090578675270081, "learning_rate": 0.00012558378239671542, "loss": 1.1829, "step": 1450},
    {"epoch": 0.37, "grad_norm": 33.13046646118164, "learning_rate": 0.00012507056710289968, "loss": 1.4736, "step": 1460},
    {"epoch": 0.38, "grad_norm": 0.6540558338165283, "learning_rate": 0.00012455735180908392, "loss": 1.6983, "step": 1470},
    {"epoch": 0.38, "grad_norm": 1.6605318784713745, "learning_rate": 0.00012404413651526816, "loss": 1.5005, "step": 1480},
    {"epoch": 0.38, "grad_norm": 31.07722282409668, "learning_rate": 0.0001235309212214524, "loss": 1.4853, "step": 1490},
    {"epoch": 0.38, "grad_norm": 1.135640025138855, "learning_rate": 0.00012301770592763664, "loss": 0.9949, "step": 1500},
    {"epoch": 0.39, "grad_norm": 0.14291101694107056, "learning_rate": 0.0001225044906338209, "loss": 1.7112, "step": 1510},
    {"epoch": 0.39, "grad_norm": 0.07842687517404556, "learning_rate": 0.00012199127534000514, "loss": 0.677, "step": 1520},
    {"epoch": 0.39, "grad_norm": 0.6385894417762756, "learning_rate": 0.00012147806004618938, "loss": 1.0463, "step": 1530},
    {"epoch": 0.4, "grad_norm": 34.2146110534668, "learning_rate": 0.00012096484475237362, "loss": 2.0908, "step": 1540},
    {"epoch": 0.4, "grad_norm": 0.15750457346439362, "learning_rate": 0.00012045162945855788, "loss": 1.6537, "step": 1550},
    {"epoch": 0.4, "grad_norm": 0.15273036062717438, "learning_rate": 0.00011993841416474212, "loss": 1.8717, "step": 1560},
    {"epoch": 0.4, "grad_norm": 4.405086517333984, "learning_rate": 0.00011942519887092636, "loss": 0.3942, "step": 1570},
    {"epoch": 0.41, "grad_norm": 2.592953681945801, "learning_rate": 0.0001189119835771106, "loss": 0.3217, "step": 1580},
    {"epoch": 0.41, "grad_norm": 0.5546024441719055, "learning_rate": 0.00011839876828329485, "loss": 1.0285, "step": 1590},
    {"epoch": 0.41, "grad_norm": 0.09479659050703049, "learning_rate": 0.00011788555298947909, "loss": 1.7519, "step": 1600},
    {"epoch": 0.41, "grad_norm": 0.09771596640348434, "learning_rate": 0.00011737233769566333, "loss": 1.2341, "step": 1610},
    {"epoch": 0.42, "grad_norm": 33.37510681152344, "learning_rate": 0.00011685912240184758, "loss": 3.3626, "step": 1620},
    {"epoch": 0.42, "grad_norm": 32.650489807128906, "learning_rate": 0.00011634590710803182, "loss": 0.9869, "step": 1630},
    {"epoch": 0.42, "grad_norm": 32.52039337158203, "learning_rate": 0.00011583269181421607, "loss": 1.8117, "step": 1640},
    {"epoch": 0.42, "grad_norm": 15.11254596710205, "learning_rate": 0.00011531947652040031, "loss": 1.288, "step": 1650},
    {"epoch": 0.43, "grad_norm": 3.6993765830993652, "learning_rate": 0.00011480626122658455, "loss": 0.863, "step": 1660},
    {"epoch": 0.43, "grad_norm": 34.809791564941406, "learning_rate": 0.00011429304593276879, "loss": 0.6192, "step": 1670},
    {"epoch": 0.43, "grad_norm": 0.10077141970396042, "learning_rate": 0.00011377983063895306, "loss": 0.571, "step": 1680},
    {"epoch": 0.43, "grad_norm": 0.07107020914554596, "learning_rate": 0.0001132666153451373, "loss": 0.996, "step": 1690},
    {"epoch": 0.44, "grad_norm": 33.04214096069336, "learning_rate": 0.00011275340005132153, "loss": 2.6481, "step": 1700},
    {"epoch": 0.44, "grad_norm": 32.640541076660156, "learning_rate": 0.00011224018475750577, "loss": 2.1778, "step": 1710},
    {"epoch": 0.44, "grad_norm": 1.7503925561904907, "learning_rate": 0.00011172696946369004, "loss": 1.5489, "step": 1720},
    {"epoch": 0.44, "grad_norm": 0.348849892616272, "learning_rate": 0.00011121375416987428, "loss": 1.1267, "step": 1730},
    {"epoch": 0.45, "grad_norm": 0.8353062272071838, "learning_rate": 0.00011070053887605852, "loss": 0.0233, "step": 1740},
    {"epoch": 0.45, "grad_norm": 0.4115784466266632, "learning_rate": 0.00011018732358224275, "loss": 1.3895, "step": 1750},
    {"epoch": 0.45, "grad_norm": 0.09716140478849411, "learning_rate": 0.00010967410828842699, "loss": 0.9971, "step": 1760},
    {"epoch": 0.45, "grad_norm": 0.07956521958112717, "learning_rate": 0.00010916089299461126, "loss": 0.4633, "step": 1770},
    {"epoch": 0.46, "grad_norm": 33.34837341308594, "learning_rate": 0.0001086476777007955, "loss": 0.5945, "step": 1780},
    {"epoch": 0.46, "grad_norm": 33.278682708740234, "learning_rate": 0.00010813446240697974, "loss": 1.0137, "step": 1790},
    {"epoch": 0.46, "grad_norm": 32.34352111816406, "learning_rate": 0.00010762124711316398, "loss": 2.238, "step": 1800},
    {"epoch": 0.46, "grad_norm": 0.8423387408256531, "learning_rate": 0.00010710803181934823, "loss": 0.0119, "step": 1810},
    {"epoch": 0.47, "grad_norm": 0.43691009283065796, "learning_rate": 0.00010659481652553247, "loss": 0.7785, "step": 1820},
    {"epoch": 0.47, "grad_norm": 0.27386584877967834, "learning_rate": 0.0001060816012317167, "loss": 1.4121, "step": 1830},
    {"epoch": 0.47, "grad_norm": 0.035622984170913696, "learning_rate": 0.00010556838593790094, "loss": 1.8601, "step": 1840},
    {"epoch": 0.47, "grad_norm": 0.6981528401374817, "learning_rate": 0.00010505517064408518, "loss": 1.2077, "step": 1850},
    {"epoch": 0.48, "grad_norm": 32.1064567565918, "learning_rate": 0.00010454195535026945, "loss": 1.5198, "step": 1860},
    {"epoch": 0.48, "grad_norm": 30.88555908203125, "learning_rate": 0.00010402874005645369, "loss": 2.0069, "step": 1870},
    {"epoch": 0.48, "grad_norm": 0.03494291752576828, "learning_rate": 0.00010351552476263793, "loss": 0.3124, "step": 1880},
    {"epoch": 0.48, "grad_norm": 1.9848475456237793, "learning_rate": 0.00010300230946882216, "loss": 0.8303, "step": 1890},
    {"epoch": 0.49, "grad_norm": 31.325864791870117, "learning_rate": 0.00010248909417500643, "loss": 0.9595, "step": 1900},
    {"epoch": 0.49, "grad_norm": 0.025403378531336784, "learning_rate": 0.00010197587888119067, "loss": 0.362, "step": 1910},
    {"epoch": 0.49, "grad_norm": 1.1182492971420288, "learning_rate": 0.00010146266358737491, "loss": 1.4183, "step": 1920},
    {"epoch": 0.5, "grad_norm": 0.0197270717471838, "learning_rate": 0.00010094944829355915, "loss": 0.9907, "step": 1930},
    {"epoch": 0.5, "grad_norm": 1.5864602327346802, "learning_rate": 0.00010043623299974341, "loss": 0.9328, "step": 1940},
    {"epoch": 0.5, "grad_norm": 0.019860761240124702, "learning_rate": 9.992301770592765e-05, "loss": 1.2027, "step": 1950},
    {"epoch": 0.5, "grad_norm": 0.27393853664398193, "learning_rate": 9.940980241211189e-05, "loss": 1.2231, "step": 1960},
    {"epoch": 0.51, "grad_norm": 0.06520062685012817, "learning_rate": 9.889658711829613e-05, "loss": 1.1252, "step": 1970},
    {"epoch": 0.51, "grad_norm": 0.10603094100952148, "learning_rate": 9.838337182448038e-05, "loss": 0.5362, "step": 1980},
    {"epoch": 0.51, "grad_norm": 0.21443594992160797, "learning_rate": 9.787015653066462e-05, "loss": 1.6563, "step": 1990},
    {"epoch": 0.51, "grad_norm": 0.15189635753631592, "learning_rate": 9.735694123684886e-05, "loss": 1.5657, "step": 2000},
    {"epoch": 0.52, "grad_norm": 32.36518478393555, "learning_rate": 9.68437259430331e-05, "loss": 2.1702, "step": 2010},
    {"epoch": 0.52, "grad_norm": 0.2264794558286667, "learning_rate": 9.633051064921735e-05, "loss": 1.2663, "step": 2020},
    {"epoch": 0.52, "grad_norm": 30.200082778930664, "learning_rate": 9.581729535540159e-05, "loss": 2.2151, "step": 2030},
    {"epoch": 0.52, "grad_norm": 3.5600388050079346, "learning_rate": 9.530408006158584e-05, "loss": 0.7643, "step": 2040},
    {"epoch": 0.53, "grad_norm": 0.1124127209186554, "learning_rate": 9.479086476777008e-05, "loss": 0.679, "step": 2050},
    {"epoch": 0.53, "grad_norm": 28.294071197509766, "learning_rate": 9.427764947395433e-05, "loss": 1.5172, "step": 2060},
    {"epoch": 0.53, "grad_norm": 26.79751205444336, "learning_rate": 9.376443418013857e-05, "loss": 0.4192, "step": 2070},
    {"epoch": 0.53, "grad_norm": 0.1510336697101593, "learning_rate": 9.325121888632282e-05, "loss": 2.1576, "step": 2080},
    {"epoch": 0.54, "grad_norm": 0.32501623034477234, "learning_rate": 9.273800359250706e-05, "loss": 1.1192, "step": 2090},
    {"epoch": 0.54, "grad_norm": 26.4614315032959, "learning_rate": 9.22247882986913e-05, "loss": 0.7766, "step": 2100},
    {"epoch": 0.54, "grad_norm": 0.20047006011009216, "learning_rate": 9.171157300487555e-05, "loss": 0.6836, "step": 2110},
    {"epoch": 0.54, "grad_norm": 10.277036666870117, "learning_rate": 9.119835771105979e-05, "loss": 0.4951, "step": 2120},
    {"epoch": 0.55, "grad_norm": 9.455358505249023, "learning_rate": 9.068514241724404e-05, "loss": 0.2494, "step": 2130},
    {"epoch": 0.55, "grad_norm": 5.371951103210449, "learning_rate": 9.017192712342828e-05, "loss": 0.7672, "step": 2140},
    {"epoch": 0.55, "grad_norm": 4.935911655426025, "learning_rate": 8.965871182961254e-05, "loss": 0.7815, "step": 2150},
    {"epoch": 0.55, "grad_norm": 0.05824290215969086, "learning_rate": 8.914549653579677e-05, "loss": 0.4563, "step": 2160},
    {"epoch": 0.56, "grad_norm": 29.326370239257812, "learning_rate": 8.863228124198101e-05, "loss": 0.9615, "step": 2170},
    {"epoch": 0.56, "grad_norm": 30.599393844604492, "learning_rate": 8.811906594816526e-05, "loss": 0.617, "step": 2180},
    {"epoch": 0.56, "grad_norm": 0.6331340074539185, "learning_rate": 8.76058506543495e-05, "loss": 1.0741, "step": 2190},
    {"epoch": 0.56, "grad_norm": 34.11085891723633, "learning_rate": 8.709263536053374e-05, "loss": 1.9807, "step": 2200},
    {"epoch": 0.57, "grad_norm": 0.4941723942756653, "learning_rate": 8.6579420066718e-05, "loss": 1.6712, "step": 2210},
    {"epoch": 0.57, "grad_norm": 0.8858903646469116, "learning_rate": 8.606620477290223e-05, "loss": 1.1382, "step": 2220},
    {"epoch": 0.57, "grad_norm": 0.4389359951019287, "learning_rate": 8.555298947908647e-05, "loss": 0.0166, "step": 2230},
    {"epoch": 0.57, "grad_norm": 0.27877870202064514, "learning_rate": 8.503977418527072e-05, "loss": 1.8697, "step": 2240},
    {"epoch": 0.58, "grad_norm": 0.0968392938375473, "learning_rate": 8.452655889145496e-05, "loss": 0.9307, "step": 2250},
    {"epoch": 0.58, "grad_norm": 0.07415905594825745, "learning_rate": 8.401334359763922e-05, "loss": 1.3474, "step": 2260},
    {"epoch": 0.58, "grad_norm": 32.0526008605957, "learning_rate": 8.350012830382345e-05, "loss": 1.9036, "step": 2270},
    {"epoch": 0.59, "grad_norm": 31.857463836669922, "learning_rate": 8.29869130100077e-05, "loss": 1.7843, "step": 2280},
    {"epoch": 0.59, "grad_norm": 0.16152064502239227, "learning_rate": 8.247369771619195e-05, "loss": 1.138, "step": 2290},
    {"epoch": 0.59, "grad_norm": 31.4908504486084, "learning_rate": 8.19604824223762e-05, "loss": 1.348, "step": 2300},
    {"epoch": 0.59, "grad_norm": 0.8940775990486145, "learning_rate": 8.144726712856044e-05, "loss": 1.4317, "step": 2310},
    {"epoch": 0.6, "grad_norm": 1.3364328145980835, "learning_rate": 8.093405183474469e-05, "loss": 1.3774, "step": 2320},
    {"epoch": 0.6, "grad_norm": 32.3911018371582, "learning_rate": 8.042083654092893e-05, "loss": 1.8806, "step": 2330},
    {"epoch": 0.6, "grad_norm": 0.0908791646361351, "learning_rate": 7.990762124711317e-05, "loss": 0.3828, "step": 2340},
    {"epoch": 0.6, "grad_norm": 0.7057339549064636, "learning_rate": 7.939440595329742e-05, "loss": 1.8282, "step": 2350},
    {"epoch": 0.61, "grad_norm": 0.9124455451965332, "learning_rate": 7.888119065948166e-05, "loss": 0.8194, "step": 2360},
    {"epoch": 0.61, "grad_norm": 0.6995695233345032, "learning_rate": 7.83679753656659e-05, "loss": 2.4169, "step": 2370},
    {"epoch": 0.61, "grad_norm": 30.961750030517578, "learning_rate": 7.785476007185013e-05, "loss": 1.0782, "step": 2380},
    {"epoch": 0.61, "grad_norm": 30.930931091308594, "learning_rate": 7.734154477803439e-05, "loss": 1.773, "step": 2390},
    {"epoch": 0.62, "grad_norm": 2.427703857421875, "learning_rate": 7.682832948421863e-05, "loss": 1.1838, "step": 2400},
    {"epoch": 0.62, "grad_norm": 28.99689292907715, "learning_rate": 7.631511419040288e-05, "loss": 0.9657, "step": 2410},
    {"epoch": 0.62, "grad_norm": 0.0632254034280777, "learning_rate": 7.580189889658712e-05, "loss": 0.9273, "step": 2420},
    {"epoch": 0.62, "grad_norm": 0.06876543164253235, "learning_rate": 7.528868360277137e-05, "loss": 0.6686, "step": 2430},
    {"epoch": 0.63, "grad_norm": 0.059902504086494446, "learning_rate": 7.477546830895561e-05, "loss": 0.6242, "step": 2440},
    {"epoch": 0.63, "grad_norm": 30.04041290283203, "learning_rate": 7.426225301513986e-05, "loss": 2.3915, "step": 2450},
    {"epoch": 0.63, "grad_norm": 2.442535400390625, "learning_rate": 7.37490377213241e-05, "loss": 0.5622, "step": 2460},
    {"epoch": 0.63, "grad_norm": 0.0483098030090332, "learning_rate": 7.323582242750834e-05, "loss": 1.5706, "step": 2470},
    {"epoch": 0.64, "grad_norm": 0.048585113137960434, "learning_rate": 7.272260713369259e-05, "loss": 0.8918, "step": 2480},
    {"epoch": 0.64, "grad_norm": 29.45044708251953, "learning_rate": 7.220939183987683e-05, "loss": 1.4446, "step": 2490},
    {"epoch": 0.64, "grad_norm": 0.03927851840853691, "learning_rate": 7.169617654606108e-05, "loss": 0.6336, "step": 2500},
    {"epoch": 0.64, "grad_norm": 5.8573174476623535, "learning_rate": 7.118296125224532e-05, "loss": 0.5349, "step": 2510},
    {"epoch": 0.65, "grad_norm": 0.04964911565184593, "learning_rate": 7.066974595842957e-05, "loss": 1.471, "step": 2520},
    {"epoch": 0.65, "grad_norm": 1.5833439826965332, "learning_rate": 7.015653066461381e-05, "loss": 0.3127, "step": 2530},
    {"epoch": 0.65, "grad_norm": 0.6209262609481812, "learning_rate": 6.964331537079806e-05, "loss": 0.3781, "step": 2540},
    {"epoch": 0.65, "grad_norm": 34.23354721069336, "learning_rate": 6.91301000769823e-05, "loss": 1.8894, "step": 2550},
    {"epoch": 0.66, "grad_norm": 0.40863272547721863, "learning_rate": 6.861688478316654e-05, "loss": 0.8985, "step": 2560},
    {"epoch": 0.66, "grad_norm": 0.26213815808296204, "learning_rate": 6.810366948935078e-05, "loss": 1.038, "step": 2570},
    {"epoch": 0.66, "grad_norm": 32.263370513916016, "learning_rate": 6.759045419553503e-05, "loss": 2.2679, "step": 2580},
    {"epoch": 0.66, "grad_norm": 34.00870895385742, "learning_rate": 6.707723890171927e-05, "loss": 2.075, "step": 2590},
    {"epoch": 0.67, "grad_norm": 0.4284890294075012, "learning_rate": 6.656402360790351e-05, "loss": 0.7597, "step": 2600},
    {"epoch": 0.67, "grad_norm": 0.3407612144947052, "learning_rate": 6.605080831408776e-05, "loss": 0.4383, "step": 2610},
    {"epoch": 0.67, "grad_norm": 0.467129111289978, "learning_rate": 6.5537593020272e-05, "loss": 1.2687, "step": 2620},
    {"epoch": 0.67, "grad_norm": 0.13389898836612701, "learning_rate": 6.502437772645625e-05, "loss": 2.1633, "step": 2630},
    {"epoch": 0.68, "grad_norm": 0.13675318658351898, "learning_rate": 6.451116243264049e-05, "loss": 0.7836, "step": 2640},
    {"epoch": 0.68, "grad_norm": 0.5350820422172546, "learning_rate": 6.399794713882474e-05, "loss": 0.7649, "step": 2650},
    {"epoch": 0.68, "grad_norm": 0.48448631167411804, "learning_rate": 6.348473184500898e-05, "loss": 1.38, "step": 2660},
    {"epoch": 0.69, "grad_norm": 0.5778862237930298, "learning_rate": 6.297151655119323e-05, "loss": 2.2181, "step": 2670},
    {"epoch": 0.69, "grad_norm": 1.1682463884353638, "learning_rate": 6.245830125737747e-05, "loss": 1.0876, "step": 2680},
    {"epoch": 0.69, "grad_norm": 31.393110275268555, "learning_rate": 6.194508596356173e-05, "loss": 1.1128, "step": 2690},
    {"epoch": 0.69, "grad_norm": 0.6889828443527222, "learning_rate": 6.143187066974596e-05, "loss": 0.7252, "step": 2700},
    {"epoch": 0.7, "grad_norm": 0.5311009883880615, "learning_rate": 6.0918655375930204e-05, "loss": 0.3959, "step": 2710},
    {"epoch": 0.7, "grad_norm": 0.43320441246032715, "learning_rate": 6.040544008211445e-05, "loss": 1.2466, "step": 2720},
    {"epoch": 0.7, "grad_norm": 0.06079118326306343, "learning_rate": 5.989222478829869e-05, "loss": 2.0125, "step": 2730},
    {"epoch": 0.7, "grad_norm": 0.042944520711898804, "learning_rate": 5.937900949448294e-05, "loss": 0.779, "step": 2740},
    {"epoch": 0.71, "grad_norm": 0.9245113730430603, "learning_rate": 5.886579420066718e-05, "loss": 0.7976, "step": 2750},
    {"epoch": 0.71, "grad_norm": 0.039791032671928406, "learning_rate": 5.835257890685143e-05, "loss": 1.4881, "step": 2760},
    {"epoch": 0.71, "grad_norm": 0.6234956979751587, "learning_rate": 5.783936361303567e-05, "loss": 0.6912, "step": 2770},
    {"epoch": 0.71, "grad_norm": 0.9020193219184875, "learning_rate": 5.7326148319219916e-05, "loss": 0.7522, "step": 2780},
    {"epoch": 0.72, "grad_norm": 0.034982066601514816, "learning_rate": 5.681293302540416e-05, "loss": 1.2431, "step": 2790},
    {"epoch": 0.72, "grad_norm": 0.04073556512594223, "learning_rate": 5.6299717731588407e-05, "loss": 1.1053, "step": 2800},
    {"epoch": 0.72, "grad_norm": 35.25714874267578, "learning_rate": 5.5786502437772645e-05, "loss": 1.4825, "step": 2810},
    {"epoch": 0.72, "grad_norm": 0.7252044081687927, "learning_rate": 5.52732871439569e-05, "loss": 2.1285, "step": 2820},
    {"epoch": 0.73, "grad_norm": 31.510704040527344, "learning_rate": 5.4760071850141136e-05, "loss": 0.7914, "step": 2830},
    {"epoch": 0.73, "grad_norm": 0.44585466384887695, "learning_rate": 5.4246856556325375e-05, "loss": 1.2094, "step": 2840},
    {"epoch": 0.73, "grad_norm": 0.2099919468164444, "learning_rate": 5.373364126250963e-05, "loss": 2.496, "step": 2850},
    {"epoch": 0.73, "grad_norm": 30.501766204833984, "learning_rate": 5.3220425968693866e-05, "loss": 1.0997, "step": 2860},
    {"epoch": 0.74, "grad_norm": 0.36132165789604187, "learning_rate": 5.270721067487812e-05, "loss": 1.4378, "step": 2870},
    {"epoch": 0.74, "grad_norm": 1.6164754629135132, "learning_rate": 5.219399538106236e-05, "loss": 1.1191, "step": 2880},
    {"epoch": 0.74, "grad_norm": 0.9825451374053955, "learning_rate": 5.16807800872466e-05, "loss": 0.861, "step": 2890},
    {"epoch": 0.74, "grad_norm": 0.28137707710266113, "learning_rate": 5.116756479343084e-05, "loss": 0.4387, "step": 2900},
    {"epoch": 0.75, "grad_norm": 0.2143826186656952, "learning_rate": 5.0654349499615094e-05, "loss": 0.9561, "step": 2910},
    {"epoch": 0.75, "grad_norm": 0.25483042001724243, "learning_rate": 5.014113420579933e-05, "loss": 1.4271, "step": 2920},
    {"epoch": 0.75, "grad_norm": 0.36998990178108215, "learning_rate": 4.962791891198358e-05, "loss": 0.8621, "step": 2930},
    {"epoch": 0.75, "grad_norm": 0.08702105283737183, "learning_rate": 4.9114703618167824e-05, "loss": 0.4751, "step": 2940},
    {"epoch": 0.76, "grad_norm": 0.455074667930603, "learning_rate": 4.860148832435207e-05, "loss": 2.7804, "step": 2950},
    {"epoch": 0.76, "grad_norm": 0.41690900921821594, "learning_rate": 4.8088273030536315e-05, "loss": 0.444, "step": 2960},
    {"epoch": 0.76, "grad_norm": 0.0824534222483635, "learning_rate": 4.757505773672056e-05, "loss": 2.2328, "step": 2970},
    {"epoch": 0.76, "grad_norm": 0.7097424268722534, "learning_rate": 4.70618424429048e-05, "loss": 1.232, "step": 2980},
    {"epoch": 0.77, "grad_norm": 32.93928527832031, "learning_rate": 4.6548627149089045e-05, "loss": 2.1723, "step": 2990},
    {"epoch": 0.77, "grad_norm": 1.5564465522766113, "learning_rate": 4.603541185527329e-05, "loss": 1.3908, "step": 3000},
    {"epoch": 0.77, "grad_norm": 1.1192642450332642, "learning_rate": 4.5522196561457536e-05, "loss": 0.2986, "step": 3010},
    {"epoch": 0.77, "grad_norm": 0.09087738394737244, "learning_rate": 4.5008981267641774e-05, "loss": 1.3239, "step": 3020},
    {"epoch": 0.78, "grad_norm": 31.66700553894043, "learning_rate": 4.449576597382602e-05, "loss": 1.9121, "step": 3030},
    {"epoch": 0.78, "grad_norm": 0.08067402243614197, "learning_rate": 4.3982550680010266e-05, "loss": 1.4794, "step": 3040},
    {"epoch": 0.78, "grad_norm": 31.138818740844727, "learning_rate": 4.346933538619451e-05, "loss": 1.3591, "step": 3050},
    {"epoch": 0.79, "grad_norm": 30.739635467529297, "learning_rate": 4.2956120092378757e-05, "loss": 2.1444, "step": 3060},
    {"epoch": 0.79, "grad_norm": 29.64994239807129, "learning_rate": 4.2442904798563e-05, "loss": 1.1349, "step": 3070},
    {"epoch": 0.79, "grad_norm": 0.06835217773914337, "learning_rate": 4.192968950474724e-05, "loss": 1.1995, "step": 3080},
    {"epoch": 0.79, "grad_norm": 0.07994609326124191, "learning_rate": 4.1416474210931486e-05, "loss": 1.106, "step": 3090},
    {"epoch": 0.8, "grad_norm": 0.077377088367939, "learning_rate": 4.090325891711573e-05, "loss": 0.5515, "step": 3100},
    {"epoch": 0.8, "grad_norm": 0.07298921793699265, "learning_rate": 4.039004362329998e-05, "loss": 1.3587, "step": 3110},
    {"epoch": 0.8, "grad_norm": 2.3832900524139404, "learning_rate": 3.987682832948422e-05, "loss": 0.5702, "step": 3120},
    {"epoch": 0.8, "grad_norm": 1.3254412412643433, "learning_rate": 3.936361303566847e-05, "loss": 0.6837, "step": 3130},
    {"epoch": 0.81, "grad_norm": 31.57373046875, "learning_rate": 3.885039774185271e-05, "loss": 1.4529, "step": 3140},
    {"epoch": 0.81, "grad_norm": 31.78739356994629, "learning_rate": 3.833718244803695e-05, "loss": 1.1592, "step": 3150},
    {"epoch": 0.81, "grad_norm": 0.885814368724823, "learning_rate": 3.78239671542212e-05, "loss": 2.0692, "step": 3160},
    {"epoch": 0.81, "grad_norm": 0.11006342619657516, "learning_rate": 3.7310751860405444e-05, "loss": 1.3088, "step": 3170},
    {"epoch": 0.82, "grad_norm": 0.7606900930404663, "learning_rate": 3.679753656658968e-05, "loss": 0.7503, "step": 3180},
    {"epoch": 0.82, "grad_norm": 34.01801681518555, "learning_rate": 3.628432127277393e-05, "loss": 2.3927, "step": 3190},
    {"epoch": 0.82, "grad_norm": 31.359045028686523, "learning_rate": 3.5771105978958174e-05, "loss": 1.7152, "step": 3200},
    {"epoch": 0.82, "grad_norm": 0.4896736741065979, "learning_rate": 3.525789068514242e-05, "loss": 1.1745, "step": 3210},
    {"epoch": 0.83, "grad_norm": 33.61710739135742, "learning_rate": 3.4744675391326665e-05, "loss": 2.6372, "step": 3220},
    {"epoch": 0.83, "grad_norm": 0.2002750039100647, "learning_rate": 3.423146009751091e-05, "loss": 0.3958, "step": 3230},
    {"epoch": 0.83, "grad_norm": 0.5248939394950867, "learning_rate": 3.3718244803695156e-05, "loss": 1.1428, "step": 3240},
    {"epoch": 0.83, "grad_norm": 0.20893482863903046, "learning_rate": 3.32050295098794e-05, "loss": 1.1695, "step": 3250},
    {"epoch": 0.84, "grad_norm": 31.339996337890625, "learning_rate": 3.269181421606364e-05, "loss": 1.9207, "step": 3260},
    {"epoch": 0.84, "grad_norm": 0.20348432660102844, "learning_rate": 3.2178598922247886e-05, "loss": 1.587, "step": 3270},
    {"epoch": 0.84, "grad_norm": 31.108985900878906, "learning_rate": 3.1665383628432124e-05, "loss": 1.3677, "step": 3280},
    {"epoch": 0.84, "grad_norm": 29.706878662109375, "learning_rate": 3.115216833461637e-05, "loss": 2.0426, "step": 3290},
    {"epoch": 0.85, "grad_norm": 1.0215935707092285, "learning_rate": 3.0638953040800616e-05, "loss": 1.7041, "step": 3300},
    {"epoch": 0.85, "grad_norm": 0.23811456561088562, "learning_rate": 3.012573774698486e-05, "loss": 1.2077, "step": 3310},
    {"epoch": 0.85, "grad_norm": 0.21963796019554138, "learning_rate": 2.9612522453169107e-05, "loss": 1.1672, "step": 3320},
    {"epoch": 0.85, "grad_norm": 1.149311900138855, "learning_rate": 2.9099307159353352e-05, "loss": 1.6651, "step": 3330},
    {"epoch": 0.86, "grad_norm": 1.1534388065338135, "learning_rate": 2.8586091865537594e-05, "loss": 1.2245, "step": 3340},
    {"epoch": 0.86, "grad_norm": 1.3270301818847656, "learning_rate": 2.807287657172184e-05, "loss": 0.6268, "step": 3350},
    {"epoch": 0.86, "grad_norm": 0.2713325619697571, "learning_rate": 2.7559661277906085e-05, "loss": 1.6962, "step": 3360},
    {"epoch": 0.86, "grad_norm": 0.25903043150901794, "learning_rate": 2.704644598409033e-05, "loss": 0.4256, "step": 3370},
    {"epoch": 0.87, "grad_norm": 0.456488698720932, "learning_rate": 2.6533230690274573e-05, "loss": 0.7452, "step": 3380},
    {"epoch": 0.87, "grad_norm": 30.572643280029297, "learning_rate": 2.6020015396458812e-05, "loss": 1.9218, "step": 3390},
    {"epoch": 0.87, "grad_norm": 31.96410369873047, "learning_rate": 2.5506800102643057e-05, "loss": 2.2211, "step": 3400},
    {"epoch": 0.88, "grad_norm": 0.5312764644622803, "learning_rate": 2.4993584808827306e-05, "loss": 0.6983, "step": 3410},
    {"epoch": 0.88, "grad_norm": 0.23434561491012573, "learning_rate": 2.448036951501155e-05, "loss": 0.7466, "step": 3420},
    {"epoch": 0.88, "grad_norm": 36.59482192993164, "learning_rate": 2.3967154221195794e-05, "loss": 1.6694, "step": 3430},
    {"epoch": 0.88, "grad_norm": 32.04636001586914, "learning_rate": 2.3453938927380036e-05, "loss": 1.6496, "step": 3440},
    {"epoch": 0.89, "grad_norm": 0.2819555699825287, "learning_rate": 2.294072363356428e-05, "loss": 1.3424, "step": 3450},
    {"epoch": 0.89, "grad_norm": 0.46713873744010925, "learning_rate": 2.2427508339748527e-05, "loss": 0.0149, "step": 3460},
    {"epoch": 0.89, "grad_norm": 0.5181002020835876, "learning_rate": 2.1914293045932773e-05, "loss": 0.7906, "step": 3470},
    {"epoch": 0.89, "grad_norm": 0.20827196538448334, "learning_rate": 2.140107775211701e-05, "loss": 0.4505, "step": 3480},
    {"epoch": 0.9, "grad_norm": 32.157432556152344, "learning_rate": 2.0887862458301257e-05, "loss": 2.5997, "step": 3490},
    {"epoch": 0.9, "grad_norm": 0.1746426224708557, "learning_rate": 2.0374647164485502e-05, "loss": 1.6267, "step": 3500},
    {"epoch": 0.9, "grad_norm": 31.728593826293945, "learning_rate": 1.9861431870669748e-05, "loss": 0.8132, "step": 3510},
    {"epoch": 0.9, "grad_norm": 0.8428208231925964, "learning_rate": 1.9348216576853994e-05, "loss": 1.4408, "step": 3520},
    {"epoch": 0.91, "grad_norm": 0.6710448861122131, "learning_rate": 1.8835001283038236e-05, "loss": 0.7472, "step": 3530},
    {"epoch": 0.91, "grad_norm": 0.15114788711071014, "learning_rate": 1.8321785989222478e-05, "loss": 0.3549, "step": 3540},
    {"epoch": 0.91, "grad_norm": 0.48726460337638855, "learning_rate": 1.7808570695406723e-05, "loss": 1.0424, "step": 3550},
    {"epoch": 0.91, "grad_norm": 31.90473747253418, "learning_rate": 1.729535540159097e-05, "loss": 1.6014, "step": 3560},
    {"epoch": 0.92, "grad_norm": 0.11974696815013885, "learning_rate": 1.678214010777521e-05, "loss": 1.367, "step": 3570},
    {"epoch": 0.92, "grad_norm": 31.7264461517334, "learning_rate": 1.6268924813959457e-05, "loss": 0.8339, "step": 3580},
    {"epoch": 0.92, "grad_norm": 0.9428460001945496, "learning_rate": 1.5755709520143702e-05, "loss": 1.2831, "step": 3590},
    {"epoch": 0.92, "grad_norm": 0.5526345372200012, "learning_rate": 1.5242494226327944e-05, "loss": 0.7811, "step": 3600},
    {"epoch": 0.93, "grad_norm": 0.4995884299278259, "learning_rate": 1.4729278932512188e-05, "loss": 0.454, "step": 3610},
    {"epoch": 0.93, "grad_norm": 32.1862907409668, "learning_rate": 1.4216063638696434e-05, "loss": 1.7057, "step": 3620},
    {"epoch": 0.93, "grad_norm": 30.95826530456543, "learning_rate": 1.3702848344880677e-05, "loss": 1.6207, "step": 3630},
    {"epoch": 0.93, "grad_norm": 0.10005848109722137, "learning_rate": 1.3189633051064923e-05, "loss": 0.8695, "step": 3640},
    {"epoch": 0.94, "grad_norm": 31.5998477935791, "learning_rate": 1.2676417757249169e-05, "loss": 1.9645, "step": 3650},
    {"epoch": 0.94, "grad_norm": 0.797754168510437, "learning_rate": 1.216320246343341e-05, "loss": 1.1224, "step": 3660},
    {"epoch": 0.94, "grad_norm": 0.10280079394578934, "learning_rate": 1.1649987169617656e-05, "loss": 0.8117, "step": 3670},
    {"epoch": 0.94, "grad_norm": 31.812061309814453, "learning_rate": 1.1136771875801898e-05, "loss": 0.5172, "step": 3680},
    {"epoch": 0.95, "grad_norm": 0.08260977268218994, "learning_rate": 1.0623556581986144e-05, "loss": 1.6816, "step": 3690},
    {"epoch": 0.95, "grad_norm": 0.6431272029876709, "learning_rate": 1.0110341288170388e-05, "loss": 2.2201, "step": 3700},
    {"epoch": 0.95, "grad_norm": 0.5408867597579956, "learning_rate": 9.597125994354632e-06, "loss": 0.9963, "step": 3710},
    {"epoch": 0.95, "grad_norm": 0.6773453950881958, "learning_rate": 9.083910700538877e-06, "loss": 2.3436, "step": 3720},
    {"epoch": 0.96, "grad_norm": 33.85165786743164, "learning_rate": 8.570695406723121e-06, "loss": 1.229, "step": 3730},
    {"epoch": 0.96, "grad_norm": 0.09623867273330688, "learning_rate": 8.057480112907365e-06, "loss": 0.9852, "step": 3740},
    {"epoch": 0.96, "grad_norm": 0.6665917038917542, "learning_rate": 7.5442648190916095e-06, "loss": 0.9811, "step": 3750},
    {"epoch": 0.96, "grad_norm": 0.1049143448472023, "learning_rate": 7.031049525275854e-06, "loss": 1.6639, "step": 3760},
    {"epoch": 0.97, "grad_norm": 1.028926968574524, "learning_rate": 6.517834231460097e-06, "loss": 0.6745, "step": 3770},
    {"epoch": 0.97, "grad_norm": 0.5345853567123413, "learning_rate": 6.004618937644342e-06, "loss": 1.9351, "step": 3780},
    {"epoch": 0.97, "grad_norm": 0.09490037709474564, "learning_rate": 5.4914036438285865e-06, "loss": 1.0257, "step": 3790},
    {"epoch": 0.98, "grad_norm": 0.7808061838150024, "learning_rate": 4.978188350012831e-06, "loss": 1.6728, "step": 3800},
    {"epoch": 0.98, "grad_norm": 0.4944613575935364, "learning_rate": 4.464973056197075e-06, "loss": 1.6118, "step": 3810},
    {"epoch": 0.98, "grad_norm": 1.0724005699157715, "learning_rate": 3.951757762381319e-06, "loss": 1.352, "step": 3820},
    {"epoch": 0.98, "grad_norm": 0.8290554285049438, "learning_rate": 3.4385424685655636e-06, "loss": 2.4513, "step": 3830},
    {"epoch": 0.99, "grad_norm": 0.532361626625061, "learning_rate": 2.9253271747498074e-06, "loss": 0.412, "step": 3840},
    {"epoch": 0.99, "grad_norm": 0.1319778561592102, "learning_rate": 2.412111880934052e-06, "loss": 0.7912, "step": 3850},
    {"epoch": 0.99, "grad_norm": 0.6048825979232788, "learning_rate": 1.8988965871182964e-06, "loss": 0.9813, "step": 3860},
    {"epoch": 0.99, "grad_norm": 0.4283379018306732, "learning_rate": 1.3856812933025406e-06, "loss": 0.7374, "step": 3870},
    {"epoch": 1.0, "grad_norm": 0.7439431548118591, "learning_rate": 8.724659994867848e-07, "loss": 0.3349, "step": 3880},
    {"epoch": 1.0, "grad_norm": 0.49157199263572693, "learning_rate": 3.5925070567102904e-07, "loss": 1.4183, "step": 3890},
    {"epoch": 1.0, "step": 3897, "total_flos": 9.121574551619174e+17, "train_loss": 1.2556480791153037, "train_runtime": 633.8251, "train_samples_per_second": 18.444, "train_steps_per_second": 6.148}
  ],
  "logging_steps": 10,
  "max_steps": 3897,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 1000,
  "total_flos": 9.121574551619174e+17,
  "train_batch_size": 3,
  "trial_name": null,
  "trial_params": null
}