{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 399.0662251655629,
  "eval_steps": 500,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 2.07,
      "learning_rate": 2.4834437086092715e-05,
      "loss": 3.036,
      "step": 30
    },
    {
      "epoch": 5.07,
      "learning_rate": 4.966887417218543e-05,
      "loss": 2.727,
      "step": 60
    },
    {
      "epoch": 8.07,
      "learning_rate": 7.450331125827815e-05,
      "loss": 2.4733,
      "step": 90
    },
    {
      "epoch": 11.07,
      "learning_rate": 9.933774834437086e-05,
      "loss": 2.3326,
      "step": 120
    },
    {
      "epoch": 14.07,
      "learning_rate": 0.00012417218543046358,
      "loss": 2.2252,
      "step": 150
    },
    {
      "epoch": 17.07,
      "learning_rate": 0.0001490066225165563,
      "loss": 2.1346,
      "step": 180
    },
    {
      "epoch": 20.07,
      "learning_rate": 0.000173841059602649,
      "loss": 2.0513,
      "step": 210
    },
    {
      "epoch": 23.07,
      "learning_rate": 0.00019867549668874172,
      "loss": 1.9759,
      "step": 240
    },
    {
      "epoch": 26.07,
      "learning_rate": 0.00022350993377483444,
      "loss": 1.9071,
      "step": 270
    },
    {
      "epoch": 29.07,
      "learning_rate": 0.00024834437086092715,
      "loss": 1.838,
      "step": 300
    },
    {
      "epoch": 32.07,
      "learning_rate": 0.0002731788079470199,
      "loss": 1.7631,
      "step": 330
    },
    {
      "epoch": 35.07,
      "learning_rate": 0.0002980132450331126,
      "loss": 1.6968,
      "step": 360
    },
    {
      "epoch": 38.07,
      "learning_rate": 0.00032284768211920527,
      "loss": 1.6119,
      "step": 390
    },
    {
      "epoch": 41.07,
      "learning_rate": 0.000347682119205298,
      "loss": 1.5422,
      "step": 420
    },
    {
      "epoch": 44.07,
      "learning_rate": 0.00037251655629139076,
      "loss": 1.4734,
      "step": 450
    },
    {
      "epoch": 47.07,
      "learning_rate": 0.00039735099337748344,
      "loss": 1.3984,
      "step": 480
    },
    {
      "epoch": 50.07,
      "learning_rate": 0.0004221854304635762,
      "loss": 1.3118,
      "step": 510
    },
    {
      "epoch": 53.07,
      "learning_rate": 0.0004470198675496689,
      "loss": 1.2615,
      "step": 540
    },
    {
      "epoch": 56.07,
      "learning_rate": 0.00047185430463576156,
      "loss": 1.1864,
      "step": 570
    },
    {
      "epoch": 59.07,
      "learning_rate": 0.0004966887417218543,
      "loss": 1.1384,
      "step": 600
    },
    {
      "epoch": 62.07,
      "learning_rate": 0.000521523178807947,
      "loss": 1.0538,
      "step": 630
    },
    {
      "epoch": 65.07,
      "learning_rate": 0.0005463576158940398,
      "loss": 1.0015,
      "step": 660
    },
    {
      "epoch": 68.07,
      "learning_rate": 0.0005711920529801324,
      "loss": 0.939,
      "step": 690
    },
    {
      "epoch": 71.07,
      "learning_rate": 0.0005960264900662252,
      "loss": 0.8865,
      "step": 720
    },
    {
      "epoch": 74.07,
      "learning_rate": 0.0006208609271523179,
      "loss": 0.8369,
      "step": 750
    },
    {
      "epoch": 77.07,
      "learning_rate": 0.0006456953642384105,
      "loss": 0.7761,
      "step": 780
    },
    {
      "epoch": 80.07,
      "learning_rate": 0.0006705298013245033,
      "loss": 0.7468,
      "step": 810
    },
    {
      "epoch": 83.07,
      "learning_rate": 0.000695364238410596,
      "loss": 0.6898,
      "step": 840
    },
    {
      "epoch": 86.07,
      "learning_rate": 0.0007201986754966887,
      "loss": 0.6711,
      "step": 870
    },
    {
      "epoch": 89.07,
      "learning_rate": 0.0007450331125827815,
      "loss": 0.6476,
      "step": 900
    },
    {
      "epoch": 92.07,
      "learning_rate": 0.0007698675496688743,
      "loss": 0.6337,
      "step": 930
    },
    {
      "epoch": 95.07,
      "learning_rate": 0.0007947019867549669,
      "loss": 0.5984,
      "step": 960
    },
    {
      "epoch": 98.07,
      "learning_rate": 0.0008195364238410596,
      "loss": 0.5906,
      "step": 990
    },
    {
      "epoch": 101.07,
      "learning_rate": 0.0008443708609271524,
      "loss": 0.5713,
      "step": 1020
    },
    {
      "epoch": 104.07,
      "learning_rate": 0.000869205298013245,
      "loss": 0.5323,
      "step": 1050
    },
    {
      "epoch": 107.07,
      "learning_rate": 0.0008940397350993377,
      "loss": 0.5379,
      "step": 1080
    },
    {
      "epoch": 110.07,
      "learning_rate": 0.0009188741721854305,
      "loss": 0.5165,
      "step": 1110
    },
    {
      "epoch": 113.07,
      "learning_rate": 0.0009437086092715231,
      "loss": 0.5117,
      "step": 1140
    },
    {
      "epoch": 116.07,
      "learning_rate": 0.0009685430463576159,
      "loss": 0.4923,
      "step": 1170
    },
    {
      "epoch": 119.07,
      "learning_rate": 0.0009933774834437086,
      "loss": 0.4754,
      "step": 1200
    },
    {
      "epoch": 122.07,
      "learning_rate": 0.0010182119205298015,
      "loss": 0.4723,
      "step": 1230
    },
    {
      "epoch": 125.07,
      "learning_rate": 0.001043046357615894,
      "loss": 0.4699,
      "step": 1260
    },
    {
      "epoch": 128.07,
      "learning_rate": 0.0010678807947019867,
      "loss": 0.4432,
      "step": 1290
    },
    {
      "epoch": 131.07,
      "learning_rate": 0.0010927152317880796,
      "loss": 0.4537,
      "step": 1320
    },
    {
      "epoch": 134.07,
      "learning_rate": 0.0011175496688741722,
      "loss": 0.4658,
      "step": 1350
    },
    {
      "epoch": 137.07,
      "learning_rate": 0.0011423841059602648,
      "loss": 0.4134,
      "step": 1380
    },
    {
      "epoch": 140.07,
      "learning_rate": 0.0011672185430463577,
      "loss": 0.4326,
      "step": 1410
    },
    {
      "epoch": 143.07,
      "learning_rate": 0.0011920529801324503,
      "loss": 0.452,
      "step": 1440
    },
    {
      "epoch": 146.07,
      "learning_rate": 0.001216887417218543,
      "loss": 0.4637,
      "step": 1470
    },
    {
      "epoch": 149.07,
      "learning_rate": 0.0012417218543046358,
      "loss": 0.4336,
      "step": 1500
    },
    {
      "epoch": 152.07,
      "learning_rate": 0.0012665562913907287,
      "loss": 0.4468,
      "step": 1530
    },
    {
      "epoch": 155.07,
      "learning_rate": 0.001291390728476821,
      "loss": 0.4415,
      "step": 1560
    },
    {
      "epoch": 158.07,
      "learning_rate": 0.001316225165562914,
      "loss": 0.4216,
      "step": 1590
    },
    {
      "epoch": 161.07,
      "learning_rate": 0.0013410596026490066,
      "loss": 0.4268,
      "step": 1620
    },
    {
      "epoch": 164.07,
      "learning_rate": 0.0013658940397350994,
      "loss": 0.4177,
      "step": 1650
    },
    {
      "epoch": 167.07,
      "learning_rate": 0.001390728476821192,
      "loss": 0.4439,
      "step": 1680
    },
    {
      "epoch": 170.07,
      "learning_rate": 0.001415562913907285,
      "loss": 0.444,
      "step": 1710
    },
    {
      "epoch": 173.07,
      "learning_rate": 0.0014403973509933773,
      "loss": 0.4364,
      "step": 1740
    },
    {
      "epoch": 176.07,
      "learning_rate": 0.0014652317880794702,
      "loss": 0.4292,
      "step": 1770
    },
    {
      "epoch": 179.07,
      "learning_rate": 0.001490066225165563,
      "loss": 0.4343,
      "step": 1800
    },
    {
      "epoch": 182.07,
      "learning_rate": 0.0015149006622516557,
      "loss": 0.4513,
      "step": 1830
    },
    {
      "epoch": 185.07,
      "learning_rate": 0.0015397350993377485,
      "loss": 0.453,
      "step": 1860
    },
    {
      "epoch": 188.07,
      "learning_rate": 0.0015645695364238411,
      "loss": 0.4163,
      "step": 1890
    },
    {
      "epoch": 191.07,
      "learning_rate": 0.0015894039735099338,
      "loss": 0.441,
      "step": 1920
    },
    {
      "epoch": 194.07,
      "learning_rate": 0.0016142384105960264,
      "loss": 0.4564,
      "step": 1950
    },
    {
      "epoch": 197.07,
      "learning_rate": 0.0016390728476821193,
      "loss": 0.4733,
      "step": 1980
    },
    {
      "epoch": 200.07,
      "learning_rate": 0.001663907284768212,
      "loss": 0.4634,
      "step": 2010
    },
    {
      "epoch": 203.07,
      "learning_rate": 0.0016887417218543047,
      "loss": 0.4723,
      "step": 2040
    },
    {
      "epoch": 206.07,
      "learning_rate": 0.0017135761589403972,
      "loss": 0.4666,
      "step": 2070
    },
    {
      "epoch": 209.07,
      "learning_rate": 0.00173841059602649,
      "loss": 0.4924,
      "step": 2100
    },
    {
      "epoch": 212.07,
      "learning_rate": 0.0017632450331125829,
      "loss": 0.4821,
      "step": 2130
    },
    {
      "epoch": 215.07,
      "learning_rate": 0.0017880794701986755,
      "loss": 0.48,
      "step": 2160
    },
    {
      "epoch": 218.07,
      "learning_rate": 0.0018129139072847683,
      "loss": 0.5119,
      "step": 2190
    },
    {
      "epoch": 221.07,
      "learning_rate": 0.001837748344370861,
      "loss": 0.5396,
      "step": 2220
    },
    {
      "epoch": 224.07,
      "learning_rate": 0.0018625827814569538,
      "loss": 0.5597,
      "step": 2250
    },
    {
      "epoch": 227.07,
      "learning_rate": 0.0018874172185430462,
      "loss": 0.5535,
      "step": 2280
    },
    {
      "epoch": 230.07,
      "learning_rate": 0.001912251655629139,
      "loss": 0.5951,
      "step": 2310
    },
    {
      "epoch": 233.07,
      "learning_rate": 0.0019370860927152317,
      "loss": 0.553,
      "step": 2340
    },
    {
      "epoch": 236.07,
      "learning_rate": 0.0019619205298013246,
      "loss": 0.5303,
      "step": 2370
    },
    {
      "epoch": 239.07,
      "learning_rate": 0.001986754966887417,
      "loss": 0.5958,
      "step": 2400
    },
    {
      "epoch": 242.07,
      "learning_rate": 0.00201158940397351,
      "loss": 0.6002,
      "step": 2430
    },
    {
      "epoch": 245.07,
      "learning_rate": 0.002036423841059603,
      "loss": 0.6858,
      "step": 2460
    },
    {
      "epoch": 248.07,
      "learning_rate": 0.0020612582781456956,
      "loss": 0.6152,
      "step": 2490
    },
    {
      "epoch": 251.07,
      "learning_rate": 0.002086092715231788,
      "loss": 0.6171,
      "step": 2520
    },
    {
      "epoch": 254.07,
      "learning_rate": 0.002110927152317881,
      "loss": 0.6383,
      "step": 2550
    },
    {
      "epoch": 257.07,
      "learning_rate": 0.0021357615894039735,
      "loss": 0.7459,
      "step": 2580
    },
    {
      "epoch": 260.07,
      "learning_rate": 0.002160596026490066,
      "loss": 0.7926,
      "step": 2610
    },
    {
      "epoch": 263.07,
      "learning_rate": 0.002185430463576159,
      "loss": 0.8945,
      "step": 2640
    },
    {
      "epoch": 266.07,
      "learning_rate": 0.002210264900662252,
      "loss": 0.8908,
      "step": 2670
    },
    {
      "epoch": 269.07,
      "learning_rate": 0.0022350993377483444,
      "loss": 0.9268,
      "step": 2700
    },
    {
      "epoch": 272.07,
      "learning_rate": 0.002259933774834437,
      "loss": 0.8545,
      "step": 2730
    },
    {
      "epoch": 275.07,
      "learning_rate": 0.0022847682119205297,
      "loss": 0.8369,
      "step": 2760
    },
    {
      "epoch": 278.07,
      "learning_rate": 0.0023096026490066228,
      "loss": 0.8851,
      "step": 2790
    },
    {
      "epoch": 281.07,
      "learning_rate": 0.0023344370860927154,
      "loss": 0.8496,
      "step": 2820
    },
    {
      "epoch": 284.07,
      "learning_rate": 0.002359271523178808,
      "loss": 0.8121,
      "step": 2850
    },
    {
      "epoch": 287.07,
      "learning_rate": 0.0023841059602649007,
      "loss": 0.76,
      "step": 2880
    },
    {
      "epoch": 290.07,
      "learning_rate": 0.0024089403973509937,
      "loss": 0.8552,
      "step": 2910
    },
    {
      "epoch": 293.07,
      "learning_rate": 0.002433774834437086,
      "loss": 0.9308,
      "step": 2940
    },
    {
      "epoch": 296.07,
      "learning_rate": 0.002458609271523179,
      "loss": 0.9471,
      "step": 2970
    },
    {
      "epoch": 299.07,
      "learning_rate": 0.0024834437086092716,
      "loss": 0.9702,
      "step": 3000
    },
    {
      "epoch": 302.07,
      "learning_rate": 0.0025082781456953643,
      "loss": 1.0656,
      "step": 3030
    },
    {
      "epoch": 305.07,
      "learning_rate": 0.0025331125827814573,
      "loss": 1.0608,
      "step": 3060
    },
    {
      "epoch": 308.07,
      "learning_rate": 0.0025579470198675495,
      "loss": 1.0397,
      "step": 3090
    },
    {
      "epoch": 311.07,
      "learning_rate": 0.002582781456953642,
      "loss": 1.0805,
      "step": 3120
    },
    {
      "epoch": 314.07,
      "learning_rate": 0.0026076158940397352,
      "loss": 1.1374,
      "step": 3150
    },
    {
      "epoch": 317.07,
      "learning_rate": 0.002632450331125828,
      "loss": 1.4001,
      "step": 3180
    },
    {
      "epoch": 320.07,
      "learning_rate": 0.0026572847682119205,
      "loss": 1.5648,
      "step": 3210
    },
    {
      "epoch": 323.07,
      "learning_rate": 0.002682119205298013,
      "loss": 1.7286,
      "step": 3240
    },
    {
      "epoch": 326.07,
      "learning_rate": 0.002706953642384106,
      "loss": 1.5511,
      "step": 3270
    },
    {
      "epoch": 329.07,
      "learning_rate": 0.002731788079470199,
      "loss": 1.3367,
      "step": 3300
    },
    {
      "epoch": 332.07,
      "learning_rate": 0.0027566225165562915,
      "loss": 1.2894,
      "step": 3330
    },
    {
      "epoch": 335.07,
      "learning_rate": 0.002781456953642384,
      "loss": 1.3288,
      "step": 3360
    },
    {
      "epoch": 338.07,
      "learning_rate": 0.002806291390728477,
      "loss": 1.2965,
      "step": 3390
    },
    {
      "epoch": 341.07,
      "learning_rate": 0.00283112582781457,
      "loss": 1.1965,
      "step": 3420
    },
    {
      "epoch": 344.07,
      "learning_rate": 0.002855960264900662,
      "loss": 1.1625,
      "step": 3450
    },
    {
      "epoch": 347.07,
      "learning_rate": 0.0028807947019867546,
      "loss": 1.2772,
      "step": 3480
    },
    {
      "epoch": 350.07,
      "learning_rate": 0.0029056291390728477,
      "loss": 1.2967,
      "step": 3510
    },
    {
      "epoch": 353.07,
      "learning_rate": 0.0029304635761589403,
      "loss": 1.5922,
      "step": 3540
    },
    {
      "epoch": 356.07,
      "learning_rate": 0.002955298013245033,
      "loss": 1.5614,
      "step": 3570
    },
    {
      "epoch": 359.07,
      "learning_rate": 0.002980132450331126,
      "loss": 1.9061,
      "step": 3600
    },
    {
      "epoch": 362.07,
      "learning_rate": 0.0030049668874172187,
      "loss": 2.0664,
      "step": 3630
    },
    {
      "epoch": 365.07,
      "learning_rate": 0.0030298013245033113,
      "loss": 1.8159,
      "step": 3660
    },
    {
      "epoch": 368.07,
      "learning_rate": 0.003054635761589404,
      "loss": 1.6505,
      "step": 3690
    },
    {
      "epoch": 371.07,
      "learning_rate": 0.003079470198675497,
      "loss": 1.4858,
      "step": 3720
    },
    {
      "epoch": 374.07,
      "learning_rate": 0.0031043046357615896,
      "loss": 1.4008,
      "step": 3750
    },
    {
      "epoch": 377.07,
      "learning_rate": 0.0031291390728476823,
      "loss": 1.2804,
      "step": 3780
    },
    {
      "epoch": 380.07,
      "learning_rate": 0.0031539735099337745,
      "loss": 1.3041,
      "step": 3810
    },
    {
      "epoch": 383.07,
      "learning_rate": 0.0031788079470198675,
      "loss": 1.3115,
      "step": 3840
    },
    {
      "epoch": 386.07,
      "learning_rate": 0.00320364238410596,
      "loss": 1.3194,
      "step": 3870
    },
    {
      "epoch": 389.07,
      "learning_rate": 0.003228476821192053,
      "loss": 1.3681,
      "step": 3900
    },
    {
      "epoch": 392.07,
      "learning_rate": 0.003253311258278146,
      "loss": 1.3676,
      "step": 3930
    },
    {
      "epoch": 395.07,
      "learning_rate": 0.0032781456953642385,
      "loss": 1.702,
      "step": 3960
    },
    {
      "epoch": 398.07,
      "learning_rate": 0.003302980132450331,
      "loss": 2.7594,
      "step": 3990
    }
  ],
  "logging_steps": 30,
  "max_steps": 60400,
  "num_train_epochs": 400,
  "save_steps": 500,
  "total_flos": 2204652011520000.0,
  "trial_name": null,
  "trial_params": null
}