|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 200,
  "global_step": 1124,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0017793594306049821,
      "grad_norm": 4.078867539710251,
      "learning_rate": 9.99998046979289e-06,
      "loss": 0.1569,
      "step": 1
    },
    {
      "epoch": 0.0035587188612099642,
      "grad_norm": 3.2410804701729754,
      "learning_rate": 9.999921879324127e-06,
      "loss": 0.1432,
      "step": 2
    },
    {
      "epoch": 0.005338078291814947,
      "grad_norm": 3.295193648708453,
      "learning_rate": 9.999824229051425e-06,
      "loss": 0.1578,
      "step": 3
    },
    {
      "epoch": 0.0071174377224199285,
      "grad_norm": 2.7288399765413462,
      "learning_rate": 9.999687519737639e-06,
      "loss": 0.1245,
      "step": 4
    },
    {
      "epoch": 0.008896797153024912,
      "grad_norm": 2.3751574463428895,
      "learning_rate": 9.99951175245075e-06,
      "loss": 0.0989,
      "step": 5
    },
    {
      "epoch": 0.010676156583629894,
      "grad_norm": 3.6056294066863916,
      "learning_rate": 9.999296928563868e-06,
      "loss": 0.1658,
      "step": 6
    },
    {
      "epoch": 0.012455516014234875,
      "grad_norm": 2.8940222323195064,
      "learning_rate": 9.999043049755216e-06,
      "loss": 0.1061,
      "step": 7
    },
    {
      "epoch": 0.014234875444839857,
      "grad_norm": 1.6866495862880195,
      "learning_rate": 9.998750118008117e-06,
      "loss": 0.0708,
      "step": 8
    },
    {
      "epoch": 0.01601423487544484,
      "grad_norm": 2.4779753605767043,
      "learning_rate": 9.998418135610974e-06,
      "loss": 0.1082,
      "step": 9
    },
    {
      "epoch": 0.017793594306049824,
      "grad_norm": 3.9004027062256013,
      "learning_rate": 9.998047105157265e-06,
      "loss": 0.1733,
      "step": 10
    },
    {
      "epoch": 0.019572953736654804,
      "grad_norm": 1.7487394607464144,
      "learning_rate": 9.997637029545509e-06,
      "loss": 0.0766,
      "step": 11
    },
    {
      "epoch": 0.021352313167259787,
      "grad_norm": 2.328909492993231,
      "learning_rate": 9.997187911979252e-06,
      "loss": 0.1011,
      "step": 12
    },
    {
      "epoch": 0.023131672597864767,
      "grad_norm": 2.2899004888251207,
      "learning_rate": 9.996699755967035e-06,
      "loss": 0.1176,
      "step": 13
    },
    {
      "epoch": 0.02491103202846975,
      "grad_norm": 2.229314172224569,
      "learning_rate": 9.996172565322375e-06,
      "loss": 0.1001,
      "step": 14
    },
    {
      "epoch": 0.026690391459074734,
      "grad_norm": 2.1858407483759796,
      "learning_rate": 9.995606344163728e-06,
      "loss": 0.1158,
      "step": 15
    },
    {
      "epoch": 0.028469750889679714,
      "grad_norm": 1.631331879240115,
      "learning_rate": 9.995001096914462e-06,
      "loss": 0.0756,
      "step": 16
    },
    {
      "epoch": 0.030249110320284697,
      "grad_norm": 1.7186711853221304,
      "learning_rate": 9.994356828302818e-06,
      "loss": 0.0912,
      "step": 17
    },
    {
      "epoch": 0.03202846975088968,
      "grad_norm": 1.7398970710330903,
      "learning_rate": 9.993673543361874e-06,
      "loss": 0.0937,
      "step": 18
    },
    {
      "epoch": 0.033807829181494664,
      "grad_norm": 2.260927685096165,
      "learning_rate": 9.992951247429512e-06,
      "loss": 0.133,
      "step": 19
    },
    {
      "epoch": 0.03558718861209965,
      "grad_norm": 1.8363203982587604,
      "learning_rate": 9.992189946148366e-06,
      "loss": 0.0915,
      "step": 20
    },
    {
      "epoch": 0.037366548042704624,
      "grad_norm": 1.5070008583754404,
      "learning_rate": 9.991389645465786e-06,
      "loss": 0.0717,
      "step": 21
    },
    {
      "epoch": 0.03914590747330961,
      "grad_norm": 2.064763684550839,
      "learning_rate": 9.990550351633784e-06,
      "loss": 0.1183,
      "step": 22
    },
    {
      "epoch": 0.04092526690391459,
      "grad_norm": 1.9189332083201394,
      "learning_rate": 9.989672071208993e-06,
      "loss": 0.1178,
      "step": 23
    },
    {
      "epoch": 0.042704626334519574,
      "grad_norm": 1.802850958660318,
      "learning_rate": 9.988754811052616e-06,
      "loss": 0.1139,
      "step": 24
    },
    {
      "epoch": 0.04448398576512456,
      "grad_norm": 2.3827939564927365,
      "learning_rate": 9.987798578330365e-06,
      "loss": 0.1311,
      "step": 25
    },
    {
      "epoch": 0.046263345195729534,
      "grad_norm": 1.7019059083993373,
      "learning_rate": 9.986803380512406e-06,
      "loss": 0.0931,
      "step": 26
    },
    {
      "epoch": 0.04804270462633452,
      "grad_norm": 2.036399457294326,
      "learning_rate": 9.98576922537331e-06,
      "loss": 0.1241,
      "step": 27
    },
    {
      "epoch": 0.0498220640569395,
      "grad_norm": 1.6459953265803846,
      "learning_rate": 9.984696120991979e-06,
      "loss": 0.096,
      "step": 28
    },
    {
      "epoch": 0.051601423487544484,
      "grad_norm": 1.5434989485465862,
      "learning_rate": 9.983584075751598e-06,
      "loss": 0.0719,
      "step": 29
    },
    {
      "epoch": 0.05338078291814947,
      "grad_norm": 1.8351532931841141,
      "learning_rate": 9.982433098339553e-06,
      "loss": 0.1052,
      "step": 30
    },
    {
      "epoch": 0.05516014234875445,
      "grad_norm": 2.3750451419476755,
      "learning_rate": 9.981243197747375e-06,
      "loss": 0.1378,
      "step": 31
    },
    {
      "epoch": 0.05693950177935943,
      "grad_norm": 3.461439740539195,
      "learning_rate": 9.980014383270668e-06,
      "loss": 0.16,
      "step": 32
    },
    {
      "epoch": 0.05871886120996441,
      "grad_norm": 1.9646930858280047,
      "learning_rate": 9.978746664509032e-06,
      "loss": 0.1032,
      "step": 33
    },
    {
      "epoch": 0.060498220640569395,
      "grad_norm": 2.207144502356659,
      "learning_rate": 9.97744005136599e-06,
      "loss": 0.1071,
      "step": 34
    },
    {
      "epoch": 0.06227758007117438,
      "grad_norm": 1.558227494004109,
      "learning_rate": 9.976094554048912e-06,
      "loss": 0.0834,
      "step": 35
    },
    {
      "epoch": 0.06405693950177936,
      "grad_norm": 1.972789519211233,
      "learning_rate": 9.974710183068935e-06,
      "loss": 0.0901,
      "step": 36
    },
    {
      "epoch": 0.06583629893238434,
      "grad_norm": 1.83295038500449,
      "learning_rate": 9.97328694924088e-06,
      "loss": 0.0966,
      "step": 37
    },
    {
      "epoch": 0.06761565836298933,
      "grad_norm": 2.029052505138821,
      "learning_rate": 9.971824863683168e-06,
      "loss": 0.1099,
      "step": 38
    },
    {
      "epoch": 0.0693950177935943,
      "grad_norm": 2.2314550812500955,
      "learning_rate": 9.970323937817732e-06,
      "loss": 0.1365,
      "step": 39
    },
    {
      "epoch": 0.0711743772241993,
      "grad_norm": 2.0077574972760748,
      "learning_rate": 9.968784183369929e-06,
      "loss": 0.1107,
      "step": 40
    },
    {
      "epoch": 0.07295373665480427,
      "grad_norm": 1.8836316289336847,
      "learning_rate": 9.96720561236845e-06,
      "loss": 0.1146,
      "step": 41
    },
    {
      "epoch": 0.07473309608540925,
      "grad_norm": 2.2254852829156917,
      "learning_rate": 9.965588237145219e-06,
      "loss": 0.1325,
      "step": 42
    },
    {
      "epoch": 0.07651245551601424,
      "grad_norm": 1.614223349729372,
      "learning_rate": 9.963932070335307e-06,
      "loss": 0.1038,
      "step": 43
    },
    {
      "epoch": 0.07829181494661921,
      "grad_norm": 1.8530639376633644,
      "learning_rate": 9.962237124876828e-06,
      "loss": 0.1151,
      "step": 44
    },
    {
      "epoch": 0.0800711743772242,
      "grad_norm": 1.9046402323143434,
      "learning_rate": 9.960503414010833e-06,
      "loss": 0.1133,
      "step": 45
    },
    {
      "epoch": 0.08185053380782918,
      "grad_norm": 1.7243256983039024,
      "learning_rate": 9.958730951281218e-06,
      "loss": 0.1071,
      "step": 46
    },
    {
      "epoch": 0.08362989323843416,
      "grad_norm": 1.416017660715855,
      "learning_rate": 9.956919750534607e-06,
      "loss": 0.0931,
      "step": 47
    },
    {
      "epoch": 0.08540925266903915,
      "grad_norm": 2.0401062687819485,
      "learning_rate": 9.955069825920249e-06,
      "loss": 0.148,
      "step": 48
    },
    {
      "epoch": 0.08718861209964412,
      "grad_norm": 1.8406971265956174,
      "learning_rate": 9.953181191889913e-06,
      "loss": 0.1265,
      "step": 49
    },
    {
      "epoch": 0.08896797153024912,
      "grad_norm": 1.5111394852124147,
      "learning_rate": 9.95125386319776e-06,
      "loss": 0.1086,
      "step": 50
    },
    {
      "epoch": 0.09074733096085409,
      "grad_norm": 1.7863821281546215,
      "learning_rate": 9.949287854900243e-06,
      "loss": 0.1027,
      "step": 51
    },
    {
      "epoch": 0.09252669039145907,
      "grad_norm": 1.7732430845866989,
      "learning_rate": 9.947283182355982e-06,
      "loss": 0.1168,
      "step": 52
    },
    {
      "epoch": 0.09430604982206406,
      "grad_norm": 1.5267085405822918,
      "learning_rate": 9.945239861225644e-06,
      "loss": 0.0822,
      "step": 53
    },
    {
      "epoch": 0.09608540925266904,
      "grad_norm": 1.872080277800805,
      "learning_rate": 9.943157907471825e-06,
      "loss": 0.1287,
      "step": 54
    },
    {
      "epoch": 0.09786476868327403,
      "grad_norm": 1.887675032089796,
      "learning_rate": 9.941037337358918e-06,
      "loss": 0.1085,
      "step": 55
    },
    {
      "epoch": 0.099644128113879,
      "grad_norm": 2.020773798517879,
      "learning_rate": 9.938878167452991e-06,
      "loss": 0.1098,
      "step": 56
    },
    {
      "epoch": 0.10142348754448399,
      "grad_norm": 2.3210929011676678,
      "learning_rate": 9.936680414621663e-06,
      "loss": 0.1221,
      "step": 57
    },
    {
      "epoch": 0.10320284697508897,
      "grad_norm": 1.657054635006968,
      "learning_rate": 9.934444096033958e-06,
      "loss": 0.0997,
      "step": 58
    },
    {
      "epoch": 0.10498220640569395,
      "grad_norm": 1.536320251026234,
      "learning_rate": 9.932169229160183e-06,
      "loss": 0.1024,
      "step": 59
    },
    {
      "epoch": 0.10676156583629894,
      "grad_norm": 1.8267522587458016,
      "learning_rate": 9.929855831771787e-06,
      "loss": 0.1084,
      "step": 60
    },
    {
      "epoch": 0.10854092526690391,
      "grad_norm": 1.7520433454433173,
      "learning_rate": 9.927503921941218e-06,
      "loss": 0.1134,
      "step": 61
    },
    {
      "epoch": 0.1103202846975089,
      "grad_norm": 1.8264518956649605,
      "learning_rate": 9.925113518041796e-06,
      "loss": 0.1248,
      "step": 62
    },
    {
      "epoch": 0.11209964412811388,
      "grad_norm": 2.01380780221982,
      "learning_rate": 9.922684638747551e-06,
      "loss": 0.1156,
      "step": 63
    },
    {
      "epoch": 0.11387900355871886,
      "grad_norm": 1.6173109787905093,
      "learning_rate": 9.920217303033091e-06,
      "loss": 0.1033,
      "step": 64
    },
    {
      "epoch": 0.11565836298932385,
      "grad_norm": 1.5574759955286475,
      "learning_rate": 9.917711530173444e-06,
      "loss": 0.1082,
      "step": 65
    },
    {
      "epoch": 0.11743772241992882,
      "grad_norm": 2.377238008438504,
      "learning_rate": 9.91516733974392e-06,
      "loss": 0.1202,
      "step": 66
    },
    {
      "epoch": 0.11921708185053381,
      "grad_norm": 1.6659733582424865,
      "learning_rate": 9.912584751619943e-06,
      "loss": 0.1026,
      "step": 67
    },
    {
      "epoch": 0.12099644128113879,
      "grad_norm": 1.633068682704407,
      "learning_rate": 9.909963785976902e-06,
      "loss": 0.1205,
      "step": 68
    },
    {
      "epoch": 0.12277580071174377,
      "grad_norm": 1.8121972425355344,
      "learning_rate": 9.907304463290004e-06,
      "loss": 0.0983,
      "step": 69
    },
    {
      "epoch": 0.12455516014234876,
      "grad_norm": 1.552633126750543,
      "learning_rate": 9.904606804334094e-06,
      "loss": 0.0893,
      "step": 70
    },
    {
      "epoch": 0.12633451957295375,
      "grad_norm": 1.6590748732210991,
      "learning_rate": 9.901870830183506e-06,
      "loss": 0.1242,
      "step": 71
    },
    {
      "epoch": 0.12811387900355872,
      "grad_norm": 1.4006470467192071,
      "learning_rate": 9.899096562211902e-06,
      "loss": 0.1071,
      "step": 72
    },
    {
      "epoch": 0.1298932384341637,
      "grad_norm": 1.8073866382938693,
      "learning_rate": 9.896284022092088e-06,
      "loss": 0.1292,
      "step": 73
    },
    {
      "epoch": 0.13167259786476868,
      "grad_norm": 2.093490355060369,
      "learning_rate": 9.893433231795864e-06,
      "loss": 0.1381,
      "step": 74
    },
    {
      "epoch": 0.13345195729537365,
      "grad_norm": 2.163224450137612,
      "learning_rate": 9.890544213593838e-06,
      "loss": 0.1415,
      "step": 75
    },
    {
      "epoch": 0.13523131672597866,
      "grad_norm": 1.7785817881311663,
      "learning_rate": 9.887616990055262e-06,
      "loss": 0.1369,
      "step": 76
    },
    {
      "epoch": 0.13701067615658363,
      "grad_norm": 1.7228423243185222,
      "learning_rate": 9.884651584047845e-06,
      "loss": 0.1202,
      "step": 77
    },
    {
      "epoch": 0.1387900355871886,
      "grad_norm": 1.5425645854320849,
      "learning_rate": 9.881648018737587e-06,
      "loss": 0.1173,
      "step": 78
    },
    {
      "epoch": 0.14056939501779359,
      "grad_norm": 1.5366521920589986,
      "learning_rate": 9.878606317588588e-06,
      "loss": 0.0922,
      "step": 79
    },
    {
      "epoch": 0.1423487544483986,
      "grad_norm": 2.7420835605385077,
      "learning_rate": 9.875526504362868e-06,
      "loss": 0.1723,
      "step": 80
    },
    {
      "epoch": 0.14412811387900357,
      "grad_norm": 2.1599018725320063,
      "learning_rate": 9.872408603120187e-06,
      "loss": 0.1412,
      "step": 81
    },
    {
      "epoch": 0.14590747330960854,
      "grad_norm": 1.7555022601591703,
      "learning_rate": 9.869252638217846e-06,
      "loss": 0.1112,
      "step": 82
    },
    {
      "epoch": 0.14768683274021352,
      "grad_norm": 1.5801660337320023,
      "learning_rate": 9.866058634310503e-06,
      "loss": 0.1058,
      "step": 83
    },
    {
      "epoch": 0.1494661921708185,
      "grad_norm": 1.8119041147672266,
      "learning_rate": 9.862826616349981e-06,
      "loss": 0.1285,
      "step": 84
    },
    {
      "epoch": 0.1512455516014235,
      "grad_norm": 2.105260520520919,
      "learning_rate": 9.859556609585075e-06,
      "loss": 0.1356,
      "step": 85
    },
    {
      "epoch": 0.15302491103202848,
      "grad_norm": 1.5555176722532245,
      "learning_rate": 9.856248639561346e-06,
      "loss": 0.1054,
      "step": 86
    },
    {
      "epoch": 0.15480427046263345,
      "grad_norm": 1.7063970783128501,
      "learning_rate": 9.85290273212093e-06,
      "loss": 0.1042,
      "step": 87
    },
    {
      "epoch": 0.15658362989323843,
      "grad_norm": 1.922866849278896,
      "learning_rate": 9.849518913402334e-06,
      "loss": 0.1273,
      "step": 88
    },
    {
      "epoch": 0.1583629893238434,
      "grad_norm": 1.716566973412918,
      "learning_rate": 9.84609720984023e-06,
      "loss": 0.1169,
      "step": 89
    },
    {
      "epoch": 0.1601423487544484,
      "grad_norm": 1.6351695724090016,
      "learning_rate": 9.84263764816525e-06,
      "loss": 0.0954,
      "step": 90
    },
    {
      "epoch": 0.1619217081850534,
      "grad_norm": 1.5582092040095523,
      "learning_rate": 9.839140255403776e-06,
      "loss": 0.0999,
      "step": 91
    },
    {
      "epoch": 0.16370106761565836,
      "grad_norm": 1.6998552423673303,
      "learning_rate": 9.83560505887773e-06,
      "loss": 0.1114,
      "step": 92
    },
    {
      "epoch": 0.16548042704626334,
      "grad_norm": 1.6925322546061474,
      "learning_rate": 9.83203208620436e-06,
      "loss": 0.1207,
      "step": 93
    },
    {
      "epoch": 0.16725978647686832,
      "grad_norm": 1.9550111265487515,
      "learning_rate": 9.828421365296023e-06,
      "loss": 0.1106,
      "step": 94
    },
    {
      "epoch": 0.16903914590747332,
      "grad_norm": 1.7439189383587073,
      "learning_rate": 9.824772924359974e-06,
      "loss": 0.1139,
      "step": 95
    },
    {
      "epoch": 0.1708185053380783,
      "grad_norm": 1.650196039878736,
      "learning_rate": 9.821086791898133e-06,
      "loss": 0.109,
      "step": 96
    },
    {
      "epoch": 0.17259786476868327,
      "grad_norm": 1.6010209944314775,
      "learning_rate": 9.817362996706872e-06,
      "loss": 0.121,
      "step": 97
    },
    {
      "epoch": 0.17437722419928825,
      "grad_norm": 1.3594075108824626,
      "learning_rate": 9.81360156787679e-06,
      "loss": 0.0925,
      "step": 98
    },
    {
      "epoch": 0.17615658362989323,
      "grad_norm": 1.3866290200408895,
      "learning_rate": 9.809802534792477e-06,
      "loss": 0.0932,
      "step": 99
    },
    {
      "epoch": 0.17793594306049823,
      "grad_norm": 1.9070359465229236,
      "learning_rate": 9.805965927132294e-06,
      "loss": 0.131,
      "step": 100
    },
    {
      "epoch": 0.1797153024911032,
      "grad_norm": 1.6264091801172003,
      "learning_rate": 9.802091774868143e-06,
      "loss": 0.1048,
      "step": 101
    },
    {
      "epoch": 0.18149466192170818,
      "grad_norm": 1.393315064800281,
      "learning_rate": 9.798180108265218e-06,
      "loss": 0.1073,
      "step": 102
    },
    {
      "epoch": 0.18327402135231316,
      "grad_norm": 1.6966853242031756,
      "learning_rate": 9.794230957881785e-06,
      "loss": 0.1253,
      "step": 103
    },
    {
      "epoch": 0.18505338078291814,
      "grad_norm": 1.8707738301198065,
      "learning_rate": 9.79024435456893e-06,
      "loss": 0.1154,
      "step": 104
    },
    {
      "epoch": 0.18683274021352314,
      "grad_norm": 1.710515077434774,
      "learning_rate": 9.786220329470334e-06,
      "loss": 0.1104,
      "step": 105
    },
    {
      "epoch": 0.18861209964412812,
      "grad_norm": 1.5775837752978192,
      "learning_rate": 9.782158914022011e-06,
      "loss": 0.1032,
      "step": 106
    },
    {
      "epoch": 0.1903914590747331,
      "grad_norm": 1.554036824324644,
      "learning_rate": 9.778060139952075e-06,
      "loss": 0.0995,
      "step": 107
    },
    {
      "epoch": 0.19217081850533807,
      "grad_norm": 1.8153768650978208,
      "learning_rate": 9.773924039280488e-06,
      "loss": 0.1326,
      "step": 108
    },
    {
      "epoch": 0.19395017793594305,
      "grad_norm": 1.3306543473208419,
      "learning_rate": 9.769750644318814e-06,
      "loss": 0.097,
      "step": 109
    },
    {
      "epoch": 0.19572953736654805,
      "grad_norm": 1.7441816579715657,
      "learning_rate": 9.765539987669956e-06,
      "loss": 0.1057,
      "step": 110
    },
    {
      "epoch": 0.19750889679715303,
      "grad_norm": 1.7951773930673918,
      "learning_rate": 9.761292102227917e-06,
      "loss": 0.1327,
      "step": 111
    },
    {
      "epoch": 0.199288256227758,
      "grad_norm": 1.9904306683709725,
      "learning_rate": 9.757007021177529e-06,
      "loss": 0.1289,
      "step": 112
    },
    {
      "epoch": 0.20106761565836298,
      "grad_norm": 1.2549347662610835,
      "learning_rate": 9.752684777994197e-06,
      "loss": 0.0763,
      "step": 113
    },
    {
      "epoch": 0.20284697508896798,
      "grad_norm": 1.7838669941265637,
      "learning_rate": 9.748325406443647e-06,
      "loss": 0.1094,
      "step": 114
    },
    {
      "epoch": 0.20462633451957296,
      "grad_norm": 1.759708455606992,
      "learning_rate": 9.743928940581646e-06,
      "loss": 0.1254,
      "step": 115
    },
    {
      "epoch": 0.20640569395017794,
      "grad_norm": 1.5543956067146134,
      "learning_rate": 9.739495414753754e-06,
      "loss": 0.1026,
      "step": 116
    },
    {
      "epoch": 0.20818505338078291,
      "grad_norm": 1.4473907853138583,
      "learning_rate": 9.73502486359504e-06,
      "loss": 0.1039,
      "step": 117
    },
    {
      "epoch": 0.2099644128113879,
      "grad_norm": 1.8502016581382927,
      "learning_rate": 9.73051732202982e-06,
      "loss": 0.1118,
      "step": 118
    },
    {
      "epoch": 0.2117437722419929,
      "grad_norm": 1.5523661421978434,
      "learning_rate": 9.725972825271381e-06,
      "loss": 0.1066,
      "step": 119
    },
    {
      "epoch": 0.21352313167259787,
      "grad_norm": 1.8138644564512383,
      "learning_rate": 9.721391408821713e-06,
      "loss": 0.1175,
      "step": 120
    },
    {
      "epoch": 0.21530249110320285,
      "grad_norm": 1.4672240174888738,
      "learning_rate": 9.716773108471213e-06,
      "loss": 0.1083,
      "step": 121
    },
    {
      "epoch": 0.21708185053380782,
      "grad_norm": 1.9245048641348923,
      "learning_rate": 9.712117960298433e-06,
      "loss": 0.131,
      "step": 122
    },
    {
      "epoch": 0.2188612099644128,
      "grad_norm": 1.7369046092167046,
      "learning_rate": 9.707426000669773e-06,
      "loss": 0.1196,
      "step": 123
    },
    {
      "epoch": 0.2206405693950178,
      "grad_norm": 1.6888793736399235,
      "learning_rate": 9.702697266239211e-06,
      "loss": 0.1115,
      "step": 124
    },
    {
      "epoch": 0.22241992882562278,
      "grad_norm": 2.0175978902376777,
      "learning_rate": 9.697931793948012e-06,
      "loss": 0.1328,
      "step": 125
    },
    {
      "epoch": 0.22419928825622776,
      "grad_norm": 1.536156238833876,
      "learning_rate": 9.693129621024441e-06,
      "loss": 0.1073,
      "step": 126
    },
    {
      "epoch": 0.22597864768683273,
      "grad_norm": 1.7939356444045214,
      "learning_rate": 9.68829078498347e-06,
      "loss": 0.1153,
      "step": 127
    },
    {
      "epoch": 0.2277580071174377,
      "grad_norm": 1.7881937999161266,
      "learning_rate": 9.683415323626487e-06,
      "loss": 0.1209,
      "step": 128
    },
    {
      "epoch": 0.22953736654804271,
      "grad_norm": 2.091273091326845,
      "learning_rate": 9.678503275040997e-06,
      "loss": 0.1447,
      "step": 129
    },
    {
      "epoch": 0.2313167259786477,
      "grad_norm": 1.4988172373838966,
      "learning_rate": 9.673554677600336e-06,
      "loss": 0.0956,
      "step": 130
    },
    {
      "epoch": 0.23309608540925267,
      "grad_norm": 1.6599554084985895,
      "learning_rate": 9.668569569963355e-06,
      "loss": 0.1065,
      "step": 131
    },
    {
      "epoch": 0.23487544483985764,
      "grad_norm": 1.3899503557882722,
      "learning_rate": 9.663547991074129e-06,
      "loss": 0.0993,
      "step": 132
    },
    {
      "epoch": 0.23665480427046262,
      "grad_norm": 1.6910247071625581,
      "learning_rate": 9.658489980161643e-06,
      "loss": 0.1145,
      "step": 133
    },
    {
      "epoch": 0.23843416370106763,
      "grad_norm": 1.8451310724917092,
      "learning_rate": 9.653395576739504e-06,
      "loss": 0.1321,
      "step": 134
    },
    {
      "epoch": 0.2402135231316726,
      "grad_norm": 1.9822484269933685,
      "learning_rate": 9.648264820605611e-06,
      "loss": 0.1189,
      "step": 135
    },
    {
      "epoch": 0.24199288256227758,
      "grad_norm": 1.3656807845608558,
      "learning_rate": 9.643097751841854e-06,
      "loss": 0.1013,
      "step": 136
    },
    {
      "epoch": 0.24377224199288255,
      "grad_norm": 1.6321020962474615,
      "learning_rate": 9.637894410813803e-06,
      "loss": 0.122,
      "step": 137
    },
    {
      "epoch": 0.24555160142348753,
      "grad_norm": 1.8663426074271083,
      "learning_rate": 9.632654838170393e-06,
      "loss": 0.11,
      "step": 138
    },
    {
      "epoch": 0.24733096085409254,
      "grad_norm": 1.5281319032491034,
      "learning_rate": 9.627379074843595e-06,
      "loss": 0.0953,
      "step": 139
    },
    {
      "epoch": 0.2491103202846975,
      "grad_norm": 1.745760459949664,
      "learning_rate": 9.622067162048111e-06,
      "loss": 0.1232,
      "step": 140
    },
    {
      "epoch": 0.2508896797153025,
      "grad_norm": 1.2512836650149644,
      "learning_rate": 9.616719141281044e-06,
      "loss": 0.0978,
      "step": 141
    },
    {
      "epoch": 0.2526690391459075,
      "grad_norm": 1.9189936101250713,
      "learning_rate": 9.611335054321576e-06,
      "loss": 0.1399,
      "step": 142
    },
    {
      "epoch": 0.25444839857651247,
      "grad_norm": 1.899603639348915,
      "learning_rate": 9.605914943230637e-06,
      "loss": 0.1208,
      "step": 143
    },
    {
      "epoch": 0.25622775800711745,
      "grad_norm": 1.3124596225185126,
      "learning_rate": 9.600458850350588e-06,
      "loss": 0.1143,
      "step": 144
    },
    {
      "epoch": 0.2580071174377224,
      "grad_norm": 1.7776891753251993,
      "learning_rate": 9.594966818304875e-06,
      "loss": 0.1299,
      "step": 145
    },
    {
      "epoch": 0.2597864768683274,
      "grad_norm": 1.393804418472495,
      "learning_rate": 9.589438889997712e-06,
      "loss": 0.0921,
      "step": 146
    },
    {
      "epoch": 0.2615658362989324,
      "grad_norm": 1.5411224511644097,
      "learning_rate": 9.583875108613727e-06,
      "loss": 0.0922,
      "step": 147
    },
    {
      "epoch": 0.26334519572953735,
      "grad_norm": 1.7949884121245645,
      "learning_rate": 9.578275517617646e-06,
      "loss": 0.1181,
      "step": 148
    },
    {
      "epoch": 0.26512455516014233,
      "grad_norm": 1.9252535437458735,
      "learning_rate": 9.572640160753936e-06,
      "loss": 0.126,
      "step": 149
    },
    {
      "epoch": 0.2669039145907473,
      "grad_norm": 1.4649633142786056,
      "learning_rate": 9.566969082046471e-06,
      "loss": 0.108,
      "step": 150
    },
    {
      "epoch": 0.26868327402135234,
      "grad_norm": 1.3589942118197844,
      "learning_rate": 9.561262325798188e-06,
      "loss": 0.0786,
      "step": 151
    },
    {
      "epoch": 0.2704626334519573,
      "grad_norm": 1.3921273766501354,
      "learning_rate": 9.555519936590739e-06,
      "loss": 0.1052,
      "step": 152
    },
    {
      "epoch": 0.2722419928825623,
      "grad_norm": 1.6663353983769753,
      "learning_rate": 9.549741959284147e-06,
      "loss": 0.121,
      "step": 153
    },
    {
      "epoch": 0.27402135231316727,
      "grad_norm": 1.5784058329778665,
      "learning_rate": 9.543928439016445e-06,
      "loss": 0.1032,
      "step": 154
    },
    {
      "epoch": 0.27580071174377224,
      "grad_norm": 1.9484643243611615,
      "learning_rate": 9.538079421203339e-06,
      "loss": 0.1209,
      "step": 155
    },
    {
      "epoch": 0.2775800711743772,
      "grad_norm": 1.728061588680451,
      "learning_rate": 9.532194951537838e-06,
      "loss": 0.1294,
      "step": 156
    },
    {
      "epoch": 0.2793594306049822,
      "grad_norm": 1.6812120867800282,
      "learning_rate": 9.52627507598991e-06,
      "loss": 0.1241,
      "step": 157
    },
    {
      "epoch": 0.28113879003558717,
      "grad_norm": 1.3290510617853026,
      "learning_rate": 9.52031984080611e-06,
      "loss": 0.091,
      "step": 158
    },
    {
      "epoch": 0.28291814946619215,
      "grad_norm": 1.5109036702835381,
      "learning_rate": 9.514329292509227e-06,
      "loss": 0.1106,
      "step": 159
    },
    {
      "epoch": 0.2846975088967972,
      "grad_norm": 1.7506211187729894,
      "learning_rate": 9.508303477897925e-06,
      "loss": 0.1017,
      "step": 160
    },
    {
      "epoch": 0.28647686832740216,
      "grad_norm": 1.9207145796874332,
      "learning_rate": 9.502242444046365e-06,
      "loss": 0.1296,
      "step": 161
    },
    {
      "epoch": 0.28825622775800713,
      "grad_norm": 1.4111460177802766,
      "learning_rate": 9.496146238303846e-06,
      "loss": 0.1006,
      "step": 162
    },
    {
      "epoch": 0.2900355871886121,
      "grad_norm": 1.794920234383905,
      "learning_rate": 9.49001490829443e-06,
      "loss": 0.1023,
      "step": 163
    },
    {
      "epoch": 0.2918149466192171,
      "grad_norm": 10.429176212007441,
      "learning_rate": 9.483848501916578e-06,
      "loss": 0.1737,
      "step": 164
    },
    {
      "epoch": 0.29359430604982206,
      "grad_norm": 1.9160735755532239,
      "learning_rate": 9.477647067342766e-06,
      "loss": 0.1356,
      "step": 165
    },
    {
      "epoch": 0.29537366548042704,
      "grad_norm": 1.5931676561338608,
      "learning_rate": 9.471410653019115e-06,
      "loss": 0.1125,
      "step": 166
    },
    {
      "epoch": 0.297153024911032,
      "grad_norm": 1.5432416422243518,
      "learning_rate": 9.46513930766501e-06,
      "loss": 0.1174,
      "step": 167
    },
    {
      "epoch": 0.298932384341637,
      "grad_norm": 1.7603854553065281,
      "learning_rate": 9.458833080272723e-06,
      "loss": 0.1204,
      "step": 168
    },
    {
      "epoch": 0.30071174377224197,
      "grad_norm": 1.6947380806395196,
      "learning_rate": 9.45249202010702e-06,
      "loss": 0.1118,
      "step": 169
    },
    {
      "epoch": 0.302491103202847,
      "grad_norm": 2.2112216327052447,
      "learning_rate": 9.446116176704791e-06,
      "loss": 0.1444,
      "step": 170
    },
    {
      "epoch": 0.304270462633452,
      "grad_norm": 1.5760103682201312,
      "learning_rate": 9.439705599874653e-06,
      "loss": 0.1052,
      "step": 171
    },
    {
      "epoch": 0.30604982206405695,
      "grad_norm": 1.8276793674036889,
      "learning_rate": 9.433260339696564e-06,
      "loss": 0.1317,
      "step": 172
    },
    {
      "epoch": 0.30782918149466193,
      "grad_norm": 1.6443157785270113,
      "learning_rate": 9.426780446521429e-06,
      "loss": 0.1241,
      "step": 173
    },
    {
      "epoch": 0.3096085409252669,
      "grad_norm": 1.6280585084984283,
      "learning_rate": 9.42026597097071e-06,
      "loss": 0.1344,
      "step": 174
    },
    {
      "epoch": 0.3113879003558719,
      "grad_norm": 1.2799034083887577,
      "learning_rate": 9.413716963936033e-06,
      "loss": 0.092,
      "step": 175
    },
    {
      "epoch": 0.31316725978647686,
      "grad_norm": 1.437488742443067,
      "learning_rate": 9.407133476578778e-06,
      "loss": 0.1036,
      "step": 176
    },
    {
      "epoch": 0.31494661921708184,
      "grad_norm": 2.1960014898007265,
      "learning_rate": 9.400515560329698e-06,
      "loss": 0.1696,
      "step": 177
    },
    {
      "epoch": 0.3167259786476868,
      "grad_norm": 1.4723195572278132,
      "learning_rate": 9.393863266888501e-06,
      "loss": 0.0834,
      "step": 178
    },
    {
      "epoch": 0.3185053380782918,
      "grad_norm": 1.947514812040663,
      "learning_rate": 9.387176648223457e-06,
      "loss": 0.1405,
      "step": 179
    },
    {
      "epoch": 0.3202846975088968,
      "grad_norm": 2.149461979889829,
      "learning_rate": 9.38045575657098e-06,
      "loss": 0.1186,
      "step": 180
    },
    {
      "epoch": 0.3220640569395018,
      "grad_norm": 1.9505903137388936,
      "learning_rate": 9.37370064443524e-06,
      "loss": 0.13,
      "step": 181
    },
    {
      "epoch": 0.3238434163701068,
      "grad_norm": 1.1495685189548674,
      "learning_rate": 9.366911364587726e-06,
      "loss": 0.0812,
      "step": 182
    },
    {
      "epoch": 0.32562277580071175,
      "grad_norm": 1.8986057966424665,
      "learning_rate": 9.360087970066854e-06,
      "loss": 0.1081,
      "step": 183
    },
    {
      "epoch": 0.3274021352313167,
      "grad_norm": 1.4536565155861156,
      "learning_rate": 9.353230514177553e-06,
      "loss": 0.0947,
      "step": 184
    },
    {
      "epoch": 0.3291814946619217,
      "grad_norm": 1.510872103277092,
      "learning_rate": 9.346339050490832e-06,
      "loss": 0.1056,
      "step": 185
    },
    {
      "epoch": 0.3309608540925267,
      "grad_norm": 1.9570661918723171,
      "learning_rate": 9.33941363284338e-06,
      "loss": 0.1381,
      "step": 186
    },
    {
      "epoch": 0.33274021352313166,
      "grad_norm": 1.5280580401677237,
      "learning_rate": 9.332454315337129e-06,
      "loss": 0.1228,
      "step": 187
    },
    {
      "epoch": 0.33451957295373663,
      "grad_norm": 1.4590590907062857,
      "learning_rate": 9.325461152338846e-06,
      "loss": 0.0968,
      "step": 188
    },
    {
      "epoch": 0.33629893238434166,
      "grad_norm": 1.578879089486356,
      "learning_rate": 9.3184341984797e-06,
      "loss": 0.121,
      "step": 189
    },
    {
      "epoch": 0.33807829181494664,
      "grad_norm": 1.7331214556856596,
      "learning_rate": 9.311373508654838e-06,
      "loss": 0.1179,
      "step": 190
    },
    {
      "epoch": 0.3398576512455516,
      "grad_norm": 1.51259786944359,
      "learning_rate": 9.30427913802295e-06,
      "loss": 0.1135,
      "step": 191
    },
    {
      "epoch": 0.3416370106761566,
      "grad_norm": 2.0976192318950266,
      "learning_rate": 9.297151142005852e-06,
      "loss": 0.1405,
      "step": 192
    },
    {
      "epoch": 0.34341637010676157,
      "grad_norm": 1.3855967935164284,
      "learning_rate": 9.289989576288035e-06,
      "loss": 0.0996,
      "step": 193
    },
    {
      "epoch": 0.34519572953736655,
      "grad_norm": 1.8107581200522063,
      "learning_rate": 9.282794496816244e-06,
      "loss": 0.1229,
      "step": 194
    },
    {
      "epoch": 0.3469750889679715,
      "grad_norm": 1.5810266522878282,
      "learning_rate": 9.27556595979904e-06,
      "loss": 0.1029,
      "step": 195
    },
    {
      "epoch": 0.3487544483985765,
      "grad_norm": 1.9650223797670214,
      "learning_rate": 9.26830402170635e-06,
      "loss": 0.1053,
      "step": 196
    },
    {
      "epoch": 0.3505338078291815,
      "grad_norm": 1.79090307802057,
      "learning_rate": 9.261008739269035e-06,
      "loss": 0.1122,
      "step": 197
    },
    {
      "epoch": 0.35231316725978645,
      "grad_norm": 1.4309578862084305,
      "learning_rate": 9.253680169478448e-06,
      "loss": 0.104,
      "step": 198
    },
    {
      "epoch": 0.3540925266903915,
      "grad_norm": 1.9187611684265784,
      "learning_rate": 9.246318369585983e-06,
      "loss": 0.1247,
      "step": 199
    },
    {
      "epoch": 0.35587188612099646,
      "grad_norm": 2.0055477857058794,
      "learning_rate": 9.238923397102629e-06,
      "loss": 0.1387,
      "step": 200
    },
    {
      "epoch": 0.35587188612099646,
      "eval_loss": 0.12523622810840607,
      "eval_runtime": 2.8916,
      "eval_samples_per_second": 15.908,
      "eval_steps_per_second": 4.15,
      "step": 200
    },
    {
      "epoch": 0.35765124555160144,
      "grad_norm": 1.6881785065215231,
      "learning_rate": 9.231495309798525e-06,
      "loss": 0.107,
      "step": 201
    },
    {
      "epoch": 0.3594306049822064,
      "grad_norm": 1.8161103560695104,
      "learning_rate": 9.224034165702506e-06,
      "loss": 0.1192,
      "step": 202
    },
    {
      "epoch": 0.3612099644128114,
      "grad_norm": 1.0988619705244302,
      "learning_rate": 9.216540023101646e-06,
      "loss": 0.081,
      "step": 203
    },
    {
      "epoch": 0.36298932384341637,
      "grad_norm": 1.676588274752007,
      "learning_rate": 9.209012940540806e-06,
      "loss": 0.117,
      "step": 204
    },
    {
      "epoch": 0.36476868327402134,
      "grad_norm": 1.5561132361614747,
      "learning_rate": 9.20145297682218e-06,
      "loss": 0.117,
      "step": 205
    },
    {
      "epoch": 0.3665480427046263,
      "grad_norm": 1.9327727077404893,
      "learning_rate": 9.193860191004833e-06,
      "loss": 0.1393,
      "step": 206
    },
    {
      "epoch": 0.3683274021352313,
      "grad_norm": 1.6137330361749684,
      "learning_rate": 9.186234642404234e-06,
      "loss": 0.1265,
      "step": 207
    },
    {
      "epoch": 0.3701067615658363,
      "grad_norm": 2.507126855867454,
      "learning_rate": 9.178576390591803e-06,
      "loss": 0.1238,
      "step": 208
    },
    {
      "epoch": 0.3718861209964413,
      "grad_norm": 1.5716272370121294,
      "learning_rate": 9.170885495394435e-06,
      "loss": 0.1106,
      "step": 209
    },
    {
      "epoch": 0.3736654804270463,
      "grad_norm": 2.059350985102143,
      "learning_rate": 9.16316201689404e-06,
      "loss": 0.1399,
      "step": 210
    },
    {
      "epoch": 0.37544483985765126,
      "grad_norm": 1.7777935047648332,
      "learning_rate": 9.155406015427076e-06,
      "loss": 0.1252,
      "step": 211
    },
    {
      "epoch": 0.37722419928825623,
      "grad_norm": 1.99358585791931,
      "learning_rate": 9.147617551584066e-06,
      "loss": 0.1394,
      "step": 212
    },
    {
      "epoch": 0.3790035587188612,
      "grad_norm": 1.5837815270409656,
      "learning_rate": 9.139796686209135e-06,
      "loss": 0.1492,
      "step": 213
    },
    {
      "epoch": 0.3807829181494662,
      "grad_norm": 1.3549128662750847,
      "learning_rate": 9.131943480399531e-06,
      "loss": 0.1016,
      "step": 214
    },
    {
      "epoch": 0.38256227758007116,
      "grad_norm": 1.4518610826530913,
      "learning_rate": 9.124057995505148e-06,
      "loss": 0.1156,
      "step": 215
    },
    {
      "epoch": 0.38434163701067614,
      "grad_norm": 2.278845923129916,
      "learning_rate": 9.11614029312805e-06,
      "loss": 0.1714,
      "step": 216
    },
    {
      "epoch": 0.3861209964412811,
      "grad_norm": 1.8080579633165144,
      "learning_rate": 9.108190435121982e-06,
      "loss": 0.1279,
      "step": 217
    },
    {
      "epoch": 0.3879003558718861,
      "grad_norm": 1.9998077271238923,
      "learning_rate": 9.100208483591892e-06,
      "loss": 0.1274,
      "step": 218
    },
    {
      "epoch": 0.3896797153024911,
      "grad_norm": 1.863609572120421,
      "learning_rate": 9.092194500893448e-06,
      "loss": 0.1499,
      "step": 219
    },
    {
      "epoch": 0.3914590747330961,
      "grad_norm": 1.3633929272123548,
      "learning_rate": 9.084148549632547e-06,
      "loss": 0.0915,
      "step": 220
    },
    {
      "epoch": 0.3932384341637011,
      "grad_norm": 1.7330455619844953,
      "learning_rate": 9.076070692664827e-06,
      "loss": 0.1324,
      "step": 221
    },
    {
      "epoch": 0.39501779359430605,
      "grad_norm": 1.6363666111201705,
      "learning_rate": 9.067960993095176e-06,
      "loss": 0.1096,
      "step": 222
    },
    {
      "epoch": 0.39679715302491103,
      "grad_norm": 1.7266623646930472,
      "learning_rate": 9.059819514277238e-06,
      "loss": 0.1265,
      "step": 223
    },
    {
      "epoch": 0.398576512455516,
      "grad_norm": 1.9685600107514534,
      "learning_rate": 9.05164631981292e-06,
      "loss": 0.1387,
      "step": 224
    },
    {
      "epoch": 0.400355871886121,
      "grad_norm": 1.7017334772562438,
      "learning_rate": 9.043441473551893e-06,
      "loss": 0.1325,
      "step": 225
    },
    {
      "epoch": 0.40213523131672596,
      "grad_norm": 1.5484698280198101,
      "learning_rate": 9.035205039591099e-06,
      "loss": 0.0945,
      "step": 226
    },
    {
      "epoch": 0.40391459074733094,
      "grad_norm": 2.542482195118052,
      "learning_rate": 9.02693708227424e-06,
      "loss": 0.1837,
      "step": 227
    },
    {
      "epoch": 0.40569395017793597,
      "grad_norm": 1.447756172990177,
      "learning_rate": 9.018637666191284e-06,
      "loss": 0.1221,
      "step": 228
    },
    {
      "epoch": 0.40747330960854095,
      "grad_norm": 1.767680047838017,
      "learning_rate": 9.010306856177958e-06,
      "loss": 0.1412,
      "step": 229
    },
    {
      "epoch": 0.4092526690391459,
      "grad_norm": 1.4197996585858523,
      "learning_rate": 9.001944717315236e-06,
      "loss": 0.1089,
      "step": 230
    },
    {
      "epoch": 0.4110320284697509,
      "grad_norm": 1.8538045037658262,
      "learning_rate": 8.993551314928846e-06,
      "loss": 0.1445,
      "step": 231
    },
    {
      "epoch": 0.4128113879003559,
      "grad_norm": 1.4197050202647166,
      "learning_rate": 8.985126714588739e-06,
      "loss": 0.0924,
      "step": 232
    },
    {
      "epoch": 0.41459074733096085,
      "grad_norm": 1.475944315311444,
      "learning_rate": 8.976670982108591e-06,
      "loss": 0.1154,
      "step": 233
    },
    {
      "epoch": 0.41637010676156583,
      "grad_norm": 1.9028199110277753,
      "learning_rate": 8.968184183545285e-06,
      "loss": 0.1526,
      "step": 234
    },
    {
      "epoch": 0.4181494661921708,
      "grad_norm": 1.687224449981269,
      "learning_rate": 8.959666385198396e-06,
      "loss": 0.1193,
      "step": 235
    },
    {
      "epoch": 0.4199288256227758,
      "grad_norm": 1.2903987746247663,
      "learning_rate": 8.951117653609666e-06,
      "loss": 0.0955,
      "step": 236
    },
    {
      "epoch": 0.42170818505338076,
      "grad_norm": 1.898092360754091,
      "learning_rate": 8.9425380555625e-06,
      "loss": 0.1314,
      "step": 237
    },
    {
      "epoch": 0.4234875444839858,
      "grad_norm": 1.0928077695916414,
      "learning_rate": 8.933927658081423e-06,
      "loss": 0.0794,
      "step": 238
    },
    {
      "epoch": 0.42526690391459077,
      "grad_norm": 1.4844169431215455,
      "learning_rate": 8.925286528431578e-06,
      "loss": 0.1019,
      "step": 239
    },
    {
      "epoch": 0.42704626334519574,
      "grad_norm": 1.5919916438585953,
      "learning_rate": 8.916614734118184e-06,
      "loss": 0.1061,
      "step": 240
    },
    {
      "epoch": 0.4288256227758007,
      "grad_norm": 1.245738961920039,
      "learning_rate": 8.907912342886016e-06,
      "loss": 0.0796,
      "step": 241
    },
    {
      "epoch": 0.4306049822064057,
      "grad_norm": 1.9549929143965474,
      "learning_rate": 8.899179422718877e-06,
      "loss": 0.1159,
      "step": 242
    },
    {
      "epoch": 0.43238434163701067,
      "grad_norm": 1.5638697746045072,
      "learning_rate": 8.890416041839061e-06,
      "loss": 0.0908,
      "step": 243
    },
    {
      "epoch": 0.43416370106761565,
      "grad_norm": 1.63197217924718,
      "learning_rate": 8.881622268706825e-06,
      "loss": 0.1131,
      "step": 244
    },
    {
      "epoch": 0.4359430604982206,
      "grad_norm": 1.5136377317040546,
      "learning_rate": 8.872798172019856e-06,
      "loss": 0.1026,
      "step": 245
    },
    {
      "epoch": 0.4377224199288256,
      "grad_norm": 1.5285390867638344,
      "learning_rate": 8.863943820712726e-06,
      "loss": 0.1165,
      "step": 246
    },
    {
      "epoch": 0.4395017793594306,
      "grad_norm": 1.633255288300286,
      "learning_rate": 8.855059283956363e-06,
      "loss": 0.132,
      "step": 247
    },
    {
      "epoch": 0.4412811387900356,
      "grad_norm": 2.0748893650156583,
      "learning_rate": 8.8461446311575e-06,
      "loss": 0.1417,
      "step": 248
    },
    {
      "epoch": 0.4430604982206406,
      "grad_norm": 1.650141048246044,
      "learning_rate": 8.837199931958147e-06,
      "loss": 0.1053,
      "step": 249
    },
    {
      "epoch": 0.44483985765124556,
      "grad_norm": 1.6401613443919212,
      "learning_rate": 8.828225256235035e-06,
      "loss": 0.1106,
      "step": 250
    },
    {
      "epoch": 0.44661921708185054,
      "grad_norm": 1.9048606396043968,
      "learning_rate": 8.819220674099074e-06,
      "loss": 0.1215,
      "step": 251
    },
    {
      "epoch": 0.4483985765124555,
      "grad_norm": 1.471661964913618,
      "learning_rate": 8.810186255894804e-06,
      "loss": 0.0951,
      "step": 252
    },
    {
      "epoch": 0.4501779359430605,
      "grad_norm": 1.836537479041308,
      "learning_rate": 8.801122072199848e-06,
      "loss": 0.1247,
      "step": 253
    },
    {
      "epoch": 0.45195729537366547,
      "grad_norm": 1.7248368098528992,
      "learning_rate": 8.792028193824364e-06,
      "loss": 0.113,
      "step": 254
    },
    {
      "epoch": 0.45373665480427045,
      "grad_norm": 1.5751640400386206,
      "learning_rate": 8.782904691810478e-06,
      "loss": 0.1047,
      "step": 255
    },
    {
      "epoch": 0.4555160142348754,
      "grad_norm": 1.5049229715454742,
      "learning_rate": 8.77375163743175e-06,
      "loss": 0.0992,
      "step": 256
    },
    {
      "epoch": 0.45729537366548045,
      "grad_norm": 1.749805952016954,
      "learning_rate": 8.764569102192593e-06,
      "loss": 0.149,
      "step": 257
    },
    {
      "epoch": 0.45907473309608543,
      "grad_norm": 1.4124031280100837,
      "learning_rate": 8.755357157827735e-06,
      "loss": 0.098,
      "step": 258
    },
    {
      "epoch": 0.4608540925266904,
      "grad_norm": 1.985600035437136,
      "learning_rate": 8.746115876301651e-06,
      "loss": 0.1422,
      "step": 259
    },
    {
      "epoch": 0.4626334519572954,
      "grad_norm": 1.6280813993714345,
      "learning_rate": 8.736845329807994e-06,
      "loss": 0.1046,
      "step": 260
    },
    {
      "epoch": 0.46441281138790036,
      "grad_norm": 1.9011934392392438,
      "learning_rate": 8.727545590769044e-06,
      "loss": 0.1303,
      "step": 261
    },
    {
      "epoch": 0.46619217081850534,
      "grad_norm": 1.6846882521779034,
      "learning_rate": 8.718216731835131e-06,
      "loss": 0.1042,
      "step": 262
    },
    {
      "epoch": 0.4679715302491103,
      "grad_norm": 1.7749364615633472,
      "learning_rate": 8.708858825884075e-06,
      "loss": 0.1463,
      "step": 263
    },
    {
      "epoch": 0.4697508896797153,
      "grad_norm": 1.6712445777849931,
      "learning_rate": 8.699471946020612e-06,
      "loss": 0.1278,
      "step": 264
    },
    {
      "epoch": 0.47153024911032027,
      "grad_norm": 1.5526543705906133,
      "learning_rate": 8.690056165575825e-06,
      "loss": 0.1018,
      "step": 265
    },
    {
      "epoch": 0.47330960854092524,
      "grad_norm": 1.3846288369734407,
      "learning_rate": 8.680611558106571e-06,
      "loss": 0.1063,
      "step": 266
    },
    {
      "epoch": 0.4750889679715303,
      "grad_norm": 1.4193391775961521,
      "learning_rate": 8.671138197394907e-06,
      "loss": 0.0862,
      "step": 267
    },
    {
      "epoch": 0.47686832740213525,
      "grad_norm": 1.2208639970641322,
      "learning_rate": 8.661636157447511e-06,
      "loss": 0.097,
      "step": 268
    },
    {
      "epoch": 0.4786476868327402,
      "grad_norm": 2.2944961983246084,
      "learning_rate": 8.652105512495106e-06,
      "loss": 0.1224,
      "step": 269
    },
    {
      "epoch": 0.4804270462633452,
      "grad_norm": 1.4785030484622754,
      "learning_rate": 8.64254633699188e-06,
      "loss": 0.1085,
      "step": 270
    },
    {
      "epoch": 0.4822064056939502,
      "grad_norm": 1.7538538143478022,
      "learning_rate": 8.632958705614905e-06,
      "loss": 0.1217,
      "step": 271
    },
    {
      "epoch": 0.48398576512455516,
      "grad_norm": 1.5291248193780271,
      "learning_rate": 8.623342693263549e-06,
      "loss": 0.1084,
      "step": 272
    },
    {
      "epoch": 0.48576512455516013,
      "grad_norm": 1.6251282256925679,
      "learning_rate": 8.6136983750589e-06,
      "loss": 0.1361,
      "step": 273
    },
    {
      "epoch": 0.4875444839857651,
      "grad_norm": 1.4261896610250897,
      "learning_rate": 8.604025826343167e-06,
      "loss": 0.1105,
      "step": 274
    },
    {
      "epoch": 0.4893238434163701,
      "grad_norm": 1.687997412906706,
      "learning_rate": 8.594325122679107e-06,
      "loss": 0.1053,
      "step": 275
    },
    {
      "epoch": 0.49110320284697506,
      "grad_norm": 1.8955994131272662,
      "learning_rate": 8.584596339849419e-06,
      "loss": 0.1681,
      "step": 276
    },
    {
      "epoch": 0.4928825622775801,
      "grad_norm": 1.3983910184598523,
      "learning_rate": 8.574839553856157e-06,
      "loss": 0.0988,
      "step": 277
    },
    {
      "epoch": 0.49466192170818507,
      "grad_norm": 1.9581104025641418,
      "learning_rate": 8.565054840920145e-06,
      "loss": 0.1408,
      "step": 278
    },
    {
      "epoch": 0.49644128113879005,
      "grad_norm": 1.1049143258123566,
      "learning_rate": 8.55524227748037e-06,
      "loss": 0.0816,
      "step": 279
    },
    {
      "epoch": 0.498220640569395,
      "grad_norm": 1.6176386932812334,
      "learning_rate": 8.545401940193392e-06,
      "loss": 0.0933,
      "step": 280
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.6259280127364453,
      "learning_rate": 8.535533905932739e-06,
      "loss": 0.1159,
      "step": 281
    },
    {
      "epoch": 0.501779359430605,
      "grad_norm": 2.630898876815847,
      "learning_rate": 8.525638251788312e-06,
      "loss": 0.1696,
      "step": 282
    },
    {
      "epoch": 0.50355871886121,
      "grad_norm": 1.6076894954193253,
      "learning_rate": 8.515715055065783e-06,
      "loss": 0.1435,
      "step": 283
    },
    {
      "epoch": 0.505338078291815,
      "grad_norm": 1.6200430253611668,
      "learning_rate": 8.505764393285985e-06,
      "loss": 0.1215,
      "step": 284
    },
    {
      "epoch": 0.5071174377224199,
      "grad_norm": 2.198817877961506,
      "learning_rate": 8.495786344184314e-06,
      "loss": 0.142,
      "step": 285
    },
    {
      "epoch": 0.5088967971530249,
      "grad_norm": 1.507747423660467,
      "learning_rate": 8.485780985710113e-06,
      "loss": 0.1064,
      "step": 286
    },
    {
      "epoch": 0.5106761565836299,
      "grad_norm": 1.5082808501703127,
      "learning_rate": 8.475748396026074e-06,
      "loss": 0.1128,
      "step": 287
    },
    {
      "epoch": 0.5124555160142349,
      "grad_norm": 1.9264469777348672,
      "learning_rate": 8.46568865350762e-06,
      "loss": 0.1331,
      "step": 288
    },
    {
      "epoch": 0.5142348754448398,
      "grad_norm": 1.9172622454379755,
      "learning_rate": 8.45560183674229e-06,
      "loss": 0.1248,
      "step": 289
    },
    {
      "epoch": 0.5160142348754448,
      "grad_norm": 1.6187805508597053,
      "learning_rate": 8.445488024529133e-06,
      "loss": 0.1217,
      "step": 290
    },
    {
      "epoch": 0.5177935943060499,
      "grad_norm": 1.568391683303928,
      "learning_rate": 8.435347295878087e-06,
      "loss": 0.1081,
      "step": 291
    },
    {
      "epoch": 0.5195729537366548,
      "grad_norm": 1.4049959261950444,
      "learning_rate": 8.425179730009368e-06,
      "loss": 0.0905,
      "step": 292
    },
    {
      "epoch": 0.5213523131672598,
      "grad_norm": 1.4287457463538664,
      "learning_rate": 8.41498540635284e-06,
      "loss": 0.1019,
      "step": 293
    },
    {
      "epoch": 0.5231316725978647,
      "grad_norm": 1.6901645179912206,
      "learning_rate": 8.404764404547404e-06,
      "loss": 0.1131,
      "step": 294
    },
    {
      "epoch": 0.5249110320284698,
      "grad_norm": 2.20102858311294,
      "learning_rate": 8.394516804440374e-06,
      "loss": 0.1571,
      "step": 295
    },
    {
      "epoch": 0.5266903914590747,
      "grad_norm": 1.7799066604360443,
      "learning_rate": 8.384242686086848e-06,
      "loss": 0.1421,
      "step": 296
    },
    {
      "epoch": 0.5284697508896797,
      "grad_norm": 1.5273305118785383,
      "learning_rate": 8.373942129749094e-06,
      "loss": 0.1179,
      "step": 297
    },
    {
      "epoch": 0.5302491103202847,
      "grad_norm": 1.1996616458459248,
      "learning_rate": 8.363615215895908e-06,
      "loss": 0.0905,
      "step": 298
    },
    {
      "epoch": 0.5320284697508897,
      "grad_norm": 1.7493174012663681,
      "learning_rate": 8.353262025202e-06,
      "loss": 0.1045,
      "step": 299
    },
    {
      "epoch": 0.5338078291814946,
      "grad_norm": 1.3935592241108374,
      "learning_rate": 8.342882638547351e-06,
      "loss": 0.0933,
      "step": 300
    },
    {
      "epoch": 0.5355871886120996,
      "grad_norm": 1.355810904444078,
      "learning_rate": 8.332477137016587e-06,
      "loss": 0.0954,
      "step": 301
    },
    {
      "epoch": 0.5373665480427047,
      "grad_norm": 1.1873079418149766,
      "learning_rate": 8.322045601898354e-06,
      "loss": 0.0864,
      "step": 302
    },
    {
      "epoch": 0.5391459074733096,
      "grad_norm": 1.2902174924175371,
      "learning_rate": 8.311588114684665e-06,
      "loss": 0.0954,
      "step": 303
    },
    {
      "epoch": 0.5409252669039146,
      "grad_norm": 1.75512074904062,
      "learning_rate": 8.301104757070276e-06,
      "loss": 0.159,
      "step": 304
    },
    {
      "epoch": 0.5427046263345195,
      "grad_norm": 1.593851866762387,
      "learning_rate": 8.290595610952045e-06,
      "loss": 0.1087,
      "step": 305
    },
    {
      "epoch": 0.5444839857651246,
      "grad_norm": 2.109654517374457,
      "learning_rate": 8.280060758428294e-06,
      "loss": 0.1524,
      "step": 306
    },
    {
      "epoch": 0.5462633451957295,
      "grad_norm": 1.1549538571848086,
      "learning_rate": 8.269500281798164e-06,
      "loss": 0.0848,
      "step": 307
    },
    {
      "epoch": 0.5480427046263345,
      "grad_norm": 1.8556100914035978,
      "learning_rate": 8.258914263560971e-06,
      "loss": 0.1203,
      "step": 308
    },
    {
      "epoch": 0.5498220640569395,
      "grad_norm": 1.6945392487029074,
      "learning_rate": 8.248302786415567e-06,
      "loss": 0.122,
      "step": 309
    },
    {
      "epoch": 0.5516014234875445,
      "grad_norm": 1.861926716085086,
      "learning_rate": 8.237665933259693e-06,
      "loss": 0.1306,
      "step": 310
    },
    {
      "epoch": 0.5533807829181495,
      "grad_norm": 1.6946931932903524,
      "learning_rate": 8.227003787189323e-06,
      "loss": 0.1195,
      "step": 311
    },
    {
      "epoch": 0.5551601423487544,
      "grad_norm": 1.213752105892572,
      "learning_rate": 8.216316431498028e-06,
      "loss": 0.0811,
      "step": 312
    },
    {
      "epoch": 0.5569395017793595,
      "grad_norm": 1.4797557460998667,
      "learning_rate": 8.205603949676317e-06,
      "loss": 0.1186,
      "step": 313
    },
    {
      "epoch": 0.5587188612099644,
      "grad_norm": 1.4790496301874914,
      "learning_rate": 8.194866425410984e-06,
      "loss": 0.1211,
      "step": 314
    },
    {
      "epoch": 0.5604982206405694,
      "grad_norm": 1.604301494211466,
      "learning_rate": 8.184103942584456e-06,
      "loss": 0.1109,
      "step": 315
    },
    {
      "epoch": 0.5622775800711743,
      "grad_norm": 1.5291947292283778,
      "learning_rate": 8.173316585274144e-06,
      "loss": 0.1115,
      "step": 316
    },
    {
      "epoch": 0.5640569395017794,
      "grad_norm": 1.6255840518044504,
      "learning_rate": 8.162504437751775e-06,
      "loss": 0.1123,
      "step": 317
    },
    {
      "epoch": 0.5658362989323843,
      "grad_norm": 1.4009559781220418,
      "learning_rate": 8.151667584482742e-06,
      "loss": 0.1033,
      "step": 318
    },
    {
      "epoch": 0.5676156583629893,
      "grad_norm": 1.7716857155233592,
      "learning_rate": 8.140806110125442e-06,
      "loss": 0.128,
      "step": 319
    },
    {
      "epoch": 0.5693950177935944,
      "grad_norm": 1.412429254330522,
      "learning_rate": 8.129920099530608e-06,
      "loss": 0.1139,
      "step": 320
    },
    {
      "epoch": 0.5711743772241993,
      "grad_norm": 1.140898234267356,
      "learning_rate": 8.119009637740663e-06,
      "loss": 0.0785,
      "step": 321
    },
    {
      "epoch": 0.5729537366548043,
      "grad_norm": 1.3330593757161735,
      "learning_rate": 8.108074809989032e-06,
      "loss": 0.107,
      "step": 322
    },
    {
      "epoch": 0.5747330960854092,
      "grad_norm": 1.7623416727715555,
      "learning_rate": 8.097115701699498e-06,
      "loss": 0.1103,
      "step": 323
    },
    {
      "epoch": 0.5765124555160143,
      "grad_norm": 1.6724815098636745,
      "learning_rate": 8.086132398485525e-06,
      "loss": 0.1247,
      "step": 324
    },
    {
      "epoch": 0.5782918149466192,
      "grad_norm": 0.9900202337091668,
      "learning_rate": 8.075124986149583e-06,
      "loss": 0.0745,
      "step": 325
    },
    {
      "epoch": 0.5800711743772242,
      "grad_norm": 1.5732301540148983,
      "learning_rate": 8.064093550682494e-06,
      "loss": 0.1054,
      "step": 326
    },
    {
      "epoch": 0.5818505338078291,
      "grad_norm": 1.2818640337326512,
      "learning_rate": 8.053038178262742e-06,
      "loss": 0.0763,
      "step": 327
    },
    {
      "epoch": 0.5836298932384342,
      "grad_norm": 1.5136675283064702,
      "learning_rate": 8.041958955255815e-06,
      "loss": 0.1136,
      "step": 328
    },
    {
      "epoch": 0.5854092526690391,
      "grad_norm": 1.6782050952893226,
      "learning_rate": 8.030855968213518e-06,
      "loss": 0.1191,
      "step": 329
    },
    {
      "epoch": 0.5871886120996441,
      "grad_norm": 2.010469616038683,
      "learning_rate": 8.019729303873307e-06,
      "loss": 0.1269,
      "step": 330
    },
    {
      "epoch": 0.5889679715302492,
      "grad_norm": 1.3754612513536115,
      "learning_rate": 8.008579049157607e-06,
      "loss": 0.0918,
      "step": 331
    },
    {
      "epoch": 0.5907473309608541,
      "grad_norm": 1.3777367586726934,
      "learning_rate": 7.99740529117313e-06,
      "loss": 0.0961,
      "step": 332
    },
    {
      "epoch": 0.5925266903914591,
      "grad_norm": 1.5507434378552574,
      "learning_rate": 7.986208117210198e-06,
      "loss": 0.1163,
      "step": 333
    },
    {
      "epoch": 0.594306049822064,
      "grad_norm": 1.6666304654188895,
      "learning_rate": 7.974987614742066e-06,
      "loss": 0.1179,
      "step": 334
    },
    {
      "epoch": 0.5960854092526691,
      "grad_norm": 1.543773220344925,
      "learning_rate": 7.963743871424224e-06,
      "loss": 0.0975,
      "step": 335
    },
    {
      "epoch": 0.597864768683274,
      "grad_norm": 1.6142985058688342,
      "learning_rate": 7.952476975093729e-06,
      "loss": 0.1114,
      "step": 336
    },
    {
      "epoch": 0.599644128113879,
      "grad_norm": 1.9823739779760452,
      "learning_rate": 7.941187013768508e-06,
      "loss": 0.1371,
      "step": 337
    },
    {
      "epoch": 0.6014234875444839,
      "grad_norm": 1.7974583736384098,
      "learning_rate": 7.929874075646673e-06,
      "loss": 0.1158,
      "step": 338
    },
    {
      "epoch": 0.603202846975089,
      "grad_norm": 1.668304400307075,
      "learning_rate": 7.918538249105835e-06,
      "loss": 0.1044,
      "step": 339
    },
    {
      "epoch": 0.604982206405694,
      "grad_norm": 1.9936201855053692,
      "learning_rate": 7.907179622702409e-06,
      "loss": 0.1367,
      "step": 340
    },
    {
      "epoch": 0.6067615658362989,
      "grad_norm": 1.2355567944814314,
      "learning_rate": 7.895798285170927e-06,
      "loss": 0.0883,
      "step": 341
    },
    {
      "epoch": 0.608540925266904,
      "grad_norm": 1.4488859255916893,
      "learning_rate": 7.88439432542334e-06,
      "loss": 0.0996,
      "step": 342
    },
    {
      "epoch": 0.6103202846975089,
      "grad_norm": 1.3181169063954352,
      "learning_rate": 7.872967832548327e-06,
      "loss": 0.1002,
      "step": 343
    },
    {
      "epoch": 0.6120996441281139,
      "grad_norm": 1.8804904445026709,
      "learning_rate": 7.861518895810597e-06,
      "loss": 0.1578,
      "step": 344
    },
    {
      "epoch": 0.6138790035587188,
      "grad_norm": 1.551399504007687,
      "learning_rate": 7.850047604650188e-06,
      "loss": 0.1209,
      "step": 345
    },
    {
      "epoch": 0.6156583629893239,
      "grad_norm": 1.5486531647636719,
      "learning_rate": 7.838554048681783e-06,
      "loss": 0.1032,
      "step": 346
    },
    {
      "epoch": 0.6174377224199288,
      "grad_norm": 1.8855178801122414,
      "learning_rate": 7.827038317693988e-06,
      "loss": 0.1374,
      "step": 347
    },
    {
      "epoch": 0.6192170818505338,
      "grad_norm": 1.6716656047293972,
      "learning_rate": 7.815500501648654e-06,
      "loss": 0.1091,
      "step": 348
    },
    {
      "epoch": 0.6209964412811388,
      "grad_norm": 1.6571405418199678,
      "learning_rate": 7.80394069068015e-06,
      "loss": 0.1267,
      "step": 349
    },
    {
      "epoch": 0.6227758007117438,
      "grad_norm": 1.764808692097743,
      "learning_rate": 7.79235897509468e-06,
      "loss": 0.1216,
      "step": 350
    },
{ |
|
"epoch": 0.6245551601423488, |
|
"grad_norm": 1.784707724597118, |
|
"learning_rate": 7.780755445369563e-06, |
|
"loss": 0.1369, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.6263345195729537, |
|
"grad_norm": 2.060107901891134, |
|
"learning_rate": 7.769130192152538e-06, |
|
"loss": 0.1548, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.6281138790035588, |
|
"grad_norm": 1.7740612223440642, |
|
"learning_rate": 7.757483306261042e-06, |
|
"loss": 0.1347, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.6298932384341637, |
|
"grad_norm": 2.0225463906343673, |
|
"learning_rate": 7.745814878681516e-06, |
|
"loss": 0.1402, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.6316725978647687, |
|
"grad_norm": 1.4087462157041928, |
|
"learning_rate": 7.734125000568684e-06, |
|
"loss": 0.0935, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.6334519572953736, |
|
"grad_norm": 1.400033250230002, |
|
"learning_rate": 7.722413763244837e-06, |
|
"loss": 0.1005, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.6352313167259787, |
|
"grad_norm": 1.105153136617715, |
|
"learning_rate": 7.710681258199136e-06, |
|
"loss": 0.0874, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.6370106761565836, |
|
"grad_norm": 1.5572474642180052, |
|
"learning_rate": 7.69892757708688e-06, |
|
"loss": 0.1151, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.6387900355871886, |
|
"grad_norm": 1.5551143395742022, |
|
"learning_rate": 7.687152811728799e-06, |
|
"loss": 0.1046, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.6405693950177936, |
|
"grad_norm": 1.4835083753644804, |
|
"learning_rate": 7.675357054110337e-06, |
|
"loss": 0.1048, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.6423487544483986, |
|
"grad_norm": 1.5714877890011716, |
|
"learning_rate": 7.663540396380931e-06, |
|
"loss": 0.1053, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.6441281138790036, |
|
"grad_norm": 1.3806114544128887, |
|
"learning_rate": 7.651702930853287e-06, |
|
"loss": 0.1037, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.6459074733096085, |
|
"grad_norm": 2.215923657342268, |
|
"learning_rate": 7.639844750002668e-06, |
|
"loss": 0.1333, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.6476868327402135, |
|
"grad_norm": 1.4001273897744355, |
|
"learning_rate": 7.627965946466167e-06, |
|
"loss": 0.0978, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.6494661921708185, |
|
"grad_norm": 1.7306966585623116, |
|
"learning_rate": 7.616066613041977e-06, |
|
"loss": 0.1279, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.6512455516014235, |
|
"grad_norm": 1.9522800357488899, |
|
"learning_rate": 7.6041468426886785e-06, |
|
"loss": 0.1302, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.6530249110320284, |
|
"grad_norm": 1.1260963301606752, |
|
"learning_rate": 7.592206728524507e-06, |
|
"loss": 0.0649, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.6548042704626335, |
|
"grad_norm": 2.0022519581941327, |
|
"learning_rate": 7.580246363826621e-06, |
|
"loss": 0.1333, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.6565836298932385, |
|
"grad_norm": 2.0149008283538197, |
|
"learning_rate": 7.568265842030381e-06, |
|
"loss": 0.1489, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.6583629893238434, |
|
"grad_norm": 1.3809647381184196, |
|
"learning_rate": 7.556265256728618e-06, |
|
"loss": 0.1011, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.6601423487544484, |
|
"grad_norm": 1.7971067801265364, |
|
"learning_rate": 7.544244701670894e-06, |
|
"loss": 0.1235, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.6619217081850534, |
|
"grad_norm": 1.4149095610618776, |
|
"learning_rate": 7.532204270762786e-06, |
|
"loss": 0.0979, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.6637010676156584, |
|
"grad_norm": 1.5835843823324067, |
|
"learning_rate": 7.520144058065133e-06, |
|
"loss": 0.0989, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.6654804270462633, |
|
"grad_norm": 1.5865223343878991, |
|
"learning_rate": 7.50806415779332e-06, |
|
"loss": 0.1, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.6672597864768683, |
|
"grad_norm": 1.706439450383063, |
|
"learning_rate": 7.495964664316525e-06, |
|
"loss": 0.1168, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.6690391459074733, |
|
"grad_norm": 1.8257375904959714, |
|
"learning_rate": 7.4838456721569975e-06, |
|
"loss": 0.1365, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.6708185053380783, |
|
"grad_norm": 1.2864259409818386, |
|
"learning_rate": 7.471707275989304e-06, |
|
"loss": 0.1014, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.6725978647686833, |
|
"grad_norm": 1.4876277910426732, |
|
"learning_rate": 7.459549570639602e-06, |
|
"loss": 0.1028, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.6743772241992882, |
|
"grad_norm": 1.9931072811869504, |
|
"learning_rate": 7.447372651084896e-06, |
|
"loss": 0.1169, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.6761565836298933, |
|
"grad_norm": 1.4643926421390652, |
|
"learning_rate": 7.435176612452286e-06, |
|
"loss": 0.09, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.6779359430604982, |
|
"grad_norm": 1.4212475885319689, |
|
"learning_rate": 7.4229615500182396e-06, |
|
"loss": 0.0877, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.6797153024911032, |
|
"grad_norm": 1.4648600146736628, |
|
"learning_rate": 7.4107275592078345e-06, |
|
"loss": 0.1012, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.6814946619217082, |
|
"grad_norm": 1.3949111987287208, |
|
"learning_rate": 7.398474735594022e-06, |
|
"loss": 0.0966, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6832740213523132, |
|
"grad_norm": 1.737983092372598, |
|
"learning_rate": 7.386203174896872e-06, |
|
"loss": 0.1169, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.6850533807829181, |
|
"grad_norm": 1.956534849831809, |
|
"learning_rate": 7.373912972982838e-06, |
|
"loss": 0.0955, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6868327402135231, |
|
"grad_norm": 1.4765841084425897, |
|
"learning_rate": 7.361604225863992e-06, |
|
"loss": 0.1047, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6886120996441281, |
|
"grad_norm": 1.6853836298864953, |
|
"learning_rate": 7.349277029697287e-06, |
|
"loss": 0.1065, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.6903914590747331, |
|
"grad_norm": 1.7009026657313926, |
|
"learning_rate": 7.336931480783801e-06, |
|
"loss": 0.1273, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.6921708185053381, |
|
"grad_norm": 1.8452170336928666, |
|
"learning_rate": 7.3245676755679854e-06, |
|
"loss": 0.1145, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.693950177935943, |
|
"grad_norm": 1.6989163939473242, |
|
"learning_rate": 7.312185710636911e-06, |
|
"loss": 0.1209, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.6957295373665481, |
|
"grad_norm": 1.5452587910564195, |
|
"learning_rate": 7.299785682719512e-06, |
|
"loss": 0.1191, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.697508896797153, |
|
"grad_norm": 1.4732914117496372, |
|
"learning_rate": 7.287367688685835e-06, |
|
"loss": 0.1069, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.699288256227758, |
|
"grad_norm": 2.0015152006828734, |
|
"learning_rate": 7.274931825546279e-06, |
|
"loss": 0.1397, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.701067615658363, |
|
"grad_norm": 1.7538408296691694, |
|
"learning_rate": 7.262478190450834e-06, |
|
"loss": 0.1178, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.702846975088968, |
|
"grad_norm": 1.9848758062026528, |
|
"learning_rate": 7.250006880688332e-06, |
|
"loss": 0.1325, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7046263345195729, |
|
"grad_norm": 1.8313087018693532, |
|
"learning_rate": 7.2375179936856775e-06, |
|
"loss": 0.1265, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7064056939501779, |
|
"grad_norm": 1.4250883999734847, |
|
"learning_rate": 7.22501162700709e-06, |
|
"loss": 0.0998, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.708185053380783, |
|
"grad_norm": 1.3506194011919066, |
|
"learning_rate": 7.21248787835334e-06, |
|
"loss": 0.0819, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7099644128113879, |
|
"grad_norm": 1.2045083958740928, |
|
"learning_rate": 7.199946845560994e-06, |
|
"loss": 0.0669, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7117437722419929, |
|
"grad_norm": 1.854331631752082, |
|
"learning_rate": 7.1873886266016365e-06, |
|
"loss": 0.1212, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7117437722419929, |
|
"eval_loss": 0.116457499563694, |
|
"eval_runtime": 2.8488, |
|
"eval_samples_per_second": 16.147, |
|
"eval_steps_per_second": 4.212, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7135231316725978, |
|
"grad_norm": 1.3960764796715444, |
|
"learning_rate": 7.174813319581115e-06, |
|
"loss": 0.1066, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7153024911032029, |
|
"grad_norm": 1.6205317224216729, |
|
"learning_rate": 7.162221022738768e-06, |
|
"loss": 0.0929, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7170818505338078, |
|
"grad_norm": 1.5329977108845547, |
|
"learning_rate": 7.149611834446664e-06, |
|
"loss": 0.1026, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7188612099644128, |
|
"grad_norm": 1.5712471988862724, |
|
"learning_rate": 7.136985853208824e-06, |
|
"loss": 0.1057, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.7206405693950177, |
|
"grad_norm": 1.5962917888172148, |
|
"learning_rate": 7.124343177660462e-06, |
|
"loss": 0.1038, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7224199288256228, |
|
"grad_norm": 2.4027075784555953, |
|
"learning_rate": 7.111683906567206e-06, |
|
"loss": 0.1256, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7241992882562278, |
|
"grad_norm": 1.7513432131126196, |
|
"learning_rate": 7.099008138824329e-06, |
|
"loss": 0.1076, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7259786476868327, |
|
"grad_norm": 1.7480562391725012, |
|
"learning_rate": 7.086315973455982e-06, |
|
"loss": 0.1217, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.7277580071174378, |
|
"grad_norm": 1.6799596689323806, |
|
"learning_rate": 7.0736075096144084e-06, |
|
"loss": 0.1262, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.7295373665480427, |
|
"grad_norm": 1.4688124552376174, |
|
"learning_rate": 7.060882846579182e-06, |
|
"loss": 0.0854, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7313167259786477, |
|
"grad_norm": 1.4736364859940467, |
|
"learning_rate": 7.048142083756427e-06, |
|
"loss": 0.1042, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7330960854092526, |
|
"grad_norm": 1.8134702131762355, |
|
"learning_rate": 7.035385320678035e-06, |
|
"loss": 0.1144, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.7348754448398577, |
|
"grad_norm": 1.9116502746386488, |
|
"learning_rate": 7.022612657000898e-06, |
|
"loss": 0.1342, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7366548042704626, |
|
"grad_norm": 1.8791817739013332, |
|
"learning_rate": 7.0098241925061215e-06, |
|
"loss": 0.1276, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7384341637010676, |
|
"grad_norm": 1.7698331086048267, |
|
"learning_rate": 6.997020027098249e-06, |
|
"loss": 0.1157, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7402135231316725, |
|
"grad_norm": 1.5225335904923694, |
|
"learning_rate": 6.9842002608044844e-06, |
|
"loss": 0.1078, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7419928825622776, |
|
"grad_norm": 1.4883945208906542, |
|
"learning_rate": 6.971364993773901e-06, |
|
"loss": 0.1072, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7437722419928826, |
|
"grad_norm": 1.4491969080956943, |
|
"learning_rate": 6.958514326276669e-06, |
|
"loss": 0.0955, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7455516014234875, |
|
"grad_norm": 1.4626329384153665, |
|
"learning_rate": 6.945648358703269e-06, |
|
"loss": 0.0944, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7473309608540926, |
|
"grad_norm": 1.5830715989134705, |
|
"learning_rate": 6.932767191563703e-06, |
|
"loss": 0.111, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7491103202846975, |
|
"grad_norm": 1.2807490365152725, |
|
"learning_rate": 6.919870925486718e-06, |
|
"loss": 0.0777, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.7508896797153025, |
|
"grad_norm": 1.2820574667305715, |
|
"learning_rate": 6.906959661219011e-06, |
|
"loss": 0.092, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.7526690391459074, |
|
"grad_norm": 1.3830973589340227, |
|
"learning_rate": 6.8940334996244505e-06, |
|
"loss": 0.1026, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.7544483985765125, |
|
"grad_norm": 1.6493445416304913, |
|
"learning_rate": 6.881092541683279e-06, |
|
"loss": 0.102, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.7562277580071174, |
|
"grad_norm": 1.382992025592145, |
|
"learning_rate": 6.8681368884913345e-06, |
|
"loss": 0.0961, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.7580071174377224, |
|
"grad_norm": 1.6903350068742105, |
|
"learning_rate": 6.855166641259252e-06, |
|
"loss": 0.1197, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.7597864768683275, |
|
"grad_norm": 1.5194894285181493, |
|
"learning_rate": 6.8421819013116766e-06, |
|
"loss": 0.1251, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.7615658362989324, |
|
"grad_norm": 1.4575847201339305, |
|
"learning_rate": 6.829182770086474e-06, |
|
"loss": 0.1168, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.7633451957295374, |
|
"grad_norm": 1.6387689211024612, |
|
"learning_rate": 6.816169349133934e-06, |
|
"loss": 0.1123, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.7651245551601423, |
|
"grad_norm": 1.273813565801084, |
|
"learning_rate": 6.803141740115979e-06, |
|
"loss": 0.0751, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7669039145907474, |
|
"grad_norm": 1.9538136710341851, |
|
"learning_rate": 6.7901000448053676e-06, |
|
"loss": 0.133, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.7686832740213523, |
|
"grad_norm": 1.2247904168984738, |
|
"learning_rate": 6.777044365084907e-06, |
|
"loss": 0.0882, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.7704626334519573, |
|
"grad_norm": 1.3408231054600699, |
|
"learning_rate": 6.763974802946649e-06, |
|
"loss": 0.0871, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7722419928825622, |
|
"grad_norm": 1.43698498808156, |
|
"learning_rate": 6.750891460491093e-06, |
|
"loss": 0.1229, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.7740213523131673, |
|
"grad_norm": 1.873683692002035, |
|
"learning_rate": 6.737794439926395e-06, |
|
"loss": 0.146, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7758007117437722, |
|
"grad_norm": 1.7624833174824797, |
|
"learning_rate": 6.724683843567567e-06, |
|
"loss": 0.1157, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.7775800711743772, |
|
"grad_norm": 1.800324978478135, |
|
"learning_rate": 6.711559773835672e-06, |
|
"loss": 0.1101, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.7793594306049823, |
|
"grad_norm": 1.601926138333465, |
|
"learning_rate": 6.69842233325703e-06, |
|
"loss": 0.1076, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.7811387900355872, |
|
"grad_norm": 1.756530019144822, |
|
"learning_rate": 6.685271624462416e-06, |
|
"loss": 0.1262, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7829181494661922, |
|
"grad_norm": 1.478085867196419, |
|
"learning_rate": 6.672107750186255e-06, |
|
"loss": 0.0901, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7846975088967971, |
|
"grad_norm": 1.0438162768610115, |
|
"learning_rate": 6.658930813265825e-06, |
|
"loss": 0.0671, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.7864768683274022, |
|
"grad_norm": 1.4855178126399633, |
|
"learning_rate": 6.645740916640449e-06, |
|
"loss": 0.1135, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.7882562277580071, |
|
"grad_norm": 1.2845505262887242, |
|
"learning_rate": 6.63253816335069e-06, |
|
"loss": 0.08, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.7900355871886121, |
|
"grad_norm": 1.503932190439218, |
|
"learning_rate": 6.619322656537552e-06, |
|
"loss": 0.0976, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.791814946619217, |
|
"grad_norm": 1.507055257241038, |
|
"learning_rate": 6.606094499441671e-06, |
|
"loss": 0.1039, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.7935943060498221, |
|
"grad_norm": 1.5769242920507418, |
|
"learning_rate": 6.592853795402502e-06, |
|
"loss": 0.1103, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.7953736654804271, |
|
"grad_norm": 1.4449697638181271, |
|
"learning_rate": 6.579600647857525e-06, |
|
"loss": 0.0883, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.797153024911032, |
|
"grad_norm": 2.0479855231121618, |
|
"learning_rate": 6.566335160341425e-06, |
|
"loss": 0.1279, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.798932384341637, |
|
"grad_norm": 1.2553749124806357, |
|
"learning_rate": 6.553057436485289e-06, |
|
"loss": 0.094, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.800711743772242, |
|
"grad_norm": 1.4273835428516042, |
|
"learning_rate": 6.539767580015799e-06, |
|
"loss": 0.1205, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.802491103202847, |
|
"grad_norm": 2.2176546518282736, |
|
"learning_rate": 6.52646569475441e-06, |
|
"loss": 0.1252, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8042704626334519, |
|
"grad_norm": 1.537333393190635, |
|
"learning_rate": 6.513151884616556e-06, |
|
"loss": 0.1112, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.806049822064057, |
|
"grad_norm": 2.7086615895770296, |
|
"learning_rate": 6.499826253610823e-06, |
|
"loss": 0.1263, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8078291814946619, |
|
"grad_norm": 1.4894128319823599, |
|
"learning_rate": 6.486488905838143e-06, |
|
"loss": 0.0987, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.8096085409252669, |
|
"grad_norm": 1.5631238939029113, |
|
"learning_rate": 6.473139945490984e-06, |
|
"loss": 0.0927, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8113879003558719, |
|
"grad_norm": 1.703111317717426, |
|
"learning_rate": 6.459779476852528e-06, |
|
"loss": 0.0975, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.8131672597864769, |
|
"grad_norm": 1.435933544182716, |
|
"learning_rate": 6.446407604295863e-06, |
|
"loss": 0.1072, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.8149466192170819, |
|
"grad_norm": 1.4952825715855165, |
|
"learning_rate": 6.433024432283169e-06, |
|
"loss": 0.0911, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8167259786476868, |
|
"grad_norm": 1.3759953157821836, |
|
"learning_rate": 6.41963006536489e-06, |
|
"loss": 0.0885, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.8185053380782918, |
|
"grad_norm": 1.6472861723307277, |
|
"learning_rate": 6.4062246081789316e-06, |
|
"loss": 0.1157, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.8202846975088968, |
|
"grad_norm": 1.237140662909847, |
|
"learning_rate": 6.392808165449836e-06, |
|
"loss": 0.0786, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.8220640569395018, |
|
"grad_norm": 1.5556573531023443, |
|
"learning_rate": 6.379380841987965e-06, |
|
"loss": 0.11, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8238434163701067, |
|
"grad_norm": 1.7198728275246784, |
|
"learning_rate": 6.365942742688684e-06, |
|
"loss": 0.0979, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8256227758007118, |
|
"grad_norm": 1.6389544576520083, |
|
"learning_rate": 6.352493972531535e-06, |
|
"loss": 0.1067, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8274021352313167, |
|
"grad_norm": 1.8158974903235148, |
|
"learning_rate": 6.339034636579425e-06, |
|
"loss": 0.122, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8291814946619217, |
|
"grad_norm": 1.3443223269237963, |
|
"learning_rate": 6.325564839977802e-06, |
|
"loss": 0.088, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.8309608540925267, |
|
"grad_norm": 1.2649013594127572, |
|
"learning_rate": 6.312084687953835e-06, |
|
"loss": 0.0879, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8327402135231317, |
|
"grad_norm": 1.9210728605324108, |
|
"learning_rate": 6.298594285815585e-06, |
|
"loss": 0.1344, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.8345195729537367, |
|
"grad_norm": 1.5552844715502996, |
|
"learning_rate": 6.2850937389511936e-06, |
|
"loss": 0.1218, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8362989323843416, |
|
"grad_norm": 1.4371080660863236, |
|
"learning_rate": 6.271583152828049e-06, |
|
"loss": 0.0915, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8380782918149466, |
|
"grad_norm": 1.5902541391868361, |
|
"learning_rate": 6.258062632991972e-06, |
|
"loss": 0.0977, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8398576512455516, |
|
"grad_norm": 1.38108075558522, |
|
"learning_rate": 6.244532285066382e-06, |
|
"loss": 0.0892, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8416370106761566, |
|
"grad_norm": 1.1488286793264708, |
|
"learning_rate": 6.2309922147514775e-06, |
|
"loss": 0.078, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8434163701067615, |
|
"grad_norm": 1.6397289213287805, |
|
"learning_rate": 6.2174425278234115e-06, |
|
"loss": 0.1394, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8451957295373665, |
|
"grad_norm": 1.6351543813793374, |
|
"learning_rate": 6.20388333013346e-06, |
|
"loss": 0.1054, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8469750889679716, |
|
"grad_norm": 1.4585037139001893, |
|
"learning_rate": 6.190314727607196e-06, |
|
"loss": 0.1048, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.8487544483985765, |
|
"grad_norm": 1.9131631084667446, |
|
"learning_rate": 6.176736826243671e-06, |
|
"loss": 0.1328, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.8505338078291815, |
|
"grad_norm": 1.8344597703209329, |
|
"learning_rate": 6.163149732114571e-06, |
|
"loss": 0.127, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.8523131672597865, |
|
"grad_norm": 1.6713998606893898, |
|
"learning_rate": 6.149553551363404e-06, |
|
"loss": 0.1061, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.8540925266903915, |
|
"grad_norm": 1.4594029813155798, |
|
"learning_rate": 6.1359483902046605e-06, |
|
"loss": 0.0942, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.8558718861209964, |
|
"grad_norm": 1.613048626080875, |
|
"learning_rate": 6.122334354922984e-06, |
|
"loss": 0.1105, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.8576512455516014, |
|
"grad_norm": 1.559407146526027, |
|
"learning_rate": 6.108711551872347e-06, |
|
"loss": 0.1168, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.8594306049822064, |
|
"grad_norm": 1.9666482107446406, |
|
"learning_rate": 6.095080087475218e-06, |
|
"loss": 0.1249, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.8612099644128114, |
|
"grad_norm": 1.4922175453612592, |
|
"learning_rate": 6.0814400682217236e-06, |
|
"loss": 0.1068, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.8629893238434164, |
|
"grad_norm": 1.3741296658731252, |
|
"learning_rate": 6.067791600668823e-06, |
|
"loss": 0.0731, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.8647686832740213, |
|
"grad_norm": 1.3059320927776805, |
|
"learning_rate": 6.054134791439479e-06, |
|
"loss": 0.08, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.8665480427046264, |
|
"grad_norm": 1.6151553056237768, |
|
"learning_rate": 6.040469747221815e-06, |
|
"loss": 0.1158, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.8683274021352313, |
|
"grad_norm": 1.5746698736863445, |
|
"learning_rate": 6.026796574768288e-06, |
|
"loss": 0.0916, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.8701067615658363, |
|
"grad_norm": 1.3874806316012922, |
|
"learning_rate": 6.013115380894854e-06, |
|
"loss": 0.0892, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.8718861209964412, |
|
"grad_norm": 1.0956742966118196, |
|
"learning_rate": 5.999426272480133e-06, |
|
"loss": 0.0682, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.8736654804270463, |
|
"grad_norm": 1.5432394148844597, |
|
"learning_rate": 5.985729356464575e-06, |
|
"loss": 0.1098, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.8754448398576512, |
|
"grad_norm": 1.4065810822698819, |
|
"learning_rate": 5.972024739849622e-06, |
|
"loss": 0.0973, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.8772241992882562, |
|
"grad_norm": 1.2879685686205695, |
|
"learning_rate": 5.958312529696874e-06, |
|
"loss": 0.0723, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.8790035587188612, |
|
"grad_norm": 1.4030077486225292, |
|
"learning_rate": 5.944592833127253e-06, |
|
"loss": 0.1102, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.8807829181494662, |
|
"grad_norm": 1.5732158749805603, |
|
"learning_rate": 5.9308657573201645e-06, |
|
"loss": 0.126, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.8825622775800712, |
|
"grad_norm": 1.5417129896638424, |
|
"learning_rate": 5.917131409512663e-06, |
|
"loss": 0.0932, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.8843416370106761, |
|
"grad_norm": 1.6388211529343981, |
|
"learning_rate": 5.903389896998611e-06, |
|
"loss": 0.0974, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.8861209964412812, |
|
"grad_norm": 1.5652593720522974, |
|
"learning_rate": 5.889641327127843e-06, |
|
"loss": 0.1027, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.8879003558718861, |
|
"grad_norm": 1.5115758620731554, |
|
"learning_rate": 5.875885807305326e-06, |
|
"loss": 0.0989, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.8896797153024911, |
|
"grad_norm": 1.4595332141986987, |
|
"learning_rate": 5.862123444990319e-06, |
|
"loss": 0.1051, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.891459074733096, |
|
"grad_norm": 1.4709936061770459, |
|
"learning_rate": 5.848354347695537e-06, |
|
"loss": 0.1139, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.8932384341637011, |
|
"grad_norm": 1.6873230435457072, |
|
"learning_rate": 5.83457862298631e-06, |
|
"loss": 0.1205, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.895017793594306, |
|
"grad_norm": 1.9941623400355295, |
|
"learning_rate": 5.8207963784797396e-06, |
|
"loss": 0.1104, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.896797153024911, |
|
"grad_norm": 1.4734190999178454, |
|
"learning_rate": 5.807007721843862e-06, |
|
"loss": 0.1273, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.8985765124555161, |
|
"grad_norm": 1.479376246734761, |
|
"learning_rate": 5.793212760796804e-06, |
|
"loss": 0.0933, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.900355871886121, |
|
"grad_norm": 1.7853395446056677, |
|
"learning_rate": 5.779411603105947e-06, |
|
"loss": 0.1054, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.902135231316726, |
|
"grad_norm": 1.426901695183018, |
|
"learning_rate": 5.765604356587076e-06, |
|
"loss": 0.0849, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9039145907473309, |
|
"grad_norm": 1.3915437670831905, |
|
"learning_rate": 5.751791129103545e-06, |
|
"loss": 0.0965, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.905693950177936, |
|
"grad_norm": 1.8908189569799152, |
|
"learning_rate": 5.737972028565431e-06, |
|
"loss": 0.1348, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.9074733096085409, |
|
"grad_norm": 2.0123874647765256, |
|
"learning_rate": 5.7241471629286934e-06, |
|
"loss": 0.1369, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9092526690391459, |
|
"grad_norm": 1.7109732645349063, |
|
"learning_rate": 5.7103166401943276e-06, |
|
"loss": 0.1169, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.9110320284697508, |
|
"grad_norm": 1.976072814745251, |
|
"learning_rate": 5.696480568407523e-06, |
|
"loss": 0.1255, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.9128113879003559, |
|
"grad_norm": 1.9009591223454294, |
|
"learning_rate": 5.682639055656817e-06, |
|
"loss": 0.1244, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.9145907473309609, |
|
"grad_norm": 2.2623476186059515, |
|
"learning_rate": 5.668792210073255e-06, |
|
"loss": 0.1384, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.9163701067615658, |
|
"grad_norm": 1.6300736404246523, |
|
"learning_rate": 5.654940139829544e-06, |
|
"loss": 0.1104, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9181494661921709, |
|
"grad_norm": 1.4571173349482662, |
|
"learning_rate": 5.641082953139201e-06, |
|
"loss": 0.0951, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.9199288256227758, |
|
"grad_norm": 1.237544595173587, |
|
"learning_rate": 5.6272207582557195e-06, |
|
"loss": 0.0867, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.9217081850533808, |
|
"grad_norm": 1.4894621940242225, |
|
"learning_rate": 5.61335366347171e-06, |
|
"loss": 0.0896, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.9234875444839857, |
|
"grad_norm": 1.418711132355023, |
|
"learning_rate": 5.599481777118071e-06, |
|
"loss": 0.1007, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.9252669039145908, |
|
"grad_norm": 1.493837253821444, |
|
"learning_rate": 5.585605207563124e-06, |
|
"loss": 0.0892, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.9270462633451957, |
|
"grad_norm": 1.4712756504396751, |
|
"learning_rate": 5.571724063211782e-06, |
|
"loss": 0.0924, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.9288256227758007, |
|
"grad_norm": 1.1592206399833151, |
|
"learning_rate": 5.557838452504692e-06, |
|
"loss": 0.0528, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.9306049822064056, |
|
"grad_norm": 1.920479782025393, |
|
"learning_rate": 5.5439484839173996e-06, |
|
"loss": 0.1121, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.9323843416370107, |
|
"grad_norm": 1.3930276138006012, |
|
"learning_rate": 5.530054265959486e-06, |
|
"loss": 0.0999, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.9341637010676157, |
|
"grad_norm": 1.5591719722207709, |
|
"learning_rate": 5.516155907173735e-06, |
|
"loss": 0.1038, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.9359430604982206, |
|
"grad_norm": 1.346087230332831, |
|
"learning_rate": 5.5022535161352764e-06, |
|
"loss": 0.0977, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.9377224199288257, |
|
"grad_norm": 1.7828507928590311, |
|
"learning_rate": 5.488347201450741e-06, |
|
"loss": 0.0998, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.9395017793594306, |
|
"grad_norm": 1.535438816955036, |
|
"learning_rate": 5.47443707175741e-06, |
|
"loss": 0.0987, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.9412811387900356, |
|
"grad_norm": 1.3838885859360288, |
|
"learning_rate": 5.46052323572237e-06, |
|
"loss": 0.0822, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.9430604982206405, |
|
"grad_norm": 1.5270906368516974, |
|
"learning_rate": 5.446605802041662e-06, |
|
"loss": 0.092, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.9448398576512456, |
|
"grad_norm": 1.380433638403094, |
|
"learning_rate": 5.432684879439428e-06, |
|
"loss": 0.0989, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.9466192170818505, |
|
"grad_norm": 1.9275980920605078, |
|
"learning_rate": 5.418760576667071e-06, |
|
"loss": 0.0847, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.9483985765124555, |
|
"grad_norm": 1.7346415116811966, |
|
"learning_rate": 5.404833002502398e-06, |
|
"loss": 0.0943, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.9501779359430605, |
|
"grad_norm": 1.7791350273499955, |
|
"learning_rate": 5.39090226574877e-06, |
|
"loss": 0.0943, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.9519572953736655, |
|
"grad_norm": 1.9489032953793617, |
|
"learning_rate": 5.376968475234258e-06, |
|
"loss": 0.1297, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.9537366548042705, |
|
"grad_norm": 1.355338673858814, |
|
"learning_rate": 5.363031739810787e-06, |
|
"loss": 0.0977, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.9555160142348754, |
|
"grad_norm": 1.4152726183088515, |
|
"learning_rate": 5.349092168353291e-06, |
|
"loss": 0.0981, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.9572953736654805, |
|
"grad_norm": 1.5209818628250624, |
|
"learning_rate": 5.335149869758855e-06, |
|
"loss": 0.0944, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.9590747330960854, |
|
"grad_norm": 1.4582909280623106, |
|
"learning_rate": 5.32120495294587e-06, |
|
"loss": 0.0887, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.9608540925266904, |
|
"grad_norm": 1.6556578681488117, |
|
"learning_rate": 5.3072575268531835e-06, |
|
"loss": 0.091, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.9626334519572953, |
|
"grad_norm": 1.7048920382237032, |
|
"learning_rate": 5.293307700439242e-06, |
|
"loss": 0.1089, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.9644128113879004, |
|
"grad_norm": 1.9445198005465678, |
|
"learning_rate": 5.2793555826812456e-06, |
|
"loss": 0.1034, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.9661921708185054, |
|
"grad_norm": 1.1133867538794493, |
|
"learning_rate": 5.265401282574294e-06, |
|
"loss": 0.0608, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.9679715302491103, |
|
"grad_norm": 1.9091737671895619, |
|
"learning_rate": 5.2514449091305375e-06, |
|
"loss": 0.1193, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.9697508896797153, |
|
"grad_norm": 1.430050972448983, |
|
"learning_rate": 5.237486571378317e-06, |
|
"loss": 0.099, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.9715302491103203, |
|
"grad_norm": 1.3524570409744352, |
|
"learning_rate": 5.22352637836133e-06, |
|
"loss": 0.0899, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.9733096085409253, |
|
"grad_norm": 1.2301673150910537, |
|
"learning_rate": 5.209564439137755e-06, |
|
"loss": 0.0831, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.9750889679715302, |
|
"grad_norm": 1.761380009401885, |
|
"learning_rate": 5.195600862779421e-06, |
|
"loss": 0.1325, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.9768683274021353, |
|
"grad_norm": 1.2846216493512475, |
|
"learning_rate": 5.181635758370942e-06, |
|
"loss": 0.0748, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.9786476868327402, |
|
"grad_norm": 1.4576408042135602, |
|
"learning_rate": 5.167669235008871e-06, |
|
"loss": 0.1103, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.9804270462633452, |
|
"grad_norm": 1.895177378632569, |
|
"learning_rate": 5.153701401800845e-06, |
|
"loss": 0.1203, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.9822064056939501, |
|
"grad_norm": 1.5918319001851655, |
|
"learning_rate": 5.139732367864736e-06, |
|
"loss": 0.1074, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.9839857651245552, |
|
"grad_norm": 1.4016276337871287, |
|
"learning_rate": 5.1257622423277934e-06, |
|
"loss": 0.0854, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.9857651245551602, |
|
"grad_norm": 1.5611962119515717, |
|
"learning_rate": 5.111791134325793e-06, |
|
"loss": 0.1179, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.9875444839857651, |
|
"grad_norm": 1.7545430255242929, |
|
"learning_rate": 5.097819153002192e-06, |
|
"loss": 0.1343, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.9893238434163701, |
|
"grad_norm": 1.2158807977806259, |
|
"learning_rate": 5.083846407507263e-06, |
|
"loss": 0.0823, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.9911032028469751, |
|
"grad_norm": 1.8020696174797162, |
|
"learning_rate": 5.0698730069972535e-06, |
|
"loss": 0.1339, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.9928825622775801, |
|
"grad_norm": 1.4988157885090532, |
|
"learning_rate": 5.055899060633524e-06, |
|
"loss": 0.0896, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.994661921708185, |
|
"grad_norm": 1.4138348190403207, |
|
"learning_rate": 5.041924677581702e-06, |
|
"loss": 0.1127, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.99644128113879, |
|
"grad_norm": 1.314877928027049, |
|
"learning_rate": 5.0279499670108245e-06, |
|
"loss": 0.0927, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.998220640569395, |
|
"grad_norm": 1.2930162078794682, |
|
"learning_rate": 5.013975038092491e-06, |
|
"loss": 0.0896, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.450729265625484, |
|
"learning_rate": 5e-06, |
|
"loss": 0.0822, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.001779359430605, |
|
"grad_norm": 1.1953682676571187, |
|
"learning_rate": 4.98602496190751e-06, |
|
"loss": 0.0562, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.00355871886121, |
|
"grad_norm": 0.8123429812754303, |
|
"learning_rate": 4.9720500329891755e-06, |
|
"loss": 0.0403, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.0053380782918149, |
|
"grad_norm": 1.3189704731661789, |
|
"learning_rate": 4.9580753224183005e-06, |
|
"loss": 0.0625, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.00711743772242, |
|
"grad_norm": 1.0023480518891619, |
|
"learning_rate": 4.944100939366478e-06, |
|
"loss": 0.0435, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.008896797153025, |
|
"grad_norm": 0.99764923482534, |
|
"learning_rate": 4.930126993002748e-06, |
|
"loss": 0.0481, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.01067615658363, |
|
"grad_norm": 0.8296881790680922, |
|
"learning_rate": 4.9161535924927375e-06, |
|
"loss": 0.0352, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.0124555160142348, |
|
"grad_norm": 1.006555641597263, |
|
"learning_rate": 4.90218084699781e-06, |
|
"loss": 0.0461, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.0142348754448398, |
|
"grad_norm": 1.5396168168151427, |
|
"learning_rate": 4.888208865674208e-06, |
|
"loss": 0.0609, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.0160142348754448, |
|
"grad_norm": 1.1377507503458753, |
|
"learning_rate": 4.874237757672209e-06, |
|
"loss": 0.0517, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.0177935943060499, |
|
"grad_norm": 1.2240132939089432, |
|
"learning_rate": 4.8602676321352646e-06, |
|
"loss": 0.0447, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.019572953736655, |
|
"grad_norm": 1.3229825492284517, |
|
"learning_rate": 4.846298598199155e-06, |
|
"loss": 0.0476, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.0213523131672597, |
|
"grad_norm": 1.1079242716536724, |
|
"learning_rate": 4.832330764991131e-06, |
|
"loss": 0.043, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.0231316725978647, |
|
"grad_norm": 1.3709099777436073, |
|
"learning_rate": 4.81836424162906e-06, |
|
"loss": 0.0525, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.0249110320284698, |
|
"grad_norm": 1.0409026220049002, |
|
"learning_rate": 4.80439913722058e-06, |
|
"loss": 0.0359, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.0266903914590748, |
|
"grad_norm": 1.054239543267635, |
|
"learning_rate": 4.790435560862247e-06, |
|
"loss": 0.0308, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.0284697508896796, |
|
"grad_norm": 1.156618593850115, |
|
"learning_rate": 4.776473621638673e-06, |
|
"loss": 0.0338, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.0302491103202847, |
|
"grad_norm": 1.2444379877567213, |
|
"learning_rate": 4.762513428621684e-06, |
|
"loss": 0.0396, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.0320284697508897, |
|
"grad_norm": 1.5774390094628499, |
|
"learning_rate": 4.748555090869464e-06, |
|
"loss": 0.0561, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.0338078291814947, |
|
"grad_norm": 1.290153760811143, |
|
"learning_rate": 4.734598717425706e-06, |
|
"loss": 0.0402, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.0355871886120998, |
|
"grad_norm": 1.7089183580527239, |
|
"learning_rate": 4.720644417318755e-06, |
|
"loss": 0.0497, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.0373665480427046, |
|
"grad_norm": 1.7935912236253955, |
|
"learning_rate": 4.70669229956076e-06, |
|
"loss": 0.0518, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.0391459074733096, |
|
"grad_norm": 1.445884968155379, |
|
"learning_rate": 4.692742473146818e-06, |
|
"loss": 0.0411, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.0409252669039146, |
|
"grad_norm": 1.411073816799605, |
|
"learning_rate": 4.678795047054131e-06, |
|
"loss": 0.0464, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.0427046263345197, |
|
"grad_norm": 1.5370474844619175, |
|
"learning_rate": 4.664850130241146e-06, |
|
"loss": 0.0422, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.0444839857651245, |
|
"grad_norm": 1.2158372632864995, |
|
"learning_rate": 4.650907831646711e-06, |
|
"loss": 0.0308, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.0462633451957295, |
|
"grad_norm": 1.5637810355006743, |
|
"learning_rate": 4.636968260189214e-06, |
|
"loss": 0.0506, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.0480427046263345, |
|
"grad_norm": 1.3636446718415562, |
|
"learning_rate": 4.623031524765744e-06, |
|
"loss": 0.041, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.0498220640569396, |
|
"grad_norm": 1.282823831962173, |
|
"learning_rate": 4.609097734251231e-06, |
|
"loss": 0.0361, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.0516014234875444, |
|
"grad_norm": 1.6510994794546678, |
|
"learning_rate": 4.595166997497605e-06, |
|
"loss": 0.0406, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.0533807829181494, |
|
"grad_norm": 1.5536352591872828, |
|
"learning_rate": 4.58123942333293e-06, |
|
"loss": 0.0428, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.0551601423487544, |
|
"grad_norm": 1.3297250928066762, |
|
"learning_rate": 4.567315120560573e-06, |
|
"loss": 0.0384, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.0569395017793595, |
|
"grad_norm": 1.0180705370260559, |
|
"learning_rate": 4.553394197958339e-06, |
|
"loss": 0.0259, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.0587188612099645, |
|
"grad_norm": 1.451484220566775, |
|
"learning_rate": 4.539476764277631e-06, |
|
"loss": 0.0394, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.0604982206405693, |
|
"grad_norm": 1.3197145968289676, |
|
"learning_rate": 4.525562928242592e-06, |
|
"loss": 0.0347, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.0622775800711743, |
|
"grad_norm": 1.3905939373087164, |
|
"learning_rate": 4.511652798549261e-06, |
|
"loss": 0.042, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.0640569395017794, |
|
"grad_norm": 1.4050756843159897, |
|
"learning_rate": 4.497746483864725e-06, |
|
"loss": 0.0345, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.0658362989323844, |
|
"grad_norm": 1.28181447516696, |
|
"learning_rate": 4.483844092826267e-06, |
|
"loss": 0.029, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.0676156583629894, |
|
"grad_norm": 0.9541159964036146, |
|
"learning_rate": 4.469945734040516e-06, |
|
"loss": 0.0283, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.0676156583629894, |
|
"eval_loss": 0.12607604265213013, |
|
"eval_runtime": 2.8444, |
|
"eval_samples_per_second": 16.172, |
|
"eval_steps_per_second": 4.219, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.0693950177935942, |
|
"grad_norm": 1.567005444223915, |
|
"learning_rate": 4.456051516082603e-06, |
|
"loss": 0.0452, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.0711743772241993, |
|
"grad_norm": 1.820321341185189, |
|
"learning_rate": 4.442161547495309e-06, |
|
"loss": 0.0507, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.0729537366548043, |
|
"grad_norm": 1.6740351390437986, |
|
"learning_rate": 4.42827593678822e-06, |
|
"loss": 0.0465, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.0747330960854093, |
|
"grad_norm": 1.5671140160238786, |
|
"learning_rate": 4.414394792436877e-06, |
|
"loss": 0.0362, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.0765124555160142, |
|
"grad_norm": 1.7200788795101603, |
|
"learning_rate": 4.400518222881931e-06, |
|
"loss": 0.0588, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.0782918149466192, |
|
"grad_norm": 1.390434585402565, |
|
"learning_rate": 4.386646336528291e-06, |
|
"loss": 0.0425, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.0800711743772242, |
|
"grad_norm": 1.8784199545458165, |
|
"learning_rate": 4.372779241744282e-06, |
|
"loss": 0.0455, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.0818505338078293, |
|
"grad_norm": 1.1694283705069697, |
|
"learning_rate": 4.358917046860799e-06, |
|
"loss": 0.0273, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.083629893238434, |
|
"grad_norm": 1.5123033090974332, |
|
"learning_rate": 4.345059860170458e-06, |
|
"loss": 0.0449, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.085409252669039, |
|
"grad_norm": 1.6108888435530755, |
|
"learning_rate": 4.331207789926746e-06, |
|
"loss": 0.0408, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.0871886120996441, |
|
"grad_norm": 1.2201544033310336, |
|
"learning_rate": 4.317360944343184e-06, |
|
"loss": 0.0373, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.0889679715302492, |
|
"grad_norm": 1.4844754591693525, |
|
"learning_rate": 4.303519431592479e-06, |
|
"loss": 0.0376, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.0907473309608542, |
|
"grad_norm": 1.675827537078509, |
|
"learning_rate": 4.289683359805673e-06, |
|
"loss": 0.0447, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.092526690391459, |
|
"grad_norm": 1.6966886366310556, |
|
"learning_rate": 4.275852837071309e-06, |
|
"loss": 0.0344, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.094306049822064, |
|
"grad_norm": 1.1981462983030247, |
|
"learning_rate": 4.26202797143457e-06, |
|
"loss": 0.0345, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.096085409252669, |
|
"grad_norm": 1.6516198921528065, |
|
"learning_rate": 4.248208870896456e-06, |
|
"loss": 0.0565, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.097864768683274, |
|
"grad_norm": 1.200423543387605, |
|
"learning_rate": 4.234395643412925e-06, |
|
"loss": 0.0364, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.099644128113879, |
|
"grad_norm": 1.2367926831904972, |
|
"learning_rate": 4.220588396894055e-06, |
|
"loss": 0.0402, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.101423487544484, |
|
"grad_norm": 1.7069126197823754, |
|
"learning_rate": 4.2067872392031965e-06, |
|
"loss": 0.0554, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.103202846975089, |
|
"grad_norm": 1.4330609676327815, |
|
"learning_rate": 4.192992278156141e-06, |
|
"loss": 0.0436, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.104982206405694, |
|
"grad_norm": 1.3627538759996851, |
|
"learning_rate": 4.179203621520262e-06, |
|
"loss": 0.0412, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.106761565836299, |
|
"grad_norm": 1.122635193430629, |
|
"learning_rate": 4.165421377013691e-06, |
|
"loss": 0.0279, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.1085409252669038, |
|
"grad_norm": 1.5836715602684714, |
|
"learning_rate": 4.151645652304465e-06, |
|
"loss": 0.0502, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.1103202846975089, |
|
"grad_norm": 2.080352660679961, |
|
"learning_rate": 4.137876555009684e-06, |
|
"loss": 0.0536, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.112099644128114, |
|
"grad_norm": 1.8099249505613468, |
|
"learning_rate": 4.124114192694676e-06, |
|
"loss": 0.0641, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.113879003558719, |
|
"grad_norm": 1.1403536256972713, |
|
"learning_rate": 4.110358672872158e-06, |
|
"loss": 0.0404, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.1156583629893237, |
|
"grad_norm": 1.1836843089650744, |
|
"learning_rate": 4.0966101030013915e-06, |
|
"loss": 0.0391, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.1174377224199288, |
|
"grad_norm": 1.1914062622350468, |
|
"learning_rate": 4.082868590487339e-06, |
|
"loss": 0.0351, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.1192170818505338, |
|
"grad_norm": 1.1157583448371302, |
|
"learning_rate": 4.069134242679837e-06, |
|
"loss": 0.0359, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.1209964412811388, |
|
"grad_norm": 1.7848552086830185, |
|
"learning_rate": 4.055407166872748e-06, |
|
"loss": 0.055, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.1227758007117439, |
|
"grad_norm": 1.263594968162713, |
|
"learning_rate": 4.041687470303127e-06, |
|
"loss": 0.0457, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.1245551601423487, |
|
"grad_norm": 1.1689272688394168, |
|
"learning_rate": 4.02797526015038e-06, |
|
"loss": 0.0412, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.1263345195729537, |
|
"grad_norm": 1.2748241344219966, |
|
"learning_rate": 4.014270643535427e-06, |
|
"loss": 0.0389, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.1281138790035588, |
|
"grad_norm": 1.675361085181448, |
|
"learning_rate": 4.000573727519868e-06, |
|
"loss": 0.0527, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.1298932384341638, |
|
"grad_norm": 1.2555108259413896, |
|
"learning_rate": 3.9868846191051465e-06, |
|
"loss": 0.0375, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.1316725978647686, |
|
"grad_norm": 1.4813068363978834, |
|
"learning_rate": 3.973203425231715e-06, |
|
"loss": 0.0555, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.1334519572953736, |
|
"grad_norm": 1.1689028514994826, |
|
"learning_rate": 3.959530252778187e-06, |
|
"loss": 0.0443, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.1352313167259787, |
|
"grad_norm": 1.8967341912662283, |
|
"learning_rate": 3.945865208560522e-06, |
|
"loss": 0.0709, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.1370106761565837, |
|
"grad_norm": 2.003116536102934, |
|
"learning_rate": 3.932208399331177e-06, |
|
"loss": 0.0627, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.1387900355871885, |
|
"grad_norm": 1.4906089969731395, |
|
"learning_rate": 3.918559931778277e-06, |
|
"loss": 0.0419, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.1405693950177935, |
|
"grad_norm": 1.2402181152340641, |
|
"learning_rate": 3.904919912524784e-06, |
|
"loss": 0.0312, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.1423487544483986, |
|
"grad_norm": 1.3826937425575372, |
|
"learning_rate": 3.891288448127654e-06, |
|
"loss": 0.0406, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.1441281138790036, |
|
"grad_norm": 1.5858715946880122, |
|
"learning_rate": 3.877665645077017e-06, |
|
"loss": 0.0605, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.1459074733096086, |
|
"grad_norm": 1.1592082505863286, |
|
"learning_rate": 3.86405160979534e-06, |
|
"loss": 0.0291, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.1476868327402134, |
|
"grad_norm": 1.1025974803717737, |
|
"learning_rate": 3.850446448636597e-06, |
|
"loss": 0.0308, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.1494661921708185, |
|
"grad_norm": 1.1232908326835684, |
|
"learning_rate": 3.8368502678854296e-06, |
|
"loss": 0.0325, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.1512455516014235, |
|
"grad_norm": 1.5430825877799026, |
|
"learning_rate": 3.8232631737563306e-06, |
|
"loss": 0.0409, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.1530249110320285, |
|
"grad_norm": 1.3633034587952768, |
|
"learning_rate": 3.809685272392804e-06, |
|
"loss": 0.0392, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.1548042704626336, |
|
"grad_norm": 1.2105433186898344, |
|
"learning_rate": 3.796116669866543e-06, |
|
"loss": 0.0409, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.1565836298932384, |
|
"grad_norm": 1.2623189453546828, |
|
"learning_rate": 3.78255747217659e-06, |
|
"loss": 0.036, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.1583629893238434, |
|
"grad_norm": 1.3028729999748314, |
|
"learning_rate": 3.769007785248523e-06, |
|
"loss": 0.0398, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.1601423487544484, |
|
"grad_norm": 1.4061801563260845, |
|
"learning_rate": 3.7554677149336186e-06, |
|
"loss": 0.0452, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.1619217081850535, |
|
"grad_norm": 1.4160971463593808, |
|
"learning_rate": 3.7419373670080284e-06, |
|
"loss": 0.0406, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.1637010676156583, |
|
"grad_norm": 1.4932629194153206, |
|
"learning_rate": 3.7284168471719527e-06, |
|
"loss": 0.0501, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.1654804270462633, |
|
"grad_norm": 2.086010371605074, |
|
"learning_rate": 3.7149062610488085e-06, |
|
"loss": 0.0455, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.1672597864768683, |
|
"grad_norm": 1.1929428961527198, |
|
"learning_rate": 3.701405714184416e-06, |
|
"loss": 0.041, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.1690391459074734, |
|
"grad_norm": 1.1125158373410753, |
|
"learning_rate": 3.687915312046166e-06, |
|
"loss": 0.0356, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.1708185053380782, |
|
"grad_norm": 1.8231927474516298, |
|
"learning_rate": 3.6744351600221994e-06, |
|
"loss": 0.0424, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.1725978647686832, |
|
"grad_norm": 1.433864946110474, |
|
"learning_rate": 3.6609653634205773e-06, |
|
"loss": 0.0538, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.1743772241992882, |
|
"grad_norm": 1.8194392138633524, |
|
"learning_rate": 3.647506027468467e-06, |
|
"loss": 0.0517, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.1761565836298933, |
|
"grad_norm": 1.2032469376034516, |
|
"learning_rate": 3.6340572573113176e-06, |
|
"loss": 0.0303, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.1779359430604983, |
|
"grad_norm": 0.8764442030541911, |
|
"learning_rate": 3.6206191580120346e-06, |
|
"loss": 0.0286, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.1797153024911031, |
|
"grad_norm": 1.678382078109151, |
|
"learning_rate": 3.6071918345501655e-06, |
|
"loss": 0.05, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.1814946619217082, |
|
"grad_norm": 1.2353785702746418, |
|
"learning_rate": 3.5937753918210705e-06, |
|
"loss": 0.0344, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.1832740213523132, |
|
"grad_norm": 1.4142286800063444, |
|
"learning_rate": 3.5803699346351117e-06, |
|
"loss": 0.0417, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.1850533807829182, |
|
"grad_norm": 1.0915764955089264, |
|
"learning_rate": 3.566975567716833e-06, |
|
"loss": 0.032, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.1868327402135233, |
|
"grad_norm": 1.1866793226343935, |
|
"learning_rate": 3.5535923957041374e-06, |
|
"loss": 0.0318, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.188612099644128, |
|
"grad_norm": 1.2003752108051162, |
|
"learning_rate": 3.540220523147474e-06, |
|
"loss": 0.0417, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.190391459074733, |
|
"grad_norm": 1.2030578999883368, |
|
"learning_rate": 3.5268600545090183e-06, |
|
"loss": 0.0339, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.1921708185053381, |
|
"grad_norm": 1.6082285747104639, |
|
"learning_rate": 3.513511094161858e-06, |
|
"loss": 0.047, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.193950177935943, |
|
"grad_norm": 1.7609176961887636, |
|
"learning_rate": 3.5001737463891793e-06, |
|
"loss": 0.0569, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.195729537366548, |
|
"grad_norm": 1.3858748724486465, |
|
"learning_rate": 3.4868481153834454e-06, |
|
"loss": 0.0452, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.197508896797153, |
|
"grad_norm": 1.4225316681283704, |
|
"learning_rate": 3.4735343052455905e-06, |
|
"loss": 0.0397, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.199288256227758, |
|
"grad_norm": 1.3584258581583493, |
|
"learning_rate": 3.4602324199842026e-06, |
|
"loss": 0.0428, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.201067615658363, |
|
"grad_norm": 1.184136732410501, |
|
"learning_rate": 3.446942563514711e-06, |
|
"loss": 0.0372, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.2028469750889679, |
|
"grad_norm": 1.3410434484713751, |
|
"learning_rate": 3.4336648396585777e-06, |
|
"loss": 0.0368, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.204626334519573, |
|
"grad_norm": 1.1390866533876867, |
|
"learning_rate": 3.4203993521424774e-06, |
|
"loss": 0.0397, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.206405693950178, |
|
"grad_norm": 0.8579301710816427, |
|
"learning_rate": 3.407146204597499e-06, |
|
"loss": 0.0223, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.208185053380783, |
|
"grad_norm": 1.3525045564548324, |
|
"learning_rate": 3.3939055005583305e-06, |
|
"loss": 0.043, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.209964412811388, |
|
"grad_norm": 1.63608834543694, |
|
"learning_rate": 3.3806773434624475e-06, |
|
"loss": 0.059, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.2117437722419928, |
|
"grad_norm": 1.3187305728446632, |
|
"learning_rate": 3.3674618366493117e-06, |
|
"loss": 0.0376, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.2135231316725978, |
|
"grad_norm": 1.3578339121179042, |
|
"learning_rate": 3.3542590833595533e-06, |
|
"loss": 0.0432, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.2153024911032029, |
|
"grad_norm": 1.4173354004831304, |
|
"learning_rate": 3.341069186734176e-06, |
|
"loss": 0.0397, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.217081850533808, |
|
"grad_norm": 1.081707215883141, |
|
"learning_rate": 3.3278922498137455e-06, |
|
"loss": 0.0327, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.2188612099644127, |
|
"grad_norm": 1.4251894888170504, |
|
"learning_rate": 3.314728375537587e-06, |
|
"loss": 0.0426, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.2206405693950177, |
|
"grad_norm": 1.4223264599184051, |
|
"learning_rate": 3.3015776667429724e-06, |
|
"loss": 0.0409, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.2224199288256228, |
|
"grad_norm": 1.080684694956512, |
|
"learning_rate": 3.2884402261643296e-06, |
|
"loss": 0.0284, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.2241992882562278, |
|
"grad_norm": 1.2321358283264663, |
|
"learning_rate": 3.2753161564324344e-06, |
|
"loss": 0.0391, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.2259786476868326, |
|
"grad_norm": 1.655052928937176, |
|
"learning_rate": 3.262205560073605e-06, |
|
"loss": 0.0345, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.2277580071174377, |
|
"grad_norm": 1.0125531570056403, |
|
"learning_rate": 3.249108539508909e-06, |
|
"loss": 0.0276, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.2295373665480427, |
|
"grad_norm": 1.4945947861689413, |
|
"learning_rate": 3.2360251970533527e-06, |
|
"loss": 0.0505, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.2313167259786477, |
|
"grad_norm": 1.2736333654923302, |
|
"learning_rate": 3.2229556349150947e-06, |
|
"loss": 0.0379, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.2330960854092528, |
|
"grad_norm": 1.3897560707765237, |
|
"learning_rate": 3.2098999551946337e-06, |
|
"loss": 0.0414, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.2348754448398576, |
|
"grad_norm": 1.6394236666264095, |
|
"learning_rate": 3.1968582598840234e-06, |
|
"loss": 0.0484, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.2366548042704626, |
|
"grad_norm": 2.4190349087670233, |
|
"learning_rate": 3.183830650866068e-06, |
|
"loss": 0.0632, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.2384341637010676, |
|
"grad_norm": 1.1572909024855405, |
|
"learning_rate": 3.1708172299135266e-06, |
|
"loss": 0.0311, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.2402135231316727, |
|
"grad_norm": 1.9802124918655826, |
|
"learning_rate": 3.1578180986883234e-06, |
|
"loss": 0.0538, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.2419928825622777, |
|
"grad_norm": 1.3699676184015943, |
|
"learning_rate": 3.1448333587407486e-06, |
|
"loss": 0.0351, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.2437722419928825, |
|
"grad_norm": 1.3516012841806628, |
|
"learning_rate": 3.131863111508667e-06, |
|
"loss": 0.0386, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.2455516014234875, |
|
"grad_norm": 1.5842353713803488, |
|
"learning_rate": 3.118907458316722e-06, |
|
"loss": 0.0468, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.2473309608540926, |
|
"grad_norm": 1.2292483458783163, |
|
"learning_rate": 3.105966500375551e-06, |
|
"loss": 0.0259, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.2491103202846976, |
|
"grad_norm": 1.2478012395827096, |
|
"learning_rate": 3.0930403387809892e-06, |
|
"loss": 0.0358, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.2508896797153026, |
|
"grad_norm": 1.388842997884192, |
|
"learning_rate": 3.080129074513285e-06, |
|
"loss": 0.0425, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.2526690391459074, |
|
"grad_norm": 1.6361902705350504, |
|
"learning_rate": 3.067232808436299e-06, |
|
"loss": 0.0473, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.2544483985765125, |
|
"grad_norm": 1.3739921558220733, |
|
"learning_rate": 3.0543516412967327e-06, |
|
"loss": 0.0416, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.2562277580071175, |
|
"grad_norm": 1.2517155782194465, |
|
"learning_rate": 3.041485673723331e-06, |
|
"loss": 0.0329, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.2580071174377223, |
|
"grad_norm": 1.2987507756109422, |
|
"learning_rate": 3.0286350062261017e-06, |
|
"loss": 0.0325, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.2597864768683273, |
|
"grad_norm": 1.1167857819130071, |
|
"learning_rate": 3.0157997391955172e-06, |
|
"loss": 0.0319, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.2615658362989324, |
|
"grad_norm": 1.5773333511820165, |
|
"learning_rate": 3.0029799729017518e-06, |
|
"loss": 0.0437, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.2633451957295374, |
|
"grad_norm": 1.3172524062099156, |
|
"learning_rate": 2.9901758074938797e-06, |
|
"loss": 0.0412, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.2651245551601424, |
|
"grad_norm": 2.070072782381349, |
|
"learning_rate": 2.977387342999103e-06, |
|
"loss": 0.0565, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.2669039145907472, |
|
"grad_norm": 1.6452723047052242, |
|
"learning_rate": 2.964614679321966e-06, |
|
"loss": 0.0373, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.2686832740213523, |
|
"grad_norm": 1.7123160552736036, |
|
"learning_rate": 2.951857916243574e-06, |
|
"loss": 0.0406, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.2704626334519573, |
|
"grad_norm": 1.6184278878566922, |
|
"learning_rate": 2.9391171534208185e-06, |
|
"loss": 0.0451, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.2722419928825623, |
|
"grad_norm": 2.081072892160861, |
|
"learning_rate": 2.9263924903855932e-06, |
|
"loss": 0.0622, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.2740213523131674, |
|
"grad_norm": 1.9703337212641012, |
|
"learning_rate": 2.9136840265440213e-06, |
|
"loss": 0.056, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.2758007117437722, |
|
"grad_norm": 1.2488379115247517, |
|
"learning_rate": 2.9009918611756732e-06, |
|
"loss": 0.0309, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.2775800711743772, |
|
"grad_norm": 1.2814946087205048, |
|
"learning_rate": 2.8883160934327968e-06, |
|
"loss": 0.0333, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.2793594306049823, |
|
"grad_norm": 1.7843155959222732, |
|
"learning_rate": 2.8756568223395396e-06, |
|
"loss": 0.0417, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.281138790035587, |
|
"grad_norm": 1.6127496010865379, |
|
"learning_rate": 2.8630141467911777e-06, |
|
"loss": 0.0453, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.282918149466192, |
|
"grad_norm": 1.4857896364384107, |
|
"learning_rate": 2.8503881655533395e-06, |
|
"loss": 0.0365, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.2846975088967971, |
|
"grad_norm": 1.4260297631627328, |
|
"learning_rate": 2.837778977261235e-06, |
|
"loss": 0.0445, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.2864768683274022, |
|
"grad_norm": 1.5824026893055725, |
|
"learning_rate": 2.8251866804188875e-06, |
|
"loss": 0.0431, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.2882562277580072, |
|
"grad_norm": 1.2278353651657032, |
|
"learning_rate": 2.812611373398365e-06, |
|
"loss": 0.0431, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.290035587188612, |
|
"grad_norm": 1.6672552310482833, |
|
"learning_rate": 2.8000531544390064e-06, |
|
"loss": 0.0476, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.291814946619217, |
|
"grad_norm": 1.4827460821518783, |
|
"learning_rate": 2.7875121216466595e-06, |
|
"loss": 0.0421, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.293594306049822, |
|
"grad_norm": 1.7303772511342517, |
|
"learning_rate": 2.7749883729929105e-06, |
|
"loss": 0.0387, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.295373665480427, |
|
"grad_norm": 1.2452214246999493, |
|
"learning_rate": 2.762482006314324e-06, |
|
"loss": 0.0276, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.2971530249110321, |
|
"grad_norm": 1.4311677178947606, |
|
"learning_rate": 2.7499931193116692e-06, |
|
"loss": 0.0386, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.298932384341637, |
|
"grad_norm": 1.1211606666629077, |
|
"learning_rate": 2.737521809549167e-06, |
|
"loss": 0.032, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.300711743772242, |
|
"grad_norm": 1.2798559609520561, |
|
"learning_rate": 2.725068174453722e-06, |
|
"loss": 0.0363, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.302491103202847, |
|
"grad_norm": 1.4140601669402832, |
|
"learning_rate": 2.712632311314165e-06, |
|
"loss": 0.0366, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.304270462633452, |
|
"grad_norm": 1.507936365182657, |
|
"learning_rate": 2.7002143172804875e-06, |
|
"loss": 0.0461, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.306049822064057, |
|
"grad_norm": 1.6584253256938686, |
|
"learning_rate": 2.6878142893630904e-06, |
|
"loss": 0.0469, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.3078291814946619, |
|
"grad_norm": 1.1663195704827793, |
|
"learning_rate": 2.6754323244320154e-06, |
|
"loss": 0.0367, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.309608540925267, |
|
"grad_norm": 1.5433520720635876, |
|
"learning_rate": 2.6630685192161995e-06, |
|
"loss": 0.052, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.311387900355872, |
|
"grad_norm": 0.9570814972862995, |
|
"learning_rate": 2.650722970302714e-06, |
|
"loss": 0.0245, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.3131672597864767, |
|
"grad_norm": 1.6826958521110618, |
|
"learning_rate": 2.638395774136009e-06, |
|
"loss": 0.0405, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.3149466192170818, |
|
"grad_norm": 1.4107878120903379, |
|
"learning_rate": 2.6260870270171645e-06, |
|
"loss": 0.0348, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.3167259786476868, |
|
"grad_norm": 1.4454348707862485, |
|
"learning_rate": 2.613796825103129e-06, |
|
"loss": 0.0384, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.3185053380782918, |
|
"grad_norm": 1.541882544495284, |
|
"learning_rate": 2.60152526440598e-06, |
|
"loss": 0.0398, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.3202846975088969, |
|
"grad_norm": 1.2047909289510674, |
|
"learning_rate": 2.5892724407921667e-06, |
|
"loss": 0.0357, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.3220640569395017, |
|
"grad_norm": 1.8710886424376283, |
|
"learning_rate": 2.577038449981763e-06, |
|
"loss": 0.0533, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.3238434163701067, |
|
"grad_norm": 1.4864125633254355, |
|
"learning_rate": 2.564823387547716e-06, |
|
"loss": 0.0438, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.3256227758007118, |
|
"grad_norm": 1.4568082998721188, |
|
"learning_rate": 2.552627348915106e-06, |
|
"loss": 0.0458, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.3274021352313168, |
|
"grad_norm": 1.2965641435984585, |
|
"learning_rate": 2.5404504293603983e-06, |
|
"loss": 0.0367, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.3291814946619218, |
|
"grad_norm": 1.6039565680763104, |
|
"learning_rate": 2.528292724010697e-06, |
|
"loss": 0.0557, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.3309608540925266, |
|
"grad_norm": 1.31842792736011, |
|
"learning_rate": 2.5161543278430055e-06, |
|
"loss": 0.0402, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.3327402135231317, |
|
"grad_norm": 1.3374950267046466, |
|
"learning_rate": 2.5040353356834756e-06, |
|
"loss": 0.0316, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.3345195729537367, |
|
"grad_norm": 1.1810433553151114, |
|
"learning_rate": 2.4919358422066816e-06, |
|
"loss": 0.0343, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.3362989323843417, |
|
"grad_norm": 1.1708361859222047, |
|
"learning_rate": 2.4798559419348672e-06, |
|
"loss": 0.0263, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.3380782918149468, |
|
"grad_norm": 1.1983604100893346, |
|
"learning_rate": 2.4677957292372166e-06, |
|
"loss": 0.0316, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.3398576512455516, |
|
"grad_norm": 1.270866766253193, |
|
"learning_rate": 2.455755298329107e-06, |
|
"loss": 0.035, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.3416370106761566, |
|
"grad_norm": 0.9549267889587485, |
|
"learning_rate": 2.4437347432713838e-06, |
|
"loss": 0.027, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.3434163701067616, |
|
"grad_norm": 1.2742907087163178, |
|
"learning_rate": 2.431734157969619e-06, |
|
"loss": 0.0311, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.3451957295373664, |
|
"grad_norm": 1.6266368214763591, |
|
"learning_rate": 2.4197536361733792e-06, |
|
"loss": 0.0522, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.3469750889679715, |
|
"grad_norm": 1.0435797007156282, |
|
"learning_rate": 2.407793271475495e-06, |
|
"loss": 0.0347, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.3487544483985765, |
|
"grad_norm": 1.2628953872576205, |
|
"learning_rate": 2.3958531573113223e-06, |
|
"loss": 0.0417, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.3505338078291815, |
|
"grad_norm": 1.5160486542982363, |
|
"learning_rate": 2.3839333869580243e-06, |
|
"loss": 0.0345, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.3523131672597866, |
|
"grad_norm": 1.1613340076348753, |
|
"learning_rate": 2.372034053533835e-06, |
|
"loss": 0.0266, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.3540925266903914, |
|
"grad_norm": 1.7625303124680602, |
|
"learning_rate": 2.360155249997334e-06, |
|
"loss": 0.0456, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.3558718861209964, |
|
"grad_norm": 1.3539009201135719, |
|
"learning_rate": 2.348297069146715e-06, |
|
"loss": 0.0316, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.3576512455516014, |
|
"grad_norm": 1.162485977044709, |
|
"learning_rate": 2.3364596036190706e-06, |
|
"loss": 0.0289, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.3594306049822065, |
|
"grad_norm": 1.2139927014178364, |
|
"learning_rate": 2.3246429458896637e-06, |
|
"loss": 0.0331, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.3612099644128115, |
|
"grad_norm": 1.39684037867632, |
|
"learning_rate": 2.312847188271203e-06, |
|
"loss": 0.0485, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.3629893238434163, |
|
"grad_norm": 1.8929234202023197, |
|
"learning_rate": 2.301072422913123e-06, |
|
"loss": 0.0461, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.3647686832740213, |
|
"grad_norm": 1.308329638889878, |
|
"learning_rate": 2.2893187418008666e-06, |
|
"loss": 0.0349, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.3665480427046264, |
|
"grad_norm": 1.4484370473711663, |
|
"learning_rate": 2.2775862367551642e-06, |
|
"loss": 0.0342, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.3683274021352312, |
|
"grad_norm": 1.409819013177999, |
|
"learning_rate": 2.265874999431318e-06, |
|
"loss": 0.0348, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.3701067615658362, |
|
"grad_norm": 1.5086672344878724, |
|
"learning_rate": 2.254185121318484e-06, |
|
"loss": 0.0339, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.3718861209964412, |
|
"grad_norm": 1.598883001852172, |
|
"learning_rate": 2.2425166937389596e-06, |
|
"loss": 0.0497, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.3736654804270463, |
|
"grad_norm": 1.4088740501926291, |
|
"learning_rate": 2.2308698078474645e-06, |
|
"loss": 0.0427, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.3754448398576513, |
|
"grad_norm": 1.3010529332616332, |
|
"learning_rate": 2.219244554630438e-06, |
|
"loss": 0.04, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.3772241992882561, |
|
"grad_norm": 1.4102837365546397, |
|
"learning_rate": 2.207641024905322e-06, |
|
"loss": 0.05, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.3790035587188612, |
|
"grad_norm": 1.212600470932334, |
|
"learning_rate": 2.1960593093198508e-06, |
|
"loss": 0.0253, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.3807829181494662, |
|
"grad_norm": 1.1168247416631372, |
|
"learning_rate": 2.184499498351347e-06, |
|
"loss": 0.0305, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.3825622775800712, |
|
"grad_norm": 1.5084097600474897, |
|
"learning_rate": 2.172961682306011e-06, |
|
"loss": 0.0378, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.3843416370106763, |
|
"grad_norm": 1.2846393381141363, |
|
"learning_rate": 2.1614459513182173e-06, |
|
"loss": 0.034, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.386120996441281, |
|
"grad_norm": 1.2313432100247172, |
|
"learning_rate": 2.149952395349813e-06, |
|
"loss": 0.0316, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.387900355871886, |
|
"grad_norm": 1.6561310854202442, |
|
"learning_rate": 2.1384811041894055e-06, |
|
"loss": 0.0462, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.3896797153024911, |
|
"grad_norm": 1.1678960062800439, |
|
"learning_rate": 2.1270321674516736e-06, |
|
"loss": 0.028, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.3914590747330962, |
|
"grad_norm": 1.3715440212700634, |
|
"learning_rate": 2.1156056745766593e-06, |
|
"loss": 0.0281, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.3932384341637012, |
|
"grad_norm": 1.3139391887774476, |
|
"learning_rate": 2.104201714829074e-06, |
|
"loss": 0.0358, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.395017793594306, |
|
"grad_norm": 1.8001870974888021, |
|
"learning_rate": 2.0928203772975917e-06, |
|
"loss": 0.058, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.396797153024911, |
|
"grad_norm": 1.4015110900422676, |
|
"learning_rate": 2.081461750894166e-06, |
|
"loss": 0.042, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.398576512455516, |
|
"grad_norm": 1.5586442694957636, |
|
"learning_rate": 2.070125924353328e-06, |
|
"loss": 0.0485, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.4003558718861209, |
|
"grad_norm": 1.0743624935808682, |
|
"learning_rate": 2.058812986231493e-06, |
|
"loss": 0.0305, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.402135231316726, |
|
"grad_norm": 1.7289663019132786, |
|
"learning_rate": 2.0475230249062727e-06, |
|
"loss": 0.0521, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.403914590747331, |
|
"grad_norm": 1.5425068391353152, |
|
"learning_rate": 2.0362561285757766e-06, |
|
"loss": 0.0459, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.405693950177936, |
|
"grad_norm": 1.760169029980413, |
|
"learning_rate": 2.0250123852579347e-06, |
|
"loss": 0.039, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.407473309608541, |
|
"grad_norm": 0.9476623470545859, |
|
"learning_rate": 2.013791882789801e-06, |
|
"loss": 0.0202, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.4092526690391458, |
|
"grad_norm": 1.0576319162158405, |
|
"learning_rate": 2.0025947088268714e-06, |
|
"loss": 0.0243, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.4110320284697508, |
|
"grad_norm": 1.2796492520410978, |
|
"learning_rate": 1.9914209508423943e-06, |
|
"loss": 0.0343, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.4128113879003559, |
|
"grad_norm": 1.1851617172503845, |
|
"learning_rate": 1.9802706961266936e-06, |
|
"loss": 0.0324, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.414590747330961, |
|
"grad_norm": 1.9986208484699237, |
|
"learning_rate": 1.969144031786483e-06, |
|
"loss": 0.0612, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.416370106761566, |
|
"grad_norm": 1.2736652687646093, |
|
"learning_rate": 1.958041044744186e-06, |
|
"loss": 0.0452, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.4181494661921707, |
|
"grad_norm": 1.032144429113135, |
|
"learning_rate": 1.94696182173726e-06, |
|
"loss": 0.0246, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.4199288256227758, |
|
"grad_norm": 1.4250014864707092, |
|
"learning_rate": 1.9359064493175077e-06, |
|
"loss": 0.0328, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.4217081850533808, |
|
"grad_norm": 1.3234901565543935, |
|
"learning_rate": 1.9248750138504176e-06, |
|
"loss": 0.0403, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.4234875444839858, |
|
"grad_norm": 1.2815849065768874, |
|
"learning_rate": 1.9138676015144765e-06, |
|
"loss": 0.0324, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.4234875444839858, |
|
"eval_loss": 0.11651196330785751, |
|
"eval_runtime": 2.8593, |
|
"eval_samples_per_second": 16.088, |
|
"eval_steps_per_second": 4.197, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.4252669039145909, |
|
"grad_norm": 1.1178598969687839, |
|
"learning_rate": 1.9028842983005036e-06, |
|
"loss": 0.0274, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.4270462633451957, |
|
"grad_norm": 1.50676275346977, |
|
"learning_rate": 1.8919251900109697e-06, |
|
"loss": 0.0464, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.4288256227758007, |
|
"grad_norm": 1.0467026524336447, |
|
"learning_rate": 1.8809903622593395e-06, |
|
"loss": 0.0274, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.4306049822064058, |
|
"grad_norm": 1.1486034520443957, |
|
"learning_rate": 1.870079900469392e-06, |
|
"loss": 0.0392, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.4323843416370106, |
|
"grad_norm": 1.2833811856219617, |
|
"learning_rate": 1.8591938898745593e-06, |
|
"loss": 0.0345, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.4341637010676156, |
|
"grad_norm": 1.3104062255658169, |
|
"learning_rate": 1.8483324155172594e-06, |
|
"loss": 0.0293, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.4359430604982206, |
|
"grad_norm": 1.1981128742127514, |
|
"learning_rate": 1.837495562248226e-06, |
|
"loss": 0.0361, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.4377224199288257, |
|
"grad_norm": 1.3041428623627058, |
|
"learning_rate": 1.8266834147258577e-06, |
|
"loss": 0.0335, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.4395017793594307, |
|
"grad_norm": 1.1845043147892065, |
|
"learning_rate": 1.8158960574155455e-06, |
|
"loss": 0.0298, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.4412811387900355, |
|
"grad_norm": 1.7198780283139397, |
|
"learning_rate": 1.8051335745890196e-06, |
|
"loss": 0.041, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.4430604982206405, |
|
"grad_norm": 1.1932621048888319, |
|
"learning_rate": 1.7943960503236856e-06, |
|
"loss": 0.0285, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.4448398576512456, |
|
"grad_norm": 1.2548189934552545, |
|
"learning_rate": 1.7836835685019732e-06, |
|
"loss": 0.0374, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.4466192170818506, |
|
"grad_norm": 1.6798388319303819, |
|
"learning_rate": 1.7729962128106787e-06, |
|
"loss": 0.053, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.4483985765124556, |
|
"grad_norm": 0.9975820753170506, |
|
"learning_rate": 1.7623340667403089e-06, |
|
"loss": 0.0227, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.4501779359430604, |
|
"grad_norm": 1.9826171750317803, |
|
"learning_rate": 1.7516972135844352e-06, |
|
"loss": 0.0571, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.4519572953736655, |
|
"grad_norm": 1.3428206947811356, |
|
"learning_rate": 1.741085736439031e-06, |
|
"loss": 0.0403, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.4537366548042705, |
|
"grad_norm": 1.2682702158092092, |
|
"learning_rate": 1.730499718201838e-06, |
|
"loss": 0.0307, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.4555160142348753, |
|
"grad_norm": 1.4303443809241172, |
|
"learning_rate": 1.7199392415717064e-06, |
|
"loss": 0.0384, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.4572953736654806, |
|
"grad_norm": 1.5997690542134242, |
|
"learning_rate": 1.7094043890479557e-06, |
|
"loss": 0.0451, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.4590747330960854, |
|
"grad_norm": 1.2280707144428875, |
|
"learning_rate": 1.698895242929725e-06, |
|
"loss": 0.0333, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.4608540925266904, |
|
"grad_norm": 1.413638817775182, |
|
"learning_rate": 1.6884118853153358e-06, |
|
"loss": 0.0413, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.4626334519572954, |
|
"grad_norm": 1.4152808456819603, |
|
"learning_rate": 1.6779543981016478e-06, |
|
"loss": 0.0391, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.4644128113879002, |
|
"grad_norm": 1.3156377066384624, |
|
"learning_rate": 1.6675228629834133e-06, |
|
"loss": 0.0402, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.4661921708185053, |
|
"grad_norm": 1.1953776116944157, |
|
"learning_rate": 1.657117361452651e-06, |
|
"loss": 0.0366, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.4679715302491103, |
|
"grad_norm": 1.2405661723197254, |
|
"learning_rate": 1.6467379747980011e-06, |
|
"loss": 0.0356, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.4697508896797153, |
|
"grad_norm": 1.0199902573399275, |
|
"learning_rate": 1.6363847841040914e-06, |
|
"loss": 0.024, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.4715302491103204, |
|
"grad_norm": 1.2801186814645826, |
|
"learning_rate": 1.626057870250906e-06, |
|
"loss": 0.0422, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.4733096085409252, |
|
"grad_norm": 1.31150230138265, |
|
"learning_rate": 1.6157573139131527e-06, |
|
"loss": 0.0306, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.4750889679715302, |
|
"grad_norm": 0.9465396015443895, |
|
"learning_rate": 1.605483195559628e-06, |
|
"loss": 0.027, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.4768683274021353, |
|
"grad_norm": 1.3066583093068012, |
|
"learning_rate": 1.5952355954525966e-06, |
|
"loss": 0.0314, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.4786476868327403, |
|
"grad_norm": 1.2157126589369238, |
|
"learning_rate": 1.5850145936471607e-06, |
|
"loss": 0.0314, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.4804270462633453, |
|
"grad_norm": 1.7675873670564315, |
|
"learning_rate": 1.5748202699906335e-06, |
|
"loss": 0.0546, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.4822064056939501, |
|
"grad_norm": 1.4977083975850225, |
|
"learning_rate": 1.5646527041219128e-06, |
|
"loss": 0.0401, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.4839857651245552, |
|
"grad_norm": 1.2793527018222959, |
|
"learning_rate": 1.5545119754708682e-06, |
|
"loss": 0.0353, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.4857651245551602, |
|
"grad_norm": 1.5293193564842322, |
|
"learning_rate": 1.544398163257711e-06, |
|
"loss": 0.035, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.487544483985765, |
|
"grad_norm": 1.3824378871742566, |
|
"learning_rate": 1.5343113464923808e-06, |
|
"loss": 0.0381, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.48932384341637, |
|
"grad_norm": 0.9500102530333001, |
|
"learning_rate": 1.524251603973927e-06, |
|
"loss": 0.0267, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.491103202846975, |
|
"grad_norm": 1.3528061785746208, |
|
"learning_rate": 1.5142190142898883e-06, |
|
"loss": 0.0377, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.49288256227758, |
|
"grad_norm": 1.3025015324230471, |
|
"learning_rate": 1.5042136558156883e-06, |
|
"loss": 0.0367, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.4946619217081851, |
|
"grad_norm": 1.6205954654121002, |
|
"learning_rate": 1.4942356067140162e-06, |
|
"loss": 0.0483, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.49644128113879, |
|
"grad_norm": 0.9802106709163598, |
|
"learning_rate": 1.4842849449342195e-06, |
|
"loss": 0.0247, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.498220640569395, |
|
"grad_norm": 1.266909482433793, |
|
"learning_rate": 1.4743617482116896e-06, |
|
"loss": 0.0343, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 1.3951719845514776, |
|
"learning_rate": 1.4644660940672628e-06, |
|
"loss": 0.0322, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.501779359430605, |
|
"grad_norm": 1.110024741670184, |
|
"learning_rate": 1.454598059806609e-06, |
|
"loss": 0.0322, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.50355871886121, |
|
"grad_norm": 1.6242817312383373, |
|
"learning_rate": 1.4447577225196296e-06, |
|
"loss": 0.0388, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.5053380782918149, |
|
"grad_norm": 1.2712794619793366, |
|
"learning_rate": 1.4349451590798564e-06, |
|
"loss": 0.036, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.50711743772242, |
|
"grad_norm": 1.1801209610933454, |
|
"learning_rate": 1.4251604461438444e-06, |
|
"loss": 0.0339, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.508896797153025, |
|
"grad_norm": 0.9564236960688246, |
|
"learning_rate": 1.4154036601505834e-06, |
|
"loss": 0.0209, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.5106761565836297, |
|
"grad_norm": 1.4749623031434258, |
|
"learning_rate": 1.4056748773208933e-06, |
|
"loss": 0.0402, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.512455516014235, |
|
"grad_norm": 1.4604434123916978, |
|
"learning_rate": 1.3959741736568339e-06, |
|
"loss": 0.0401, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.5142348754448398, |
|
"grad_norm": 1.3953672018242569, |
|
"learning_rate": 1.3863016249411027e-06, |
|
"loss": 0.0344, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.5160142348754448, |
|
"grad_norm": 1.0290780401614064, |
|
"learning_rate": 1.376657306736453e-06, |
|
"loss": 0.0337, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.5177935943060499, |
|
"grad_norm": 1.4221729830817782, |
|
"learning_rate": 1.3670412943850975e-06, |
|
"loss": 0.0383, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.5195729537366547, |
|
"grad_norm": 1.3932717642481327, |
|
"learning_rate": 1.3574536630081208e-06, |
|
"loss": 0.0343, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.52135231316726, |
|
"grad_norm": 1.5706221844365449, |
|
"learning_rate": 1.347894487504896e-06, |
|
"loss": 0.0474, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.5231316725978647, |
|
"grad_norm": 1.2220405831378893, |
|
"learning_rate": 1.3383638425524909e-06, |
|
"loss": 0.0308, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.5249110320284698, |
|
"grad_norm": 1.1539956107683431, |
|
"learning_rate": 1.3288618026050943e-06, |
|
"loss": 0.0326, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.5266903914590748, |
|
"grad_norm": 1.5550244588968958, |
|
"learning_rate": 1.31938844189343e-06, |
|
"loss": 0.0475, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.5284697508896796, |
|
"grad_norm": 1.998291980293586, |
|
"learning_rate": 1.3099438344241777e-06, |
|
"loss": 0.0617, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.5302491103202847, |
|
"grad_norm": 1.185671996886552, |
|
"learning_rate": 1.3005280539793908e-06, |
|
"loss": 0.031, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.5320284697508897, |
|
"grad_norm": 1.148760132829168, |
|
"learning_rate": 1.2911411741159273e-06, |
|
"loss": 0.0308, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.5338078291814945, |
|
"grad_norm": 1.4974218521132883, |
|
"learning_rate": 1.2817832681648712e-06, |
|
"loss": 0.0504, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.5355871886120998, |
|
"grad_norm": 1.369939956652242, |
|
"learning_rate": 1.2724544092309581e-06, |
|
"loss": 0.0377, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.5373665480427046, |
|
"grad_norm": 1.0740044584142066, |
|
"learning_rate": 1.2631546701920073e-06, |
|
"loss": 0.0282, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.5391459074733096, |
|
"grad_norm": 1.3481555464634867, |
|
"learning_rate": 1.2538841236983519e-06, |
|
"loss": 0.0342, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.5409252669039146, |
|
"grad_norm": 1.4633970205527596, |
|
"learning_rate": 1.244642842172266e-06, |
|
"loss": 0.0324, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.5427046263345194, |
|
"grad_norm": 2.4076472325618226, |
|
"learning_rate": 1.2354308978074088e-06, |
|
"loss": 0.0306, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.5444839857651247, |
|
"grad_norm": 1.1424129508019327, |
|
"learning_rate": 1.2262483625682514e-06, |
|
"loss": 0.0305, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.5462633451957295, |
|
"grad_norm": 1.4046799244901196, |
|
"learning_rate": 1.2170953081895214e-06, |
|
"loss": 0.0405, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.5480427046263345, |
|
"grad_norm": 1.2915760683084958, |
|
"learning_rate": 1.2079718061756369e-06, |
|
"loss": 0.0356, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.5498220640569396, |
|
"grad_norm": 1.2625869361407607, |
|
"learning_rate": 1.1988779278001517e-06, |
|
"loss": 0.0379, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.5516014234875444, |
|
"grad_norm": 1.1745092450187662, |
|
"learning_rate": 1.1898137441051982e-06, |
|
"loss": 0.0334, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.5533807829181496, |
|
"grad_norm": 1.4524765681446155, |
|
"learning_rate": 1.1807793259009282e-06, |
|
"loss": 0.0408, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.5551601423487544, |
|
"grad_norm": 1.4104204635968853, |
|
"learning_rate": 1.1717747437649657e-06, |
|
"loss": 0.0389, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.5569395017793595, |
|
"grad_norm": 1.1419028978650676, |
|
"learning_rate": 1.1628000680418533e-06, |
|
"loss": 0.0252, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.5587188612099645, |
|
"grad_norm": 1.4070859011844123, |
|
"learning_rate": 1.1538553688425002e-06, |
|
"loss": 0.0325, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.5604982206405693, |
|
"grad_norm": 1.376605595900302, |
|
"learning_rate": 1.14494071604364e-06, |
|
"loss": 0.0329, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.5622775800711743, |
|
"grad_norm": 1.173727714152593, |
|
"learning_rate": 1.1360561792872754e-06, |
|
"loss": 0.0288, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.5640569395017794, |
|
"grad_norm": 3.4243604866027275, |
|
"learning_rate": 1.127201827980145e-06, |
|
"loss": 0.0463, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.5658362989323842, |
|
"grad_norm": 1.18022276828782, |
|
"learning_rate": 1.1183777312931748e-06, |
|
"loss": 0.0242, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.5676156583629894, |
|
"grad_norm": 0.9868400720503788, |
|
"learning_rate": 1.1095839581609407e-06, |
|
"loss": 0.0236, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.5693950177935942, |
|
"grad_norm": 1.7830340578183923, |
|
"learning_rate": 1.1008205772811248e-06, |
|
"loss": 0.0405, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.5711743772241993, |
|
"grad_norm": 1.5360933866268385, |
|
"learning_rate": 1.0920876571139843e-06, |
|
"loss": 0.0431, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.5729537366548043, |
|
"grad_norm": 1.102134896817105, |
|
"learning_rate": 1.0833852658818167e-06, |
|
"loss": 0.0283, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.5747330960854091, |
|
"grad_norm": 1.558242945042404, |
|
"learning_rate": 1.0747134715684221e-06, |
|
"loss": 0.0329, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.5765124555160144, |
|
"grad_norm": 1.432420874437436, |
|
"learning_rate": 1.0660723419185776e-06, |
|
"loss": 0.0411, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.5782918149466192, |
|
"grad_norm": 1.4675354985459867, |
|
"learning_rate": 1.0574619444375017e-06, |
|
"loss": 0.0307, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.5800711743772242, |
|
"grad_norm": 1.0601986726741774, |
|
"learning_rate": 1.0488823463903341e-06, |
|
"loss": 0.027, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.5818505338078293, |
|
"grad_norm": 1.2429576622952991, |
|
"learning_rate": 1.0403336148016053e-06, |
|
"loss": 0.0306, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.583629893238434, |
|
"grad_norm": 1.245880442088894, |
|
"learning_rate": 1.0318158164547159e-06, |
|
"loss": 0.037, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.585409252669039, |
|
"grad_norm": 1.4291957498905559, |
|
"learning_rate": 1.0233290178914096e-06, |
|
"loss": 0.0292, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.5871886120996441, |
|
"grad_norm": 1.4930455037899026, |
|
"learning_rate": 1.014873285411262e-06, |
|
"loss": 0.0403, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.5889679715302492, |
|
"grad_norm": 1.305926923425324, |
|
"learning_rate": 1.006448685071154e-06, |
|
"loss": 0.039, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.5907473309608542, |
|
"grad_norm": 1.5585828820315304, |
|
"learning_rate": 9.980552826847635e-07, |
|
"loss": 0.0422, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.592526690391459, |
|
"grad_norm": 1.403477585881213, |
|
"learning_rate": 9.896931438220453e-07, |
|
"loss": 0.042, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.594306049822064, |
|
"grad_norm": 1.1754825911541846, |
|
"learning_rate": 9.813623338087181e-07, |
|
"loss": 0.0298, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.596085409252669, |
|
"grad_norm": 1.9598400310978035, |
|
"learning_rate": 9.730629177257623e-07, |
|
"loss": 0.0422, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.5978647686832739, |
|
"grad_norm": 1.2482312628151755, |
|
"learning_rate": 9.64794960408903e-07, |
|
"loss": 0.0309, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.5996441281138791, |
|
"grad_norm": 1.366132915789022, |
|
"learning_rate": 9.565585264481092e-07, |
|
"loss": 0.0382, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.601423487544484, |
|
"grad_norm": 1.0289551536403272, |
|
"learning_rate": 9.483536801870835e-07, |
|
"loss": 0.0252, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.603202846975089, |
|
"grad_norm": 1.0957606681103695, |
|
"learning_rate": 9.401804857227648e-07, |
|
"loss": 0.029, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.604982206405694, |
|
"grad_norm": 1.3996675696377936, |
|
"learning_rate": 9.320390069048258e-07, |
|
"loss": 0.0312, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.6067615658362988, |
|
"grad_norm": 1.1188466104113475, |
|
"learning_rate": 9.239293073351735e-07, |
|
"loss": 0.0282, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.608540925266904, |
|
"grad_norm": 1.6264055606931755, |
|
"learning_rate": 9.158514503674543e-07, |
|
"loss": 0.0463, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.6103202846975089, |
|
"grad_norm": 1.0510008956235406, |
|
"learning_rate": 9.078054991065532e-07, |
|
"loss": 0.0333, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.612099644128114, |
|
"grad_norm": 1.5812635476645758, |
|
"learning_rate": 8.997915164081095e-07, |
|
"loss": 0.0497, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.613879003558719, |
|
"grad_norm": 1.5976925510481317, |
|
"learning_rate": 8.918095648780195e-07, |
|
"loss": 0.0432, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.6156583629893237, |
|
"grad_norm": 1.606059516266359, |
|
"learning_rate": 8.838597068719518e-07, |
|
"loss": 0.0363, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.6174377224199288, |
|
"grad_norm": 1.398561914911457, |
|
"learning_rate": 8.75942004494853e-07, |
|
"loss": 0.0353, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.6192170818505338, |
|
"grad_norm": 1.547802992634236, |
|
"learning_rate": 8.680565196004704e-07, |
|
"loss": 0.0354, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.6209964412811388, |
|
"grad_norm": 1.2921811197776576, |
|
"learning_rate": 8.602033137908666e-07, |
|
"loss": 0.028, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.6227758007117439, |
|
"grad_norm": 1.339246959467236, |
|
"learning_rate": 8.523824484159348e-07, |
|
"loss": 0.0297, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.6245551601423487, |
|
"grad_norm": 1.1308040066824512, |
|
"learning_rate": 8.445939845729245e-07, |
|
"loss": 0.0277, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.6263345195729537, |
|
"grad_norm": 1.033714750608927, |
|
"learning_rate": 8.368379831059592e-07, |
|
"loss": 0.0254, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.6281138790035588, |
|
"grad_norm": 1.3626331342703988, |
|
"learning_rate": 8.29114504605566e-07, |
|
"loss": 0.0374, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.6298932384341636, |
|
"grad_norm": 1.4292997126966243, |
|
"learning_rate": 8.21423609408199e-07, |
|
"loss": 0.0404, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.6316725978647688, |
|
"grad_norm": 1.5374938298163654, |
|
"learning_rate": 8.137653575957666e-07, |
|
"loss": 0.0322, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.6334519572953736, |
|
"grad_norm": 1.2071509278215038, |
|
"learning_rate": 8.061398089951678e-07, |
|
"loss": 0.0306, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.6352313167259787, |
|
"grad_norm": 1.3290995686813996, |
|
"learning_rate": 7.985470231778203e-07, |
|
"loss": 0.0331, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.6370106761565837, |
|
"grad_norm": 1.5201894320183187, |
|
"learning_rate": 7.909870594591951e-07, |
|
"loss": 0.0388, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.6387900355871885, |
|
"grad_norm": 1.7079018945609106, |
|
"learning_rate": 7.834599768983553e-07, |
|
"loss": 0.0437, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.6405693950177938, |
|
"grad_norm": 1.1074860977246563, |
|
"learning_rate": 7.759658342974951e-07, |
|
"loss": 0.0257, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.6423487544483986, |
|
"grad_norm": 1.246814730932286, |
|
"learning_rate": 7.685046902014747e-07, |
|
"loss": 0.0348, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.6441281138790036, |
|
"grad_norm": 1.6797149756440681, |
|
"learning_rate": 7.61076602897371e-07, |
|
"loss": 0.0402, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.6459074733096086, |
|
"grad_norm": 1.4087463889572616, |
|
"learning_rate": 7.536816304140177e-07, |
|
"loss": 0.0389, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.6476868327402134, |
|
"grad_norm": 1.9787281147665718, |
|
"learning_rate": 7.46319830521553e-07, |
|
"loss": 0.0516, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.6494661921708185, |
|
"grad_norm": 1.1096368238071512, |
|
"learning_rate": 7.389912607309662e-07, |
|
"loss": 0.03, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.6512455516014235, |
|
"grad_norm": 1.5134470324553184, |
|
"learning_rate": 7.316959782936516e-07, |
|
"loss": 0.0396, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.6530249110320283, |
|
"grad_norm": 0.9559539707658596, |
|
"learning_rate": 7.244340402009608e-07, |
|
"loss": 0.029, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.6548042704626336, |
|
"grad_norm": 2.282822385516168, |
|
"learning_rate": 7.172055031837572e-07, |
|
"loss": 0.0464, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.6565836298932384, |
|
"grad_norm": 1.4821633395596163, |
|
"learning_rate": 7.100104237119676e-07, |
|
"loss": 0.0394, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.6583629893238434, |
|
"grad_norm": 0.9825576164039169, |
|
"learning_rate": 7.028488579941506e-07, |
|
"loss": 0.0247, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.6601423487544484, |
|
"grad_norm": 1.5868095916808034, |
|
"learning_rate": 6.957208619770505e-07, |
|
"loss": 0.0422, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.6619217081850532, |
|
"grad_norm": 1.59543391689075, |
|
"learning_rate": 6.886264913451635e-07, |
|
"loss": 0.0455, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.6637010676156585, |
|
"grad_norm": 1.7131999034214405, |
|
"learning_rate": 6.815658015203014e-07, |
|
"loss": 0.0473, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.6654804270462633, |
|
"grad_norm": 1.2302292589904051, |
|
"learning_rate": 6.745388476611553e-07, |
|
"loss": 0.0338, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.6672597864768683, |
|
"grad_norm": 1.2873084179689118, |
|
"learning_rate": 6.67545684662873e-07, |
|
"loss": 0.0321, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.6690391459074734, |
|
"grad_norm": 1.2218759831482584, |
|
"learning_rate": 6.605863671566221e-07, |
|
"loss": 0.0342, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.6708185053380782, |
|
"grad_norm": 1.2953760941840922, |
|
"learning_rate": 6.536609495091695e-07, |
|
"loss": 0.0369, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.6725978647686834, |
|
"grad_norm": 1.5203558068091225, |
|
"learning_rate": 6.467694858224488e-07, |
|
"loss": 0.041, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.6743772241992882, |
|
"grad_norm": 1.0492777500160095, |
|
"learning_rate": 6.399120299331468e-07, |
|
"loss": 0.0283, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.6761565836298933, |
|
"grad_norm": 0.9285568907498712, |
|
"learning_rate": 6.330886354122768e-07, |
|
"loss": 0.0242, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.6779359430604983, |
|
"grad_norm": 1.1052095878188621, |
|
"learning_rate": 6.262993555647617e-07, |
|
"loss": 0.031, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.6797153024911031, |
|
"grad_norm": 1.0197266938781353, |
|
"learning_rate": 6.1954424342902e-07, |
|
"loss": 0.0282, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.6814946619217082, |
|
"grad_norm": 1.2710366924314067, |
|
"learning_rate": 6.128233517765448e-07, |
|
"loss": 0.0327, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.6832740213523132, |
|
"grad_norm": 1.4037521423367485, |
|
"learning_rate": 6.061367331114992e-07, |
|
"loss": 0.0384, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.685053380782918, |
|
"grad_norm": 1.6806105771035735, |
|
"learning_rate": 5.994844396703025e-07, |
|
"loss": 0.0501, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.6868327402135233, |
|
"grad_norm": 1.333481395027055, |
|
"learning_rate": 5.928665234212233e-07, |
|
"loss": 0.0298, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.688612099644128, |
|
"grad_norm": 0.9673724807686974, |
|
"learning_rate": 5.862830360639698e-07, |
|
"loss": 0.0237, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.690391459074733, |
|
"grad_norm": 1.7203231349125754, |
|
"learning_rate": 5.797340290292907e-07, |
|
"loss": 0.035, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.6921708185053381, |
|
"grad_norm": 1.3972479445973882, |
|
"learning_rate": 5.732195534785723e-07, |
|
"loss": 0.0375, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.693950177935943, |
|
"grad_norm": 1.4505000178732557, |
|
"learning_rate": 5.667396603034369e-07, |
|
"loss": 0.0366, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.6957295373665482, |
|
"grad_norm": 1.5219215239490382, |
|
"learning_rate": 5.602944001253486e-07, |
|
"loss": 0.0342, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.697508896797153, |
|
"grad_norm": 1.2366742639170782, |
|
"learning_rate": 5.538838232952104e-07, |
|
"loss": 0.0276, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.699288256227758, |
|
"grad_norm": 1.8712833810272373, |
|
"learning_rate": 5.475079798929816e-07, |
|
"loss": 0.0456, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.701067615658363, |
|
"grad_norm": 0.8412038996326763, |
|
"learning_rate": 5.411669197272795e-07, |
|
"loss": 0.0161, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.7028469750889679, |
|
"grad_norm": 1.0457489487773088, |
|
"learning_rate": 5.348606923349903e-07, |
|
"loss": 0.0267, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.704626334519573, |
|
"grad_norm": 0.9746185683942371, |
|
"learning_rate": 5.285893469808855e-07, |
|
"loss": 0.0259, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.706405693950178, |
|
"grad_norm": 1.5587151145322626, |
|
"learning_rate": 5.223529326572352e-07, |
|
"loss": 0.0446, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.708185053380783, |
|
"grad_norm": 1.5039164897247903, |
|
"learning_rate": 5.161514980834232e-07, |
|
"loss": 0.032, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.709964412811388, |
|
"grad_norm": 1.250756814010595, |
|
"learning_rate": 5.099850917055709e-07, |
|
"loss": 0.0295, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.7117437722419928, |
|
"grad_norm": 1.5912400055878162, |
|
"learning_rate": 5.038537616961559e-07, |
|
"loss": 0.042, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.7135231316725978, |
|
"grad_norm": 1.237492169104337, |
|
"learning_rate": 4.977575559536358e-07, |
|
"loss": 0.0281, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.7153024911032029, |
|
"grad_norm": 1.759880990124191, |
|
"learning_rate": 4.916965221020753e-07, |
|
"loss": 0.0441, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.7170818505338077, |
|
"grad_norm": 1.3052328399705417, |
|
"learning_rate": 4.856707074907729e-07, |
|
"loss": 0.0327, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.718861209964413, |
|
"grad_norm": 1.7193250726034992, |
|
"learning_rate": 4.796801591938922e-07, |
|
"loss": 0.0451, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.7206405693950177, |
|
"grad_norm": 1.2001617909264504, |
|
"learning_rate": 4.737249240100911e-07, |
|
"loss": 0.0274, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.7224199288256228, |
|
"grad_norm": 1.298970292006101, |
|
"learning_rate": 4.6780504846216155e-07, |
|
"loss": 0.0277, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.7241992882562278, |
|
"grad_norm": 1.1247231256879973, |
|
"learning_rate": 4.619205787966613e-07, |
|
"loss": 0.025, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.7259786476868326, |
|
"grad_norm": 1.1419521469229656, |
|
"learning_rate": 4.560715609835548e-07, |
|
"loss": 0.0358, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.7277580071174379, |
|
"grad_norm": 1.296584906143253, |
|
"learning_rate": 4.5025804071585464e-07, |
|
"loss": 0.0312, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.7295373665480427, |
|
"grad_norm": 1.50306672427868, |
|
"learning_rate": 4.4448006340926163e-07, |
|
"loss": 0.0399, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.7313167259786477, |
|
"grad_norm": 1.052892307986346, |
|
"learning_rate": 4.3873767420181344e-07, |
|
"loss": 0.0247, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.7330960854092528, |
|
"grad_norm": 1.166756764742125, |
|
"learning_rate": 4.3303091795353024e-07, |
|
"loss": 0.0286, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.7348754448398576, |
|
"grad_norm": 1.5788275256929583, |
|
"learning_rate": 4.2735983924606596e-07, |
|
"loss": 0.0401, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.7366548042704626, |
|
"grad_norm": 1.5024106623983446, |
|
"learning_rate": 4.2172448238235464e-07, |
|
"loss": 0.0408, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.7384341637010676, |
|
"grad_norm": 1.545967123564301, |
|
"learning_rate": 4.161248913862731e-07, |
|
"loss": 0.043, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.7402135231316724, |
|
"grad_norm": 1.2501991126492509, |
|
"learning_rate": 4.1056111000228937e-07, |
|
"loss": 0.0356, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.7419928825622777, |
|
"grad_norm": 1.6994107897971127, |
|
"learning_rate": 4.0503318169512417e-07, |
|
"loss": 0.0407, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.7437722419928825, |
|
"grad_norm": 1.315236167779905, |
|
"learning_rate": 3.9954114964941336e-07, |
|
"loss": 0.0282, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.7455516014234875, |
|
"grad_norm": 1.678218457835779, |
|
"learning_rate": 3.9408505676936327e-07, |
|
"loss": 0.0493, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.7473309608540926, |
|
"grad_norm": 1.4681939785773617, |
|
"learning_rate": 3.886649456784253e-07, |
|
"loss": 0.0409, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.7491103202846974, |
|
"grad_norm": 1.5899376890931587, |
|
"learning_rate": 3.8328085871895624e-07, |
|
"loss": 0.0526, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.7508896797153026, |
|
"grad_norm": 1.2579351265921612, |
|
"learning_rate": 3.779328379518898e-07, |
|
"loss": 0.0355, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.7526690391459074, |
|
"grad_norm": 1.154390706401381, |
|
"learning_rate": 3.7262092515640556e-07, |
|
"loss": 0.0264, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.7544483985765125, |
|
"grad_norm": 1.1027570277216359, |
|
"learning_rate": 3.673451618296081e-07, |
|
"loss": 0.0267, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.7562277580071175, |
|
"grad_norm": 1.2268855125971485, |
|
"learning_rate": 3.621055891861963e-07, |
|
"loss": 0.0359, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.7580071174377223, |
|
"grad_norm": 1.4531917801899834, |
|
"learning_rate": 3.56902248158148e-07, |
|
"loss": 0.0295, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.7597864768683276, |
|
"grad_norm": 1.1045935333197077, |
|
"learning_rate": 3.517351793943913e-07, |
|
"loss": 0.0257, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.7615658362989324, |
|
"grad_norm": 1.7106256536462636, |
|
"learning_rate": 3.4660442326049704e-07, |
|
"loss": 0.0367, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.7633451957295374, |
|
"grad_norm": 1.2327954808437651, |
|
"learning_rate": 3.4151001983835696e-07, |
|
"loss": 0.027, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.7651245551601424, |
|
"grad_norm": 1.1840589042230343, |
|
"learning_rate": 3.364520089258727e-07, |
|
"loss": 0.025, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.7669039145907472, |
|
"grad_norm": 1.3561992184866398, |
|
"learning_rate": 3.314304300366461e-07, |
|
"loss": 0.0377, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.7686832740213523, |
|
"grad_norm": 1.193711775658132, |
|
"learning_rate": 3.2644532239966444e-07, |
|
"loss": 0.0318, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.7704626334519573, |
|
"grad_norm": 0.8336790128382712, |
|
"learning_rate": 3.2149672495900286e-07, |
|
"loss": 0.0238, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.7722419928825621, |
|
"grad_norm": 1.0907281259357418, |
|
"learning_rate": 3.165846763735153e-07, |
|
"loss": 0.0318, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.7740213523131674, |
|
"grad_norm": 1.4657559126535757, |
|
"learning_rate": 3.117092150165324e-07, |
|
"loss": 0.043, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.7758007117437722, |
|
"grad_norm": 1.9043989774769703, |
|
"learning_rate": 3.068703789755606e-07, |
|
"loss": 0.0523, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.7775800711743772, |
|
"grad_norm": 1.2130700836868982, |
|
"learning_rate": 3.020682060519886e-07, |
|
"loss": 0.0287, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.7793594306049823, |
|
"grad_norm": 1.4389418616105274, |
|
"learning_rate": 2.9730273376078923e-07, |
|
"loss": 0.0392, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.7793594306049823, |
|
"eval_loss": 0.11300025135278702, |
|
"eval_runtime": 2.8505, |
|
"eval_samples_per_second": 16.137, |
|
"eval_steps_per_second": 4.21, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.781138790035587, |
|
"grad_norm": 1.3601586431777726, |
|
"learning_rate": 2.9257399933022737e-07, |
|
"loss": 0.0359, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.7829181494661923, |
|
"grad_norm": 1.4697182380796958, |
|
"learning_rate": 2.8788203970156805e-07, |
|
"loss": 0.0465, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.7846975088967971, |
|
"grad_norm": 1.377129275794295, |
|
"learning_rate": 2.832268915287878e-07, |
|
"loss": 0.0352, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.7864768683274022, |
|
"grad_norm": 1.458246352492685, |
|
"learning_rate": 2.7860859117828985e-07, |
|
"loss": 0.0384, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.7882562277580072, |
|
"grad_norm": 1.4085734172898507, |
|
"learning_rate": 2.740271747286194e-07, |
|
"loss": 0.0389, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.790035587188612, |
|
"grad_norm": 2.012511127251897, |
|
"learning_rate": 2.6948267797018145e-07, |
|
"loss": 0.0418, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.791814946619217, |
|
"grad_norm": 1.1732494654647039, |
|
"learning_rate": 2.649751364049613e-07, |
|
"loss": 0.0205, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.793594306049822, |
|
"grad_norm": 1.4160813249731823, |
|
"learning_rate": 2.6050458524624735e-07, |
|
"loss": 0.0303, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.795373665480427, |
|
"grad_norm": 1.471855055234415, |
|
"learning_rate": 2.560710594183552e-07, |
|
"loss": 0.0318, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.7971530249110321, |
|
"grad_norm": 1.3654072089902827, |
|
"learning_rate": 2.5167459355635524e-07, |
|
"loss": 0.0349, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.798932384341637, |
|
"grad_norm": 1.5414543607118507, |
|
"learning_rate": 2.473152220058039e-07, |
|
"loss": 0.0385, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.800711743772242, |
|
"grad_norm": 1.2944836099840882, |
|
"learning_rate": 2.429929788224722e-07, |
|
"loss": 0.033, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.802491103202847, |
|
"grad_norm": 1.1346685103339893, |
|
"learning_rate": 2.38707897772083e-07, |
|
"loss": 0.0364, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.8042704626334518, |
|
"grad_norm": 1.4741265353633035, |
|
"learning_rate": 2.3446001233004333e-07, |
|
"loss": 0.0406, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.806049822064057, |
|
"grad_norm": 1.0788596550124212, |
|
"learning_rate": 2.3024935568118745e-07, |
|
"loss": 0.0253, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.8078291814946619, |
|
"grad_norm": 1.320566305572623, |
|
"learning_rate": 2.2607596071951288e-07, |
|
"loss": 0.0347, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.809608540925267, |
|
"grad_norm": 1.156346571440622, |
|
"learning_rate": 2.2193986004792667e-07, |
|
"loss": 0.0344, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.811387900355872, |
|
"grad_norm": 0.9819449614702166, |
|
"learning_rate": 2.1784108597799058e-07, |
|
"loss": 0.0197, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.8131672597864767, |
|
"grad_norm": 1.0456209863967612, |
|
"learning_rate": 2.1377967052966685e-07, |
|
"loss": 0.0185, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.814946619217082, |
|
"grad_norm": 0.901165226305563, |
|
"learning_rate": 2.0975564543107007e-07, |
|
"loss": 0.0213, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.8167259786476868, |
|
"grad_norm": 1.3257757958225824, |
|
"learning_rate": 2.057690421182168e-07, |
|
"loss": 0.0295, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.8185053380782918, |
|
"grad_norm": 0.9905368366318806, |
|
"learning_rate": 2.01819891734783e-07, |
|
"loss": 0.0231, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.8202846975088969, |
|
"grad_norm": 1.2632031637626522, |
|
"learning_rate": 1.979082251318576e-07, |
|
"loss": 0.0305, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.8220640569395017, |
|
"grad_norm": 1.1074855814579556, |
|
"learning_rate": 1.9403407286770592e-07, |
|
"loss": 0.0223, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.8238434163701067, |
|
"grad_norm": 1.868137921107018, |
|
"learning_rate": 1.9019746520752502e-07, |
|
"loss": 0.0525, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.8256227758007118, |
|
"grad_norm": 1.1526046100884755, |
|
"learning_rate": 1.8639843212321206e-07, |
|
"loss": 0.0245, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.8274021352313166, |
|
"grad_norm": 1.1049722673060227, |
|
"learning_rate": 1.826370032931285e-07, |
|
"loss": 0.0295, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.8291814946619218, |
|
"grad_norm": 1.533769471219323, |
|
"learning_rate": 1.789132081018674e-07, |
|
"loss": 0.0434, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.8309608540925266, |
|
"grad_norm": 1.2567412180558952, |
|
"learning_rate": 1.7522707564002706e-07, |
|
"loss": 0.0292, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.8327402135231317, |
|
"grad_norm": 1.69900571935297, |
|
"learning_rate": 1.7157863470397718e-07, |
|
"loss": 0.0424, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.8345195729537367, |
|
"grad_norm": 1.7607149112751312, |
|
"learning_rate": 1.6796791379564138e-07, |
|
"loss": 0.0407, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.8362989323843415, |
|
"grad_norm": 1.012413518780093, |
|
"learning_rate": 1.6439494112227173e-07, |
|
"loss": 0.0242, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.8380782918149468, |
|
"grad_norm": 1.337559136902663, |
|
"learning_rate": 1.6085974459622567e-07, |
|
"loss": 0.034, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.8398576512455516, |
|
"grad_norm": 1.1136568223336052, |
|
"learning_rate": 1.573623518347517e-07, |
|
"loss": 0.0271, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.8416370106761566, |
|
"grad_norm": 1.1930755790053005, |
|
"learning_rate": 1.5390279015977117e-07, |
|
"loss": 0.0291, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.8434163701067616, |
|
"grad_norm": 1.5610087476668963, |
|
"learning_rate": 1.5048108659766693e-07, |
|
"loss": 0.0388, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.8451957295373664, |
|
"grad_norm": 1.0743815763393307, |
|
"learning_rate": 1.470972678790711e-07, |
|
"loss": 0.0343, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.8469750889679717, |
|
"grad_norm": 1.3788404938160796, |
|
"learning_rate": 1.437513604386559e-07, |
|
"loss": 0.0317, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.8487544483985765, |
|
"grad_norm": 1.3214089228987036, |
|
"learning_rate": 1.404433904149266e-07, |
|
"loss": 0.0259, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.8505338078291815, |
|
"grad_norm": 1.3231283489145242, |
|
"learning_rate": 1.3717338365001943e-07, |
|
"loss": 0.0287, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.8523131672597866, |
|
"grad_norm": 1.8753585002437558, |
|
"learning_rate": 1.3394136568949834e-07, |
|
"loss": 0.0594, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.8540925266903914, |
|
"grad_norm": 1.6342844674829058, |
|
"learning_rate": 1.307473617821553e-07, |
|
"loss": 0.0376, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.8558718861209964, |
|
"grad_norm": 1.7006024597453402, |
|
"learning_rate": 1.275913968798137e-07, |
|
"loss": 0.0372, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.8576512455516014, |
|
"grad_norm": 1.3258397750195754, |
|
"learning_rate": 1.2447349563713186e-07, |
|
"loss": 0.0294, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.8594306049822062, |
|
"grad_norm": 1.067736357619436, |
|
"learning_rate": 1.213936824114137e-07, |
|
"loss": 0.0227, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.8612099644128115, |
|
"grad_norm": 1.8092620929551444, |
|
"learning_rate": 1.1835198126241509e-07, |
|
"loss": 0.0431, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.8629893238434163, |
|
"grad_norm": 1.458776210774106, |
|
"learning_rate": 1.1534841595215617e-07, |
|
"loss": 0.0432, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.8647686832740213, |
|
"grad_norm": 1.221799420180028, |
|
"learning_rate": 1.1238300994473983e-07, |
|
"loss": 0.0297, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.8665480427046264, |
|
"grad_norm": 1.513504914035125, |
|
"learning_rate": 1.0945578640616183e-07, |
|
"loss": 0.037, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 1.8683274021352312, |
|
"grad_norm": 1.0335150927864094, |
|
"learning_rate": 1.0656676820413603e-07, |
|
"loss": 0.0198, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.8701067615658364, |
|
"grad_norm": 1.2508049232310505, |
|
"learning_rate": 1.0371597790791166e-07, |
|
"loss": 0.0381, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 1.8718861209964412, |
|
"grad_norm": 1.5356206620538395, |
|
"learning_rate": 1.0090343778809908e-07, |
|
"loss": 0.0342, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 1.8736654804270463, |
|
"grad_norm": 1.0773834521921104, |
|
"learning_rate": 9.812916981649433e-08, |
|
"loss": 0.0229, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 1.8754448398576513, |
|
"grad_norm": 1.4172274107967047, |
|
"learning_rate": 9.539319566590766e-08, |
|
"loss": 0.0383, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 1.8772241992882561, |
|
"grad_norm": 1.2423559000628233, |
|
"learning_rate": 9.269553670999743e-08, |
|
"loss": 0.0319, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.8790035587188612, |
|
"grad_norm": 1.7655558987044344, |
|
"learning_rate": 9.003621402309815e-08, |
|
"loss": 0.0403, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 1.8807829181494662, |
|
"grad_norm": 1.6569590734730415, |
|
"learning_rate": 8.741524838005888e-08, |
|
"loss": 0.033, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 1.8825622775800712, |
|
"grad_norm": 1.3544315563012186, |
|
"learning_rate": 8.483266025608061e-08, |
|
"loss": 0.0312, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 1.8843416370106763, |
|
"grad_norm": 1.380529694778548, |
|
"learning_rate": 8.228846982655525e-08, |
|
"loss": 0.0271, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 1.886120996441281, |
|
"grad_norm": 1.0553204014931397, |
|
"learning_rate": 7.978269696691021e-08, |
|
"loss": 0.0263, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.887900355871886, |
|
"grad_norm": 1.6103263325873878, |
|
"learning_rate": 7.731536125244965e-08, |
|
"loss": 0.0444, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 1.8896797153024911, |
|
"grad_norm": 1.36609100774501, |
|
"learning_rate": 7.488648195820513e-08, |
|
"loss": 0.0395, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 1.891459074733096, |
|
"grad_norm": 1.694179035978184, |
|
"learning_rate": 7.249607805878245e-08, |
|
"loss": 0.0385, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 1.8932384341637012, |
|
"grad_norm": 1.3895241444631032, |
|
"learning_rate": 7.014416822821557e-08, |
|
"loss": 0.0356, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 1.895017793594306, |
|
"grad_norm": 1.1730038194517638, |
|
"learning_rate": 6.783077083981793e-08, |
|
"loss": 0.0313, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 1.896797153024911, |
|
"grad_norm": 1.235200185275907, |
|
"learning_rate": 6.55559039660425e-08, |
|
"loss": 0.0362, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 1.898576512455516, |
|
"grad_norm": 1.3239678835210775, |
|
"learning_rate": 6.331958537833693e-08, |
|
"loss": 0.0354, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 1.9003558718861209, |
|
"grad_norm": 1.6068476112913732, |
|
"learning_rate": 6.112183254700866e-08, |
|
"loss": 0.0433, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 1.9021352313167261, |
|
"grad_norm": 2.3283443136092496, |
|
"learning_rate": 5.8962662641083856e-08, |
|
"loss": 0.0425, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 1.903914590747331, |
|
"grad_norm": 1.3144454849272535, |
|
"learning_rate": 5.6842092528176516e-08, |
|
"loss": 0.0341, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.905693950177936, |
|
"grad_norm": 1.1013207359137223, |
|
"learning_rate": 5.476013877435626e-08, |
|
"loss": 0.0269, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 1.907473309608541, |
|
"grad_norm": 1.493030661405832, |
|
"learning_rate": 5.271681764401848e-08, |
|
"loss": 0.0411, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 1.9092526690391458, |
|
"grad_norm": 1.061962174270792, |
|
"learning_rate": 5.071214509975775e-08, |
|
"loss": 0.0301, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 1.9110320284697508, |
|
"grad_norm": 1.3805675672207063, |
|
"learning_rate": 4.8746136802240716e-08, |
|
"loss": 0.0372, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 1.9128113879003559, |
|
"grad_norm": 1.7385653267070853, |
|
"learning_rate": 4.6818808110087875e-08, |
|
"loss": 0.0401, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.914590747330961, |
|
"grad_norm": 1.8268607637313525, |
|
"learning_rate": 4.493017407975087e-08, |
|
"loss": 0.0482, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 1.916370106761566, |
|
"grad_norm": 1.4365840525893336, |
|
"learning_rate": 4.308024946539424e-08, |
|
"loss": 0.0341, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 1.9181494661921707, |
|
"grad_norm": 1.485712310240352, |
|
"learning_rate": 4.1269048718783344e-08, |
|
"loss": 0.0306, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 1.9199288256227758, |
|
"grad_norm": 1.0593771617229601, |
|
"learning_rate": 3.9496585989167726e-08, |
|
"loss": 0.0254, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 1.9217081850533808, |
|
"grad_norm": 1.2264153737516374, |
|
"learning_rate": 3.776287512317345e-08, |
|
"loss": 0.0392, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.9234875444839856, |
|
"grad_norm": 1.3399548773648084, |
|
"learning_rate": 3.606792966469375e-08, |
|
"loss": 0.0277, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 1.9252669039145909, |
|
"grad_norm": 1.4183662333396714, |
|
"learning_rate": 3.4411762854782426e-08, |
|
"loss": 0.0308, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 1.9270462633451957, |
|
"grad_norm": 1.663259968404814, |
|
"learning_rate": 3.279438763155174e-08, |
|
"loss": 0.0323, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 1.9288256227758007, |
|
"grad_norm": 1.1691353180282509, |
|
"learning_rate": 3.121581663007134e-08, |
|
"loss": 0.0321, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 1.9306049822064058, |
|
"grad_norm": 0.8704627846002906, |
|
"learning_rate": 2.967606218226837e-08, |
|
"loss": 0.0207, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.9323843416370106, |
|
"grad_norm": 1.1530251942710437, |
|
"learning_rate": 2.8175136316832e-08, |
|
"loss": 0.0263, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 1.9341637010676158, |
|
"grad_norm": 1.2224323565208497, |
|
"learning_rate": 2.6713050759120117e-08, |
|
"loss": 0.0357, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 1.9359430604982206, |
|
"grad_norm": 1.416863224955437, |
|
"learning_rate": 2.528981693106558e-08, |
|
"loss": 0.0316, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 1.9377224199288257, |
|
"grad_norm": 1.1759861272752814, |
|
"learning_rate": 2.3905445951089013e-08, |
|
"loss": 0.0321, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 1.9395017793594307, |
|
"grad_norm": 1.3087544966323295, |
|
"learning_rate": 2.2559948634011673e-08, |
|
"loss": 0.0281, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.9412811387900355, |
|
"grad_norm": 1.2073389254532458, |
|
"learning_rate": 2.125333549096942e-08, |
|
"loss": 0.0306, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 1.9430604982206405, |
|
"grad_norm": 1.1831677918839518, |
|
"learning_rate": 1.9985616729332747e-08, |
|
"loss": 0.0349, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 1.9448398576512456, |
|
"grad_norm": 1.0994648484317324, |
|
"learning_rate": 1.8756802252625773e-08, |
|
"loss": 0.0298, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 1.9466192170818504, |
|
"grad_norm": 1.135600149226652, |
|
"learning_rate": 1.75669016604485e-08, |
|
"loss": 0.0249, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 1.9483985765124556, |
|
"grad_norm": 1.5061787336433856, |
|
"learning_rate": 1.6415924248403547e-08, |
|
"loss": 0.0438, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.9501779359430604, |
|
"grad_norm": 1.1862083554221274, |
|
"learning_rate": 1.5303879008021773e-08, |
|
"loss": 0.03, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 1.9519572953736655, |
|
"grad_norm": 1.2250526008839544, |
|
"learning_rate": 1.4230774626691756e-08, |
|
"loss": 0.0225, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 1.9537366548042705, |
|
"grad_norm": 1.3215786046191311, |
|
"learning_rate": 1.3196619487594875e-08, |
|
"loss": 0.0286, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 1.9555160142348753, |
|
"grad_norm": 1.8686762657546636, |
|
"learning_rate": 1.2201421669636448e-08, |
|
"loss": 0.039, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 1.9572953736654806, |
|
"grad_norm": 1.0435077097489422, |
|
"learning_rate": 1.1245188947384133e-08, |
|
"loss": 0.0241, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.9590747330960854, |
|
"grad_norm": 1.6610766868768847, |
|
"learning_rate": 1.0327928791006858e-08, |
|
"loss": 0.0293, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 1.9608540925266904, |
|
"grad_norm": 1.150551987240421, |
|
"learning_rate": 9.449648366217645e-09, |
|
"loss": 0.0281, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 1.9626334519572954, |
|
"grad_norm": 1.242762102338035, |
|
"learning_rate": 8.61035453421588e-09, |
|
"loss": 0.0284, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 1.9644128113879002, |
|
"grad_norm": 1.362017935495652, |
|
"learning_rate": 7.81005385163458e-09, |
|
"loss": 0.0311, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 1.9661921708185055, |
|
"grad_norm": 0.8603547980729914, |
|
"learning_rate": 7.048752570488205e-09, |
|
"loss": 0.0231, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.9679715302491103, |
|
"grad_norm": 1.6761092623462361, |
|
"learning_rate": 6.326456638125478e-09, |
|
"loss": 0.0506, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 1.9697508896797153, |
|
"grad_norm": 1.1717767505497123, |
|
"learning_rate": 5.643171697183314e-09, |
|
"loss": 0.0281, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 1.9715302491103204, |
|
"grad_norm": 1.4273903724307317, |
|
"learning_rate": 4.998903085539075e-09, |
|
"loss": 0.037, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 1.9733096085409252, |
|
"grad_norm": 1.218592511914452, |
|
"learning_rate": 4.393655836272825e-09, |
|
"loss": 0.0321, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 1.9750889679715302, |
|
"grad_norm": 1.5214906411844282, |
|
"learning_rate": 3.8274346776262514e-09, |
|
"loss": 0.034, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.9768683274021353, |
|
"grad_norm": 1.2266159595608923, |
|
"learning_rate": 3.300244032966582e-09, |
|
"loss": 0.0254, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 1.97864768683274, |
|
"grad_norm": 1.3541524203289064, |
|
"learning_rate": 2.8120880207493928e-09, |
|
"loss": 0.0289, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 1.9804270462633453, |
|
"grad_norm": 1.2875784348025265, |
|
"learning_rate": 2.362970454491409e-09, |
|
"loss": 0.0266, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 1.9822064056939501, |
|
"grad_norm": 1.3761230414177241, |
|
"learning_rate": 1.952894842735531e-09, |
|
"loss": 0.0265, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 1.9839857651245552, |
|
"grad_norm": 1.0797647823944452, |
|
"learning_rate": 1.5818643890258555e-09, |
|
"loss": 0.0319, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.9857651245551602, |
|
"grad_norm": 1.8205576199002294, |
|
"learning_rate": 1.2498819918843609e-09, |
|
"loss": 0.0362, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 1.987544483985765, |
|
"grad_norm": 1.5307490530590686, |
|
"learning_rate": 9.569502447837053e-10, |
|
"loss": 0.0456, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 1.9893238434163703, |
|
"grad_norm": 1.2328639251693183, |
|
"learning_rate": 7.03071436131686e-10, |
|
"loss": 0.0296, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 1.991103202846975, |
|
"grad_norm": 0.9676459542602631, |
|
"learning_rate": 4.882475492506977e-10, |
|
"loss": 0.0241, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 1.99288256227758, |
|
"grad_norm": 2.028873733291517, |
|
"learning_rate": 3.124802623627465e-10, |
|
"loss": 0.0669, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.9946619217081851, |
|
"grad_norm": 1.5585068389133452, |
|
"learning_rate": 1.7577094857557097e-10, |
|
"loss": 0.042, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 1.99644128113879, |
|
"grad_norm": 1.1931982575368127, |
|
"learning_rate": 7.812067587487093e-11, |
|
"loss": 0.0264, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 1.998220640569395, |
|
"grad_norm": 1.3076067498830473, |
|
"learning_rate": 1.9530207111539967e-11, |
|
"loss": 0.0292, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.7884426434998218, |
|
"learning_rate": 0.0, |
|
"loss": 0.0177, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 1124, |
|
"total_flos": 8352928628736.0, |
|
"train_loss": 0.07440683467883709, |
|
"train_runtime": 1285.8872, |
|
"train_samples_per_second": 6.988, |
|
"train_steps_per_second": 0.874 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1124, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 2000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 8352928628736.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|