[
  { "loss": 1.3916, "grad_norm": 7.633892059326172, "learning_rate": 1.9831190798376187e-05, "epoch": 0.08457374830852503, "step": 500 },
  { "loss": 1.2408, "grad_norm": 4.5788655281066895, "learning_rate": 1.9662043301759137e-05, "epoch": 0.16914749661705006, "step": 1000 },
  { "loss": 1.2028, "grad_norm": 5.243614673614502, "learning_rate": 1.9492895805142083e-05, "epoch": 0.25372124492557513, "step": 1500 },
  { "loss": 1.1534, "grad_norm": 3.415882110595703, "learning_rate": 1.9323748308525033e-05, "epoch": 0.3382949932341001, "step": 2000 },
  { "loss": 1.1093, "grad_norm": 3.2337677478790283, "learning_rate": 1.9154600811907986e-05, "epoch": 0.42286874154262516, "step": 2500 },
  { "loss": 1.0754, "grad_norm": 3.4433956146240234, "learning_rate": 1.8985453315290936e-05, "epoch": 0.5074424898511503, "step": 3000 },
  { "loss": 1.0467, "grad_norm": 4.641908168792725, "learning_rate": 1.8816305818673886e-05, "epoch": 0.5920162381596752, "step": 3500 },
  { "loss": 1.0355, "grad_norm": 3.7774765491485596, "learning_rate": 1.8647158322056836e-05, "epoch": 0.6765899864682002, "step": 4000 },
  { "loss": 1.0421, "grad_norm": 3.130302906036377, "learning_rate": 1.8478349120433018e-05, "epoch": 0.7611637347767253, "step": 4500 },
  { "loss": 1.0158, "grad_norm": 2.876955509185791, "learning_rate": 1.8309201623815968e-05, "epoch": 0.8457374830852503, "step": 5000 },
  { "loss": 1.0, "grad_norm": 3.218794107437134, "learning_rate": 1.8140054127198918e-05, "epoch": 0.9303112313937754, "step": 5500 },
  { "loss": 0.9665, "grad_norm": 2.9098243713378906, "learning_rate": 1.7971244925575103e-05, "epoch": 1.0148849797023005, "step": 6000 },
  { "loss": 0.8815, "grad_norm": 2.203686475753784, "learning_rate": 1.7802097428958052e-05, "epoch": 1.0994587280108254, "step": 6500 },
  { "loss": 0.9049, "grad_norm": 3.389420986175537, "learning_rate": 1.7632949932341002e-05, "epoch": 1.1840324763193504, "step": 7000 },
  { "loss": 0.9059, "grad_norm": 2.778923749923706, "learning_rate": 1.7463802435723952e-05, "epoch": 1.2686062246278755, "step": 7500 },
  { "loss": 0.8858, "grad_norm": 5.216675758361816, "learning_rate": 1.7294993234100137e-05, "epoch": 1.3531799729364005, "step": 8000 },
  { "loss": 0.8881, "grad_norm": 3.9376771450042725, "learning_rate": 1.7125845737483087e-05, "epoch": 1.4377537212449256, "step": 8500 },
  { "loss": 0.8885, "grad_norm": 3.8835389614105225, "learning_rate": 1.6956698240866037e-05, "epoch": 1.5223274695534506, "step": 9000 },
  { "loss": 0.8774, "grad_norm": 3.470211982727051, "learning_rate": 1.6787550744248987e-05, "epoch": 1.6069012178619757, "step": 9500 },
  { "loss": 0.872, "grad_norm": 3.03437876701355, "learning_rate": 1.6618403247631937e-05, "epoch": 1.6914749661705006, "step": 10000 },
  { "loss": 0.8883, "grad_norm": 3.5217363834381104, "learning_rate": 1.6449255751014887e-05, "epoch": 1.7760487144790256, "step": 10500 },
  { "loss": 0.8614, "grad_norm": 3.965338706970215, "learning_rate": 1.6280446549391072e-05, "epoch": 1.8606224627875507, "step": 11000 },
  { "loss": 0.8682, "grad_norm": 2.316436767578125, "learning_rate": 1.6111299052774022e-05, "epoch": 1.9451962110960759, "step": 11500 },
  { "loss": 0.8286, "grad_norm": 3.9571282863616943, "learning_rate": 1.5942151556156972e-05, "epoch": 2.029769959404601, "step": 12000 },
  { "loss": 0.7808, "grad_norm": 3.676339864730835, "learning_rate": 1.577300405953992e-05, "epoch": 2.1143437077131257, "step": 12500 },
  { "loss": 0.7811, "grad_norm": 2.8704166412353516, "learning_rate": 1.560385656292287e-05, "epoch": 2.198917456021651, "step": 13000 },
  { "loss": 0.8, "grad_norm": 4.323342800140381, "learning_rate": 1.5434709066305818e-05, "epoch": 2.283491204330176, "step": 13500 },
  { "loss": 0.79, "grad_norm": 2.5053164958953857, "learning_rate": 1.5265561569688768e-05, "epoch": 2.3680649526387008, "step": 14000 },
  { "loss": 0.7849, "grad_norm": 3.5477893352508545, "learning_rate": 1.509641407307172e-05, "epoch": 2.452638700947226, "step": 14500 },
  { "loss": 0.7839, "grad_norm": 3.699144124984741, "learning_rate": 1.4927604871447903e-05, "epoch": 2.537212449255751, "step": 15000 },
  { "loss": 0.799, "grad_norm": 2.969682455062866, "learning_rate": 1.4758795669824088e-05, "epoch": 2.621786197564276, "step": 15500 },
  { "loss": 0.7799, "grad_norm": 4.236715316772461, "learning_rate": 1.4589648173207038e-05, "epoch": 2.706359945872801, "step": 16000 },
  { "loss": 0.7756, "grad_norm": 3.2528302669525146, "learning_rate": 1.4420500676589988e-05, "epoch": 2.790933694181326, "step": 16500 },
  { "loss": 0.7837, "grad_norm": 4.050159454345703, "learning_rate": 1.4251353179972938e-05, "epoch": 2.8755074424898512, "step": 17000 },
  { "loss": 0.7907, "grad_norm": 3.53711199760437, "learning_rate": 1.4082543978349121e-05, "epoch": 2.960081190798376, "step": 17500 },
  { "loss": 0.7533, "grad_norm": 4.171680927276611, "learning_rate": 1.3913396481732071e-05, "epoch": 3.044654939106901, "step": 18000 },
  { "loss": 0.724, "grad_norm": 3.026613712310791, "learning_rate": 1.3744248985115021e-05, "epoch": 3.1292286874154263, "step": 18500 },
  { "loss": 0.7289, "grad_norm": 3.7827978134155273, "learning_rate": 1.357510148849797e-05, "epoch": 3.2138024357239514, "step": 19000 },
  { "loss": 0.7078, "grad_norm": 2.7544713020324707, "learning_rate": 1.3405953991880922e-05, "epoch": 3.2983761840324766, "step": 19500 },
  { "loss": 0.7154, "grad_norm": 2.6823747158050537, "learning_rate": 1.3236806495263872e-05, "epoch": 3.3829499323410013, "step": 20000 },
  { "loss": 0.7314, "grad_norm": 3.3478825092315674, "learning_rate": 1.3067658998646822e-05, "epoch": 3.4675236806495264, "step": 20500 },
  { "loss": 0.726, "grad_norm": 3.5908212661743164, "learning_rate": 1.289851150202977e-05, "epoch": 3.5520974289580516, "step": 21000 },
  { "loss": 0.7285, "grad_norm": 3.1159133911132812, "learning_rate": 1.2729702300405956e-05, "epoch": 3.6366711772665763, "step": 21500 },
  { "loss": 0.7162, "grad_norm": 3.3147387504577637, "learning_rate": 1.2560554803788905e-05, "epoch": 3.7212449255751014, "step": 22000 },
  { "loss": 0.7233, "grad_norm": 3.7307050228118896, "learning_rate": 1.2391407307171854e-05, "epoch": 3.8058186738836266, "step": 22500 },
  { "loss": 0.7159, "grad_norm": 2.382382392883301, "learning_rate": 1.2222259810554804e-05, "epoch": 3.8903924221921518, "step": 23000 },
  { "loss": 0.722, "grad_norm": 4.039222717285156, "learning_rate": 1.2053112313937754e-05, "epoch": 3.9749661705006765, "step": 23500 },
  { "loss": 0.6786, "grad_norm": 3.7908201217651367, "learning_rate": 1.188430311231394e-05, "epoch": 4.059539918809202, "step": 24000 },
  { "loss": 0.6735, "grad_norm": 3.7970995903015137, "learning_rate": 1.171515561569689e-05, "epoch": 4.144113667117726, "step": 24500 },
  { "loss": 0.6736, "grad_norm": 4.007111549377441, "learning_rate": 1.1546008119079838e-05, "epoch": 4.2286874154262515, "step": 25000 },
  { "loss": 0.664, "grad_norm": 3.4510090351104736, "learning_rate": 1.1376860622462788e-05, "epoch": 4.313261163734777, "step": 25500 },
  { "loss": 0.6724, "grad_norm": 3.279106378555298, "learning_rate": 1.1207713125845738e-05, "epoch": 4.397834912043302, "step": 26000 },
  { "loss": 0.6696, "grad_norm": 2.7026331424713135, "learning_rate": 1.1038565629228688e-05, "epoch": 4.482408660351827, "step": 26500 },
  { "loss": 0.6738, "grad_norm": 3.247185230255127, "learning_rate": 1.0869418132611638e-05, "epoch": 4.566982408660352, "step": 27000 },
  { "loss": 0.6908, "grad_norm": 3.6047909259796143, "learning_rate": 1.0700608930987821e-05, "epoch": 4.651556156968876, "step": 27500 },
  { "loss": 0.6687, "grad_norm": 4.114670753479004, "learning_rate": 1.0531461434370771e-05, "epoch": 4.7361299052774015, "step": 28000 },
  { "loss": 0.6632, "grad_norm": 2.717122793197632, "learning_rate": 1.0362313937753723e-05, "epoch": 4.820703653585927, "step": 28500 },
  { "loss": 0.6842, "grad_norm": 2.6075901985168457, "learning_rate": 1.0193166441136673e-05, "epoch": 4.905277401894452, "step": 29000 },
  { "loss": 0.6767, "grad_norm": 3.6151123046875, "learning_rate": 1.0024018944519623e-05, "epoch": 4.989851150202977, "step": 29500 },
  { "loss": 0.6441, "grad_norm": 3.7179670333862305, "learning_rate": 9.855209742895806e-06, "epoch": 5.074424898511502, "step": 30000 },
  { "loss": 0.6274, "grad_norm": 3.9154396057128906, "learning_rate": 9.686062246278756e-06, "epoch": 5.158998646820027, "step": 30500 },
  { "loss": 0.6214, "grad_norm": 2.7849080562591553, "learning_rate": 9.516914749661706e-06, "epoch": 5.243572395128552, "step": 31000 },
  { "loss": 0.6362, "grad_norm": 3.1513593196868896, "learning_rate": 9.347767253044656e-06, "epoch": 5.328146143437078, "step": 31500 },
  { "loss": 0.6366, "grad_norm": 3.0636239051818848, "learning_rate": 9.178619756427606e-06, "epoch": 5.412719891745602, "step": 32000 },
  { "loss": 0.635, "grad_norm": 3.542881727218628, "learning_rate": 9.009810554803789e-06, "epoch": 5.497293640054127, "step": 32500 },
  { "loss": 0.629, "grad_norm": 2.9938108921051025, "learning_rate": 8.840663058186739e-06, "epoch": 5.581867388362652, "step": 33000 },
  { "loss": 0.6424, "grad_norm": 3.608818769454956, "learning_rate": 8.67151556156969e-06, "epoch": 5.666441136671177, "step": 33500 },
  { "loss": 0.6354, "grad_norm": 4.858671188354492, "learning_rate": 8.50236806495264e-06, "epoch": 5.7510148849797025, "step": 34000 },
  { "loss": 0.6365, "grad_norm": 3.254009246826172, "learning_rate": 8.333220568335589e-06, "epoch": 5.835588633288228, "step": 34500 },
  { "loss": 0.6343, "grad_norm": 2.389611005783081, "learning_rate": 8.164073071718539e-06, "epoch": 5.920162381596752, "step": 35000 },
  { "loss": 0.6305, "grad_norm": 3.2198381423950195, "learning_rate": 7.994925575101489e-06, "epoch": 6.004736129905277, "step": 35500 },
  { "loss": 0.597, "grad_norm": 2.834723711013794, "learning_rate": 7.82577807848444e-06, "epoch": 6.089309878213802, "step": 36000 },
  { "loss": 0.5914, "grad_norm": 2.7054672241210938, "learning_rate": 7.656630581867388e-06, "epoch": 6.173883626522327, "step": 36500 },
  { "loss": 0.6084, "grad_norm": 2.8164889812469482, "learning_rate": 7.487483085250339e-06, "epoch": 6.2584573748308525, "step": 37000 },
  { "loss": 0.5947, "grad_norm": 3.42501163482666, "learning_rate": 7.3186738836265225e-06, "epoch": 6.343031123139378, "step": 37500 },
  { "loss": 0.604, "grad_norm": 3.881469249725342, "learning_rate": 7.149526387009473e-06, "epoch": 6.427604871447903, "step": 38000 },
  { "loss": 0.5896, "grad_norm": 3.2387328147888184, "learning_rate": 6.980378890392423e-06, "epoch": 6.512178619756428, "step": 38500 },
  { "loss": 0.5985, "grad_norm": 3.245598316192627, "learning_rate": 6.811231393775373e-06, "epoch": 6.596752368064953, "step": 39000 },
  { "loss": 0.6207, "grad_norm": 4.7686448097229, "learning_rate": 6.642422192151556e-06, "epoch": 6.681326116373477, "step": 39500 },
  { "loss": 0.6164, "grad_norm": 3.3545920848846436, "learning_rate": 6.473274695534507e-06, "epoch": 6.7658998646820026, "step": 40000 },
  { "loss": 0.5994, "grad_norm": 3.037534713745117, "learning_rate": 6.304127198917457e-06, "epoch": 6.850473612990528, "step": 40500 },
  { "loss": 0.6249, "grad_norm": 4.4626784324646, "learning_rate": 6.134979702300406e-06, "epoch": 6.935047361299053, "step": 41000 },
  { "loss": 0.5985, "grad_norm": 2.8611857891082764, "learning_rate": 5.96617050067659e-06, "epoch": 7.019621109607578, "step": 41500 },
  { "loss": 0.5841, "grad_norm": 3.028613805770874, "learning_rate": 5.79702300405954e-06, "epoch": 7.104194857916103, "step": 42000 },
  { "loss": 0.586, "grad_norm": 2.902698040008545, "learning_rate": 5.627875507442491e-06, "epoch": 7.188768606224627, "step": 42500 },
  { "loss": 0.5869, "grad_norm": 2.2707433700561523, "learning_rate": 5.45872801082544e-06, "epoch": 7.273342354533153, "step": 43000 },
  { "loss": 0.5888, "grad_norm": 3.4720981121063232, "learning_rate": 5.289918809201624e-06, "epoch": 7.357916102841678, "step": 43500 },
  { "loss": 0.576, "grad_norm": 2.8364577293395996, "learning_rate": 5.120771312584573e-06, "epoch": 7.442489851150203, "step": 44000 },
  { "loss": 0.5759, "grad_norm": 3.178103446960449, "learning_rate": 4.951623815967524e-06, "epoch": 7.527063599458728, "step": 44500 },
  { "loss": 0.5742, "grad_norm": 3.5063467025756836, "learning_rate": 4.782476319350474e-06, "epoch": 7.611637347767253, "step": 45000 },
  { "loss": 0.5912, "grad_norm": 2.37205171585083, "learning_rate": 4.613328822733424e-06, "epoch": 7.696211096075778, "step": 45500 },
  { "loss": 0.575, "grad_norm": 3.2511661052703857, "learning_rate": 4.444181326116374e-06, "epoch": 7.7807848443843035, "step": 46000 },
  { "loss": 0.5724, "grad_norm": 3.2974693775177, "learning_rate": 4.275033829499324e-06, "epoch": 7.865358592692828, "step": 46500 },
  { "loss": 0.5745, "grad_norm": 3.180819511413574, "learning_rate": 4.105886332882274e-06, "epoch": 7.949932341001353, "step": 47000 },
  { "loss": 0.5697, "grad_norm": 2.4791033267974854, "learning_rate": 3.937077131258458e-06, "epoch": 8.034506089309879, "step": 47500 },
  { "loss": 0.556, "grad_norm": 3.5898892879486084, "learning_rate": 3.7679296346414073e-06, "epoch": 8.119079837618404, "step": 48000 },
  { "loss": 0.5726, "grad_norm": 2.7320892810821533, "learning_rate": 3.5987821380243577e-06, "epoch": 8.203653585926928, "step": 48500 },
  { "loss": 0.5603, "grad_norm": 3.3177103996276855, "learning_rate": 3.429634641407307e-06, "epoch": 8.288227334235453, "step": 49000 },
  { "loss": 0.558, "grad_norm": 2.1732170581817627, "learning_rate": 3.2604871447902575e-06, "epoch": 8.372801082543978, "step": 49500 },
  { "loss": 0.5631, "grad_norm": 3.1351735591888428, "learning_rate": 3.091339648173207e-06, "epoch": 8.457374830852503, "step": 50000 },
  { "loss": 0.5486, "grad_norm": 2.601547956466675, "learning_rate": 2.9221921515561573e-06, "epoch": 8.541948579161028, "step": 50500 },
  { "loss": 0.5706, "grad_norm": 4.160942554473877, "learning_rate": 2.753382949932341e-06, "epoch": 8.626522327469553, "step": 51000 },
  { "loss": 0.5683, "grad_norm": 3.348295211791992, "learning_rate": 2.5842354533152914e-06, "epoch": 8.711096075778078, "step": 51500 },
  { "loss": 0.564, "grad_norm": 3.148343563079834, "learning_rate": 2.415087956698241e-06, "epoch": 8.795669824086604, "step": 52000 },
  { "loss": 0.5725, "grad_norm": 3.285578489303589, "learning_rate": 2.246278755074425e-06, "epoch": 8.880243572395129, "step": 52500 },
  { "loss": 0.567, "grad_norm": 3.201730251312256, "learning_rate": 2.077131258457375e-06, "epoch": 8.964817320703654, "step": 53000 },
  { "loss": 0.553, "grad_norm": 3.166001796722412, "learning_rate": 1.907983761840325e-06, "epoch": 9.049391069012179, "step": 53500 },
  { "loss": 0.5614, "grad_norm": 3.105032444000244, "learning_rate": 1.7388362652232748e-06, "epoch": 9.133964817320704, "step": 54000 },
  { "loss": 0.5553, "grad_norm": 4.7028937339782715, "learning_rate": 1.5696887686062248e-06, "epoch": 9.21853856562923, "step": 54500 },
  { "loss": 0.552, "grad_norm": 3.5488646030426025, "learning_rate": 1.4005412719891747e-06, "epoch": 9.303112313937755, "step": 55000 },
  { "loss": 0.5483, "grad_norm": 2.8257858753204346, "learning_rate": 1.2313937753721246e-06, "epoch": 9.387686062246278, "step": 55500 },
  { "loss": 0.5476, "grad_norm": 4.221645355224609, "learning_rate": 1.0622462787550745e-06, "epoch": 9.472259810554803, "step": 56000 },
  { "loss": 0.5494, "grad_norm": 3.1773674488067627, "learning_rate": 8.930987821380243e-07, "epoch": 9.556833558863328, "step": 56500 },
  { "loss": 0.5477, "grad_norm": 2.9824230670928955, "learning_rate": 7.242895805142085e-07, "epoch": 9.641407307171853, "step": 57000 },
  { "loss": 0.5579, "grad_norm": 2.51481294631958, "learning_rate": 5.551420838971583e-07, "epoch": 9.725981055480379, "step": 57500 },
  { "loss": 0.5392, "grad_norm": 3.0290215015411377, "learning_rate": 3.8599458728010834e-07, "epoch": 9.810554803788904, "step": 58000 },
  { "loss": 0.5458, "grad_norm": 2.9967031478881836, "learning_rate": 2.168470906630582e-07, "epoch": 9.895128552097429, "step": 58500 },
  { "loss": 0.5434, "grad_norm": 2.6248016357421875, "learning_rate": 4.769959404600812e-08, "epoch": 9.979702300405954, "step": 59000 },
  { "train_runtime": 8128.9827, "train_samples_per_second": 232.692, "train_steps_per_second": 7.273, "total_flos": 4.123593985189478e+16, "train_loss": 0.7104815463735867, "epoch": 10.0, "step": 59120 },
  { "eval_loss": 0.7965446710586548, "eval_model_preparation_time": 0.002, "eval_bleu": 54.95663610516514, "eval_runtime": 6012.2223, "eval_samples_per_second": 3.496, "eval_steps_per_second": 0.055, "epoch": 10.0, "step": 59120 }
]