{
  "best_metric": 11.10726547241211,
  "best_model_checkpoint": "miner_id_24/checkpoint-50",
  "epoch": 1.5594541910331383,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.015594541910331383,
      "grad_norm": 1.3553541898727417,
      "learning_rate": 5.000000000000001e-07,
      "loss": 88.9667,
      "step": 1
    },
    {
      "epoch": 0.015594541910331383,
      "eval_loss": 11.111183166503906,
      "eval_runtime": 0.2874,
      "eval_samples_per_second": 375.846,
      "eval_steps_per_second": 93.962,
      "step": 1
    },
    {
      "epoch": 0.031189083820662766,
      "grad_norm": 1.4310979843139648,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 88.9521,
      "step": 2
    },
    {
      "epoch": 0.04678362573099415,
      "grad_norm": 1.448810338973999,
      "learning_rate": 1.5e-06,
      "loss": 88.9224,
      "step": 3
    },
    {
      "epoch": 0.06237816764132553,
      "grad_norm": 1.4280792474746704,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 88.9165,
      "step": 4
    },
    {
      "epoch": 0.07797270955165692,
      "grad_norm": 1.5008811950683594,
      "learning_rate": 2.5e-06,
      "loss": 88.9208,
      "step": 5
    },
    {
      "epoch": 0.0935672514619883,
      "grad_norm": 1.6087157726287842,
      "learning_rate": 3e-06,
      "loss": 88.9012,
      "step": 6
    },
    {
      "epoch": 0.10916179337231968,
      "grad_norm": 1.6683207750320435,
      "learning_rate": 3.5e-06,
      "loss": 88.9475,
      "step": 7
    },
    {
      "epoch": 0.12475633528265107,
      "grad_norm": 1.747448444366455,
      "learning_rate": 4.000000000000001e-06,
      "loss": 88.9432,
      "step": 8
    },
    {
      "epoch": 0.14035087719298245,
      "grad_norm": 1.678775668144226,
      "learning_rate": 4.5e-06,
      "loss": 88.9057,
      "step": 9
    },
    {
      "epoch": 0.15594541910331383,
      "grad_norm": 1.823146104812622,
      "learning_rate": 5e-06,
      "loss": 88.8821,
      "step": 10
    },
    {
      "epoch": 0.17153996101364521,
      "grad_norm": 1.9624767303466797,
      "learning_rate": 4.99847706754774e-06,
      "loss": 88.9016,
      "step": 11
    },
    {
      "epoch": 0.1871345029239766,
      "grad_norm": 1.9462685585021973,
      "learning_rate": 4.993910125649561e-06,
      "loss": 88.9103,
      "step": 12
    },
    {
      "epoch": 0.20272904483430798,
      "grad_norm": 2.1563684940338135,
      "learning_rate": 4.986304738420684e-06,
      "loss": 88.9475,
      "step": 13
    },
    {
      "epoch": 0.21832358674463936,
      "grad_norm": 2.48010516166687,
      "learning_rate": 4.975670171853926e-06,
      "loss": 88.932,
      "step": 14
    },
    {
      "epoch": 0.23391812865497075,
      "grad_norm": 3.1107337474823,
      "learning_rate": 4.962019382530521e-06,
      "loss": 88.8091,
      "step": 15
    },
    {
      "epoch": 0.24951267056530213,
      "grad_norm": 5.877386569976807,
      "learning_rate": 4.9453690018345144e-06,
      "loss": 88.589,
      "step": 16
    },
    {
      "epoch": 0.2651072124756335,
      "grad_norm": 1.4020336866378784,
      "learning_rate": 4.925739315689991e-06,
      "loss": 88.9679,
      "step": 17
    },
    {
      "epoch": 0.2807017543859649,
      "grad_norm": 1.4191652536392212,
      "learning_rate": 4.903154239845798e-06,
      "loss": 88.9306,
      "step": 18
    },
    {
      "epoch": 0.2962962962962963,
      "grad_norm": 1.4291287660598755,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 88.9514,
      "step": 19
    },
    {
      "epoch": 0.31189083820662766,
      "grad_norm": 1.5101865530014038,
      "learning_rate": 4.849231551964771e-06,
      "loss": 88.9267,
      "step": 20
    },
    {
      "epoch": 0.32748538011695905,
      "grad_norm": 1.5891046524047852,
      "learning_rate": 4.817959636416969e-06,
      "loss": 88.9252,
      "step": 21
    },
    {
      "epoch": 0.34307992202729043,
      "grad_norm": 1.5987111330032349,
      "learning_rate": 4.783863644106502e-06,
      "loss": 88.9014,
      "step": 22
    },
    {
      "epoch": 0.3586744639376218,
      "grad_norm": 1.6611799001693726,
      "learning_rate": 4.746985115747918e-06,
      "loss": 88.9222,
      "step": 23
    },
    {
      "epoch": 0.3742690058479532,
      "grad_norm": 1.675337314605713,
      "learning_rate": 4.707368982147318e-06,
      "loss": 88.9076,
      "step": 24
    },
    {
      "epoch": 0.3898635477582846,
      "grad_norm": 1.729783058166504,
      "learning_rate": 4.665063509461098e-06,
      "loss": 88.9013,
      "step": 25
    },
    {
      "epoch": 0.40545808966861596,
      "grad_norm": 1.9155665636062622,
      "learning_rate": 4.620120240391065e-06,
      "loss": 88.8678,
      "step": 26
    },
    {
      "epoch": 0.42105263157894735,
      "grad_norm": 1.9021366834640503,
      "learning_rate": 4.572593931387604e-06,
      "loss": 88.9318,
      "step": 27
    },
    {
      "epoch": 0.43664717348927873,
      "grad_norm": 2.0137956142425537,
      "learning_rate": 4.522542485937369e-06,
      "loss": 88.8439,
      "step": 28
    },
    {
      "epoch": 0.4522417153996101,
      "grad_norm": 2.1356048583984375,
      "learning_rate": 4.470026884016805e-06,
      "loss": 88.9014,
      "step": 29
    },
    {
      "epoch": 0.4678362573099415,
      "grad_norm": 2.3856770992279053,
      "learning_rate": 4.415111107797445e-06,
      "loss": 88.8764,
      "step": 30
    },
    {
      "epoch": 0.4834307992202729,
      "grad_norm": 2.872187852859497,
      "learning_rate": 4.357862063693486e-06,
      "loss": 88.8898,
      "step": 31
    },
    {
      "epoch": 0.49902534113060426,
      "grad_norm": 4.954247951507568,
      "learning_rate": 4.2983495008466285e-06,
      "loss": 88.7124,
      "step": 32
    },
    {
      "epoch": 0.5146198830409356,
      "grad_norm": 1.4366636276245117,
      "learning_rate": 4.236645926147493e-06,
      "loss": 88.9227,
      "step": 33
    },
    {
      "epoch": 0.530214424951267,
      "grad_norm": 1.502366304397583,
      "learning_rate": 4.172826515897146e-06,
      "loss": 88.9448,
      "step": 34
    },
    {
      "epoch": 0.5458089668615984,
      "grad_norm": 1.4580628871917725,
      "learning_rate": 4.106969024216348e-06,
      "loss": 88.904,
      "step": 35
    },
    {
      "epoch": 0.5614035087719298,
      "grad_norm": 1.4254974126815796,
      "learning_rate": 4.039153688314146e-06,
      "loss": 88.9466,
      "step": 36
    },
    {
      "epoch": 0.5769980506822612,
      "grad_norm": 1.559779405593872,
      "learning_rate": 3.969463130731183e-06,
      "loss": 88.8986,
      "step": 37
    },
    {
      "epoch": 0.5925925925925926,
      "grad_norm": 1.6463968753814697,
      "learning_rate": 3.897982258676867e-06,
      "loss": 88.915,
      "step": 38
    },
    {
      "epoch": 0.6081871345029239,
      "grad_norm": 1.7474920749664307,
      "learning_rate": 3.824798160583012e-06,
      "loss": 88.9155,
      "step": 39
    },
    {
      "epoch": 0.6237816764132553,
      "grad_norm": 1.8197089433670044,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 88.871,
      "step": 40
    },
    {
      "epoch": 0.6393762183235867,
      "grad_norm": 1.8057571649551392,
      "learning_rate": 3.6736789069647273e-06,
      "loss": 88.9049,
      "step": 41
    },
    {
      "epoch": 0.6549707602339181,
      "grad_norm": 1.847694754600525,
      "learning_rate": 3.595927866972694e-06,
      "loss": 88.8932,
      "step": 42
    },
    {
      "epoch": 0.6705653021442495,
      "grad_norm": 1.9483572244644165,
      "learning_rate": 3.516841607689501e-06,
      "loss": 88.8755,
      "step": 43
    },
    {
      "epoch": 0.6861598440545809,
      "grad_norm": 2.0147557258605957,
      "learning_rate": 3.436516483539781e-06,
      "loss": 88.9301,
      "step": 44
    },
    {
      "epoch": 0.7017543859649122,
      "grad_norm": 2.302340269088745,
      "learning_rate": 3.3550503583141726e-06,
      "loss": 88.7964,
      "step": 45
    },
    {
      "epoch": 0.7173489278752436,
      "grad_norm": 2.700557231903076,
      "learning_rate": 3.272542485937369e-06,
      "loss": 88.7984,
      "step": 46
    },
    {
      "epoch": 0.732943469785575,
      "grad_norm": 3.645312547683716,
      "learning_rate": 3.189093389542498e-06,
      "loss": 88.781,
      "step": 47
    },
    {
      "epoch": 0.7485380116959064,
      "grad_norm": 5.459611892700195,
      "learning_rate": 3.1048047389991693e-06,
      "loss": 88.6414,
      "step": 48
    },
    {
      "epoch": 0.7641325536062378,
      "grad_norm": 1.5512522459030151,
      "learning_rate": 3.019779227044398e-06,
      "loss": 88.9102,
      "step": 49
    },
    {
      "epoch": 0.7797270955165692,
      "grad_norm": 1.5456151962280273,
      "learning_rate": 2.9341204441673267e-06,
      "loss": 88.9206,
      "step": 50
    },
    {
      "epoch": 0.7797270955165692,
      "eval_loss": 11.10726547241211,
      "eval_runtime": 0.2974,
      "eval_samples_per_second": 363.086,
      "eval_steps_per_second": 90.772,
      "step": 50
    },
    {
      "epoch": 0.7953216374269005,
      "grad_norm": 1.4868738651275635,
      "learning_rate": 2.847932752400164e-06,
      "loss": 88.9165,
      "step": 51
    },
    {
      "epoch": 0.8109161793372319,
      "grad_norm": 1.5754703283309937,
      "learning_rate": 2.761321158169134e-06,
      "loss": 88.8994,
      "step": 52
    },
    {
      "epoch": 0.8265107212475633,
      "grad_norm": 1.521441102027893,
      "learning_rate": 2.6743911843603134e-06,
      "loss": 88.8922,
      "step": 53
    },
    {
      "epoch": 0.8421052631578947,
      "grad_norm": 1.5741850137710571,
      "learning_rate": 2.587248741756253e-06,
      "loss": 88.9125,
      "step": 54
    },
    {
      "epoch": 0.8576998050682261,
      "grad_norm": 1.6995844841003418,
      "learning_rate": 2.5e-06,
      "loss": 88.8839,
      "step": 55
    },
    {
      "epoch": 0.8732943469785575,
      "grad_norm": 1.7483824491500854,
      "learning_rate": 2.4127512582437486e-06,
      "loss": 88.8931,
      "step": 56
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 1.6258654594421387,
      "learning_rate": 2.325608815639687e-06,
      "loss": 88.9227,
      "step": 57
    },
    {
      "epoch": 0.9044834307992202,
      "grad_norm": 1.6917520761489868,
      "learning_rate": 2.238678841830867e-06,
      "loss": 88.8958,
      "step": 58
    },
    {
      "epoch": 0.9200779727095516,
      "grad_norm": 1.9397125244140625,
      "learning_rate": 2.1520672475998374e-06,
      "loss": 88.8976,
      "step": 59
    },
    {
      "epoch": 0.935672514619883,
      "grad_norm": 2.0061938762664795,
      "learning_rate": 2.0658795558326745e-06,
      "loss": 88.8821,
      "step": 60
    },
    {
      "epoch": 0.9512670565302144,
      "grad_norm": 2.2613093852996826,
      "learning_rate": 1.9802207729556023e-06,
      "loss": 88.9129,
      "step": 61
    },
    {
      "epoch": 0.9668615984405458,
      "grad_norm": 2.683389186859131,
      "learning_rate": 1.895195261000831e-06,
      "loss": 88.7381,
      "step": 62
    },
    {
      "epoch": 0.9824561403508771,
      "grad_norm": 3.433419704437256,
      "learning_rate": 1.8109066104575023e-06,
      "loss": 88.8607,
      "step": 63
    },
    {
      "epoch": 0.9980506822612085,
      "grad_norm": 6.409455299377441,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 88.5442,
      "step": 64
    },
    {
      "epoch": 1.01364522417154,
      "grad_norm": 1.4463061094284058,
      "learning_rate": 1.6449496416858285e-06,
      "loss": 88.9063,
      "step": 65
    },
    {
      "epoch": 1.0292397660818713,
      "grad_norm": 1.4709982872009277,
      "learning_rate": 1.56348351646022e-06,
      "loss": 88.9036,
      "step": 66
    },
    {
      "epoch": 1.0448343079922027,
      "grad_norm": 1.5933891534805298,
      "learning_rate": 1.4831583923105e-06,
      "loss": 88.9131,
      "step": 67
    },
    {
      "epoch": 1.060428849902534,
      "grad_norm": 1.418686032295227,
      "learning_rate": 1.4040721330273063e-06,
      "loss": 88.8879,
      "step": 68
    },
    {
      "epoch": 1.0760233918128654,
      "grad_norm": 1.5968884229660034,
      "learning_rate": 1.3263210930352737e-06,
      "loss": 88.8766,
      "step": 69
    },
    {
      "epoch": 1.0916179337231968,
      "grad_norm": 1.7474418878555298,
      "learning_rate": 1.2500000000000007e-06,
      "loss": 88.9008,
      "step": 70
    },
    {
      "epoch": 1.1072124756335282,
      "grad_norm": 1.7670066356658936,
      "learning_rate": 1.1752018394169882e-06,
      "loss": 88.9295,
      "step": 71
    },
    {
      "epoch": 1.1228070175438596,
      "grad_norm": 1.739268183708191,
      "learning_rate": 1.1020177413231334e-06,
      "loss": 88.8783,
      "step": 72
    },
    {
      "epoch": 1.138401559454191,
      "grad_norm": 1.924734354019165,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 88.9094,
      "step": 73
    },
    {
      "epoch": 1.1539961013645224,
      "grad_norm": 1.8002381324768066,
      "learning_rate": 9.608463116858544e-07,
      "loss": 88.9047,
      "step": 74
    },
    {
      "epoch": 1.1695906432748537,
      "grad_norm": 1.9778398275375366,
      "learning_rate": 8.930309757836517e-07,
      "loss": 88.8734,
      "step": 75
    },
    {
      "epoch": 1.1851851851851851,
      "grad_norm": 2.0487163066864014,
      "learning_rate": 8.271734841028553e-07,
      "loss": 88.8375,
      "step": 76
    },
    {
      "epoch": 1.2007797270955165,
      "grad_norm": 2.143615484237671,
      "learning_rate": 7.633540738525066e-07,
      "loss": 88.8514,
      "step": 77
    },
    {
      "epoch": 1.2163742690058479,
      "grad_norm": 2.540370225906372,
      "learning_rate": 7.016504991533727e-07,
      "loss": 88.7744,
      "step": 78
    },
    {
      "epoch": 1.2319688109161793,
      "grad_norm": 3.210033416748047,
      "learning_rate": 6.421379363065142e-07,
      "loss": 88.7798,
      "step": 79
    },
    {
      "epoch": 1.2475633528265107,
      "grad_norm": 5.062490463256836,
      "learning_rate": 5.848888922025553e-07,
      "loss": 88.712,
      "step": 80
    },
    {
      "epoch": 1.263157894736842,
      "grad_norm": 2.2439610958099365,
      "learning_rate": 5.299731159831953e-07,
      "loss": 88.918,
      "step": 81
    },
    {
      "epoch": 1.2787524366471734,
      "grad_norm": 1.4683839082717896,
      "learning_rate": 4.774575140626317e-07,
      "loss": 88.9326,
      "step": 82
    },
    {
      "epoch": 1.2943469785575048,
      "grad_norm": 1.4734752178192139,
      "learning_rate": 4.27406068612396e-07,
      "loss": 88.9029,
      "step": 83
    },
    {
      "epoch": 1.3099415204678362,
      "grad_norm": 1.5223207473754883,
      "learning_rate": 3.798797596089351e-07,
      "loss": 88.9191,
      "step": 84
    },
    {
      "epoch": 1.3255360623781676,
      "grad_norm": 1.5164719820022583,
      "learning_rate": 3.3493649053890325e-07,
      "loss": 88.8715,
      "step": 85
    },
    {
      "epoch": 1.341130604288499,
      "grad_norm": 1.6114667654037476,
      "learning_rate": 2.9263101785268253e-07,
      "loss": 88.8649,
      "step": 86
    },
    {
      "epoch": 1.3567251461988303,
      "grad_norm": 1.7107863426208496,
      "learning_rate": 2.53014884252083e-07,
      "loss": 88.8973,
      "step": 87
    },
    {
      "epoch": 1.3723196881091617,
      "grad_norm": 1.7419860363006592,
      "learning_rate": 2.1613635589349756e-07,
      "loss": 88.8746,
      "step": 88
    },
    {
      "epoch": 1.387914230019493,
      "grad_norm": 1.6827164888381958,
      "learning_rate": 1.8204036358303173e-07,
      "loss": 88.8891,
      "step": 89
    },
    {
      "epoch": 1.4035087719298245,
      "grad_norm": 1.8386940956115723,
      "learning_rate": 1.507684480352292e-07,
      "loss": 88.8503,
      "step": 90
    },
    {
      "epoch": 1.4191033138401559,
      "grad_norm": 1.9322350025177002,
      "learning_rate": 1.223587092621162e-07,
      "loss": 88.8244,
      "step": 91
    },
    {
      "epoch": 1.4346978557504872,
      "grad_norm": 1.968429684638977,
      "learning_rate": 9.684576015420277e-08,
      "loss": 88.882,
      "step": 92
    },
    {
      "epoch": 1.4502923976608186,
      "grad_norm": 2.1462831497192383,
      "learning_rate": 7.426068431000883e-08,
      "loss": 88.8721,
      "step": 93
    },
    {
      "epoch": 1.46588693957115,
      "grad_norm": 2.4528849124908447,
      "learning_rate": 5.463099816548578e-08,
      "loss": 88.8235,
      "step": 94
    },
    {
      "epoch": 1.4814814814814814,
      "grad_norm": 2.8609659671783447,
      "learning_rate": 3.798061746947995e-08,
      "loss": 88.7827,
      "step": 95
    },
    {
      "epoch": 1.4970760233918128,
      "grad_norm": 4.6707563400268555,
      "learning_rate": 2.4329828146074096e-08,
      "loss": 88.6615,
      "step": 96
    },
    {
      "epoch": 1.5126705653021442,
      "grad_norm": 2.532287120819092,
      "learning_rate": 1.3695261579316776e-08,
      "loss": 88.8792,
      "step": 97
    },
    {
      "epoch": 1.5282651072124755,
      "grad_norm": 1.508450984954834,
      "learning_rate": 6.089874350439507e-09,
      "loss": 88.9139,
      "step": 98
    },
    {
      "epoch": 1.543859649122807,
      "grad_norm": 1.4835156202316284,
      "learning_rate": 1.5229324522605949e-09,
      "loss": 88.9422,
      "step": 99
    },
    {
      "epoch": 1.5594541910331383,
      "grad_norm": 1.498764991760254,
      "learning_rate": 0.0,
      "loss": 88.9188,
      "step": 100
    },
    {
      "epoch": 1.5594541910331383,
      "eval_loss": 11.107521057128906,
      "eval_runtime": 0.3045,
      "eval_samples_per_second": 354.702,
      "eval_steps_per_second": 88.675,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 1
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8592815554560.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}