{
  "best_metric": 1.4722347259521484,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.5899705014749262,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0058997050147492625,
      "grad_norm": 2.9862964153289795,
      "learning_rate": 5.000000000000001e-07,
      "loss": 11.9031,
      "step": 1
    },
    {
      "epoch": 0.0058997050147492625,
      "eval_loss": 2.5443007946014404,
      "eval_runtime": 32.2234,
      "eval_samples_per_second": 8.876,
      "eval_steps_per_second": 2.234,
      "step": 1
    },
    {
      "epoch": 0.011799410029498525,
      "grad_norm": 3.709792137145996,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 12.6814,
      "step": 2
    },
    {
      "epoch": 0.017699115044247787,
      "grad_norm": 4.134566783905029,
      "learning_rate": 1.5e-06,
      "loss": 13.174,
      "step": 3
    },
    {
      "epoch": 0.02359882005899705,
      "grad_norm": 4.506526470184326,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 12.758,
      "step": 4
    },
    {
      "epoch": 0.029498525073746312,
      "grad_norm": 4.452362537384033,
      "learning_rate": 2.5e-06,
      "loss": 10.6929,
      "step": 5
    },
    {
      "epoch": 0.035398230088495575,
      "grad_norm": 5.046873092651367,
      "learning_rate": 3e-06,
      "loss": 10.9967,
      "step": 6
    },
    {
      "epoch": 0.04129793510324484,
      "grad_norm": 6.876310348510742,
      "learning_rate": 3.5e-06,
      "loss": 12.946,
      "step": 7
    },
    {
      "epoch": 0.0471976401179941,
      "grad_norm": 8.722047805786133,
      "learning_rate": 4.000000000000001e-06,
      "loss": 14.8323,
      "step": 8
    },
    {
      "epoch": 0.05309734513274336,
      "grad_norm": 8.891469955444336,
      "learning_rate": 4.5e-06,
      "loss": 14.7489,
      "step": 9
    },
    {
      "epoch": 0.058997050147492625,
      "grad_norm": 10.75536823272705,
      "learning_rate": 5e-06,
      "loss": 14.0431,
      "step": 10
    },
    {
      "epoch": 0.06489675516224189,
      "grad_norm": 12.062834739685059,
      "learning_rate": 4.99847706754774e-06,
      "loss": 16.1985,
      "step": 11
    },
    {
      "epoch": 0.07079646017699115,
      "grad_norm": 12.394959449768066,
      "learning_rate": 4.993910125649561e-06,
      "loss": 16.0138,
      "step": 12
    },
    {
      "epoch": 0.07669616519174041,
      "grad_norm": 13.049904823303223,
      "learning_rate": 4.986304738420684e-06,
      "loss": 17.1506,
      "step": 13
    },
    {
      "epoch": 0.08259587020648967,
      "grad_norm": 13.703821182250977,
      "learning_rate": 4.975670171853926e-06,
      "loss": 17.1909,
      "step": 14
    },
    {
      "epoch": 0.08849557522123894,
      "grad_norm": 15.688636779785156,
      "learning_rate": 4.962019382530521e-06,
      "loss": 17.8761,
      "step": 15
    },
    {
      "epoch": 0.0943952802359882,
      "grad_norm": 18.053247451782227,
      "learning_rate": 4.9453690018345144e-06,
      "loss": 19.2284,
      "step": 16
    },
    {
      "epoch": 0.10029498525073746,
      "grad_norm": 18.931636810302734,
      "learning_rate": 4.925739315689991e-06,
      "loss": 20.131,
      "step": 17
    },
    {
      "epoch": 0.10619469026548672,
      "grad_norm": 21.049278259277344,
      "learning_rate": 4.903154239845798e-06,
      "loss": 19.2567,
      "step": 18
    },
    {
      "epoch": 0.11209439528023599,
      "grad_norm": 20.777387619018555,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 18.6328,
      "step": 19
    },
    {
      "epoch": 0.11799410029498525,
      "grad_norm": 22.106796264648438,
      "learning_rate": 4.849231551964771e-06,
      "loss": 19.1483,
      "step": 20
    },
    {
      "epoch": 0.12389380530973451,
      "grad_norm": 22.35647201538086,
      "learning_rate": 4.817959636416969e-06,
      "loss": 19.4738,
      "step": 21
    },
    {
      "epoch": 0.12979351032448377,
      "grad_norm": 22.263975143432617,
      "learning_rate": 4.783863644106502e-06,
      "loss": 20.9651,
      "step": 22
    },
    {
      "epoch": 0.13569321533923304,
      "grad_norm": 23.579002380371094,
      "learning_rate": 4.746985115747918e-06,
      "loss": 20.4206,
      "step": 23
    },
    {
      "epoch": 0.1415929203539823,
      "grad_norm": 22.080753326416016,
      "learning_rate": 4.707368982147318e-06,
      "loss": 18.6608,
      "step": 24
    },
    {
      "epoch": 0.14749262536873156,
      "grad_norm": 27.090911865234375,
      "learning_rate": 4.665063509461098e-06,
      "loss": 21.1101,
      "step": 25
    },
    {
      "epoch": 0.15339233038348082,
      "grad_norm": 24.73910903930664,
      "learning_rate": 4.620120240391065e-06,
      "loss": 21.0186,
      "step": 26
    },
    {
      "epoch": 0.1592920353982301,
      "grad_norm": 29.831039428710938,
      "learning_rate": 4.572593931387604e-06,
      "loss": 18.9938,
      "step": 27
    },
    {
      "epoch": 0.16519174041297935,
      "grad_norm": 28.281824111938477,
      "learning_rate": 4.522542485937369e-06,
      "loss": 21.8787,
      "step": 28
    },
    {
      "epoch": 0.1710914454277286,
      "grad_norm": 22.166667938232422,
      "learning_rate": 4.470026884016805e-06,
      "loss": 19.2404,
      "step": 29
    },
    {
      "epoch": 0.17699115044247787,
      "grad_norm": 27.089574813842773,
      "learning_rate": 4.415111107797445e-06,
      "loss": 20.9614,
      "step": 30
    },
    {
      "epoch": 0.18289085545722714,
      "grad_norm": 27.603740692138672,
      "learning_rate": 4.357862063693486e-06,
      "loss": 20.543,
      "step": 31
    },
    {
      "epoch": 0.1887905604719764,
      "grad_norm": 26.96535873413086,
      "learning_rate": 4.2983495008466285e-06,
      "loss": 19.2133,
      "step": 32
    },
    {
      "epoch": 0.19469026548672566,
      "grad_norm": 24.52537727355957,
      "learning_rate": 4.236645926147493e-06,
      "loss": 19.1972,
      "step": 33
    },
    {
      "epoch": 0.20058997050147492,
      "grad_norm": 30.096757888793945,
      "learning_rate": 4.172826515897146e-06,
      "loss": 20.1775,
      "step": 34
    },
    {
      "epoch": 0.20648967551622419,
      "grad_norm": 25.8487606048584,
      "learning_rate": 4.106969024216348e-06,
      "loss": 19.8463,
      "step": 35
    },
    {
      "epoch": 0.21238938053097345,
      "grad_norm": 27.982152938842773,
      "learning_rate": 4.039153688314146e-06,
      "loss": 24.0711,
      "step": 36
    },
    {
      "epoch": 0.2182890855457227,
      "grad_norm": 31.729448318481445,
      "learning_rate": 3.969463130731183e-06,
      "loss": 20.8358,
      "step": 37
    },
    {
      "epoch": 0.22418879056047197,
      "grad_norm": 33.05440139770508,
      "learning_rate": 3.897982258676867e-06,
      "loss": 20.9184,
      "step": 38
    },
    {
      "epoch": 0.23008849557522124,
      "grad_norm": 33.20439147949219,
      "learning_rate": 3.824798160583012e-06,
      "loss": 22.1186,
      "step": 39
    },
    {
      "epoch": 0.2359882005899705,
      "grad_norm": 35.490440368652344,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 22.7667,
      "step": 40
    },
    {
      "epoch": 0.24188790560471976,
      "grad_norm": 36.88962173461914,
      "learning_rate": 3.6736789069647273e-06,
      "loss": 23.1779,
      "step": 41
    },
    {
      "epoch": 0.24778761061946902,
      "grad_norm": 50.72957229614258,
      "learning_rate": 3.595927866972694e-06,
      "loss": 24.7407,
      "step": 42
    },
    {
      "epoch": 0.2536873156342183,
      "grad_norm": 3.6904404163360596,
      "learning_rate": 3.516841607689501e-06,
      "loss": 10.9707,
      "step": 43
    },
    {
      "epoch": 0.25958702064896755,
      "grad_norm": 5.453059196472168,
      "learning_rate": 3.436516483539781e-06,
      "loss": 13.0506,
      "step": 44
    },
    {
      "epoch": 0.26548672566371684,
      "grad_norm": 4.847866535186768,
      "learning_rate": 3.3550503583141726e-06,
      "loss": 11.3953,
      "step": 45
    },
    {
      "epoch": 0.2713864306784661,
      "grad_norm": 5.430877685546875,
      "learning_rate": 3.272542485937369e-06,
      "loss": 9.8737,
      "step": 46
    },
    {
      "epoch": 0.27728613569321536,
      "grad_norm": 8.19796085357666,
      "learning_rate": 3.189093389542498e-06,
      "loss": 10.3856,
      "step": 47
    },
    {
      "epoch": 0.2831858407079646,
      "grad_norm": 8.586030006408691,
      "learning_rate": 3.1048047389991693e-06,
      "loss": 11.5376,
      "step": 48
    },
    {
      "epoch": 0.2890855457227139,
      "grad_norm": 8.650554656982422,
      "learning_rate": 3.019779227044398e-06,
      "loss": 11.1728,
      "step": 49
    },
    {
      "epoch": 0.2949852507374631,
      "grad_norm": 11.194820404052734,
      "learning_rate": 2.9341204441673267e-06,
      "loss": 12.0976,
      "step": 50
    },
    {
      "epoch": 0.2949852507374631,
      "eval_loss": 1.8799346685409546,
      "eval_runtime": 32.683,
      "eval_samples_per_second": 8.751,
      "eval_steps_per_second": 2.203,
      "step": 50
    },
    {
      "epoch": 0.3008849557522124,
      "grad_norm": 14.157112121582031,
      "learning_rate": 2.847932752400164e-06,
      "loss": 11.7297,
      "step": 51
    },
    {
      "epoch": 0.30678466076696165,
      "grad_norm": 13.268279075622559,
      "learning_rate": 2.761321158169134e-06,
      "loss": 11.9854,
      "step": 52
    },
    {
      "epoch": 0.31268436578171094,
      "grad_norm": 14.777017593383789,
      "learning_rate": 2.6743911843603134e-06,
      "loss": 11.6982,
      "step": 53
    },
    {
      "epoch": 0.3185840707964602,
      "grad_norm": 15.651505470275879,
      "learning_rate": 2.587248741756253e-06,
      "loss": 12.7209,
      "step": 54
    },
    {
      "epoch": 0.32448377581120946,
      "grad_norm": 16.862051010131836,
      "learning_rate": 2.5e-06,
      "loss": 12.7352,
      "step": 55
    },
    {
      "epoch": 0.3303834808259587,
      "grad_norm": 16.122634887695312,
      "learning_rate": 2.4127512582437486e-06,
      "loss": 12.8166,
      "step": 56
    },
    {
      "epoch": 0.336283185840708,
      "grad_norm": 20.204120635986328,
      "learning_rate": 2.325608815639687e-06,
      "loss": 13.0973,
      "step": 57
    },
    {
      "epoch": 0.3421828908554572,
      "grad_norm": 15.840360641479492,
      "learning_rate": 2.238678841830867e-06,
      "loss": 13.2673,
      "step": 58
    },
    {
      "epoch": 0.3480825958702065,
      "grad_norm": 27.855974197387695,
      "learning_rate": 2.1520672475998374e-06,
      "loss": 13.2521,
      "step": 59
    },
    {
      "epoch": 0.35398230088495575,
      "grad_norm": 21.352693557739258,
      "learning_rate": 2.0658795558326745e-06,
      "loss": 13.3141,
      "step": 60
    },
    {
      "epoch": 0.35988200589970504,
      "grad_norm": 24.652055740356445,
      "learning_rate": 1.9802207729556023e-06,
      "loss": 13.715,
      "step": 61
    },
    {
      "epoch": 0.36578171091445427,
      "grad_norm": 24.22166633605957,
      "learning_rate": 1.895195261000831e-06,
      "loss": 12.7998,
      "step": 62
    },
    {
      "epoch": 0.37168141592920356,
      "grad_norm": 22.723926544189453,
      "learning_rate": 1.8109066104575023e-06,
      "loss": 14.2915,
      "step": 63
    },
    {
      "epoch": 0.3775811209439528,
      "grad_norm": 25.48423194885254,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 13.0007,
      "step": 64
    },
    {
      "epoch": 0.3834808259587021,
      "grad_norm": 23.430404663085938,
      "learning_rate": 1.6449496416858285e-06,
      "loss": 14.1499,
      "step": 65
    },
    {
      "epoch": 0.3893805309734513,
      "grad_norm": 25.634765625,
      "learning_rate": 1.56348351646022e-06,
      "loss": 12.2943,
      "step": 66
    },
    {
      "epoch": 0.3952802359882006,
      "grad_norm": 29.16959571838379,
      "learning_rate": 1.4831583923105e-06,
      "loss": 15.9917,
      "step": 67
    },
    {
      "epoch": 0.40117994100294985,
      "grad_norm": 26.52433204650879,
      "learning_rate": 1.4040721330273063e-06,
      "loss": 13.1748,
      "step": 68
    },
    {
      "epoch": 0.40707964601769914,
      "grad_norm": 23.716463088989258,
      "learning_rate": 1.3263210930352737e-06,
      "loss": 13.5076,
      "step": 69
    },
    {
      "epoch": 0.41297935103244837,
      "grad_norm": 25.928203582763672,
      "learning_rate": 1.2500000000000007e-06,
      "loss": 13.0781,
      "step": 70
    },
    {
      "epoch": 0.41887905604719766,
      "grad_norm": 25.973100662231445,
      "learning_rate": 1.1752018394169882e-06,
      "loss": 12.7629,
      "step": 71
    },
    {
      "epoch": 0.4247787610619469,
      "grad_norm": 26.729578018188477,
      "learning_rate": 1.1020177413231334e-06,
      "loss": 13.0884,
      "step": 72
    },
    {
      "epoch": 0.4306784660766962,
      "grad_norm": 28.534502029418945,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 14.8765,
      "step": 73
    },
    {
      "epoch": 0.4365781710914454,
      "grad_norm": 27.88771629333496,
      "learning_rate": 9.608463116858544e-07,
      "loss": 16.3264,
      "step": 74
    },
    {
      "epoch": 0.4424778761061947,
      "grad_norm": 35.43700408935547,
      "learning_rate": 8.930309757836517e-07,
      "loss": 13.3003,
      "step": 75
    },
    {
      "epoch": 0.44837758112094395,
      "grad_norm": 31.386333465576172,
      "learning_rate": 8.271734841028553e-07,
      "loss": 16.0848,
      "step": 76
    },
    {
      "epoch": 0.45427728613569324,
      "grad_norm": 33.588722229003906,
      "learning_rate": 7.633540738525066e-07,
      "loss": 14.6917,
      "step": 77
    },
    {
      "epoch": 0.46017699115044247,
      "grad_norm": 31.54501724243164,
      "learning_rate": 7.016504991533727e-07,
      "loss": 12.2341,
      "step": 78
    },
    {
      "epoch": 0.46607669616519176,
      "grad_norm": 39.24992370605469,
      "learning_rate": 6.421379363065142e-07,
      "loss": 14.9348,
      "step": 79
    },
    {
      "epoch": 0.471976401179941,
      "grad_norm": 38.33148193359375,
      "learning_rate": 5.848888922025553e-07,
      "loss": 11.4804,
      "step": 80
    },
    {
      "epoch": 0.4778761061946903,
      "grad_norm": 39.474124908447266,
      "learning_rate": 5.299731159831953e-07,
      "loss": 12.2075,
      "step": 81
    },
    {
      "epoch": 0.4837758112094395,
      "grad_norm": 40.61744689941406,
      "learning_rate": 4.774575140626317e-07,
      "loss": 15.4732,
      "step": 82
    },
    {
      "epoch": 0.4896755162241888,
      "grad_norm": 43.01291275024414,
      "learning_rate": 4.27406068612396e-07,
      "loss": 13.508,
      "step": 83
    },
    {
      "epoch": 0.49557522123893805,
      "grad_norm": 53.99359893798828,
      "learning_rate": 3.798797596089351e-07,
      "loss": 17.6752,
      "step": 84
    },
    {
      "epoch": 0.5014749262536873,
      "grad_norm": 4.3658342361450195,
      "learning_rate": 3.3493649053890325e-07,
      "loss": 9.9121,
      "step": 85
    },
    {
      "epoch": 0.5073746312684366,
      "grad_norm": 4.73631477355957,
      "learning_rate": 2.9263101785268253e-07,
      "loss": 12.0327,
      "step": 86
    },
    {
      "epoch": 0.5132743362831859,
      "grad_norm": 5.241606712341309,
      "learning_rate": 2.53014884252083e-07,
      "loss": 10.5648,
      "step": 87
    },
    {
      "epoch": 0.5191740412979351,
      "grad_norm": 6.079821586608887,
      "learning_rate": 2.1613635589349756e-07,
      "loss": 11.1205,
      "step": 88
    },
    {
      "epoch": 0.5250737463126843,
      "grad_norm": 6.707623481750488,
      "learning_rate": 1.8204036358303173e-07,
      "loss": 9.6277,
      "step": 89
    },
    {
      "epoch": 0.5309734513274337,
      "grad_norm": 7.61404275894165,
      "learning_rate": 1.507684480352292e-07,
      "loss": 7.6849,
      "step": 90
    },
    {
      "epoch": 0.5368731563421829,
      "grad_norm": 9.975937843322754,
      "learning_rate": 1.223587092621162e-07,
      "loss": 10.2248,
      "step": 91
    },
    {
      "epoch": 0.5427728613569321,
      "grad_norm": 11.2512845993042,
      "learning_rate": 9.684576015420277e-08,
      "loss": 9.7481,
      "step": 92
    },
    {
      "epoch": 0.5486725663716814,
      "grad_norm": 12.412166595458984,
      "learning_rate": 7.426068431000883e-08,
      "loss": 9.6559,
      "step": 93
    },
    {
      "epoch": 0.5545722713864307,
      "grad_norm": 13.59131145477295,
      "learning_rate": 5.463099816548578e-08,
      "loss": 10.219,
      "step": 94
    },
    {
      "epoch": 0.56047197640118,
      "grad_norm": 12.337257385253906,
      "learning_rate": 3.798061746947995e-08,
      "loss": 10.9511,
      "step": 95
    },
    {
      "epoch": 0.5663716814159292,
      "grad_norm": 14.506831169128418,
      "learning_rate": 2.4329828146074096e-08,
      "loss": 9.9813,
      "step": 96
    },
    {
      "epoch": 0.5722713864306784,
      "grad_norm": 16.134977340698242,
      "learning_rate": 1.3695261579316776e-08,
      "loss": 10.8139,
      "step": 97
    },
    {
      "epoch": 0.5781710914454278,
      "grad_norm": 16.2382755279541,
      "learning_rate": 6.089874350439507e-09,
      "loss": 11.4754,
      "step": 98
    },
    {
      "epoch": 0.584070796460177,
      "grad_norm": 16.110279083251953,
      "learning_rate": 1.5229324522605949e-09,
      "loss": 10.8895,
      "step": 99
    },
    {
      "epoch": 0.5899705014749262,
      "grad_norm": 19.847707748413086,
      "learning_rate": 0.0,
      "loss": 11.8983,
      "step": 100
    },
    {
      "epoch": 0.5899705014749262,
      "eval_loss": 1.4722347259521484,
      "eval_runtime": 32.6234,
      "eval_samples_per_second": 8.767,
      "eval_steps_per_second": 2.207,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.5391158788096e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}