{
  "best_metric": 3.9525041580200195,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.015006283881375326,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00015006283881375325,
      "grad_norm": 6.556857585906982,
      "learning_rate": 5.000000000000001e-07,
      "loss": 32.4793,
      "step": 1
    },
    {
      "epoch": 0.00015006283881375325,
      "eval_loss": 4.422780513763428,
      "eval_runtime": 1166.7634,
      "eval_samples_per_second": 9.62,
      "eval_steps_per_second": 2.405,
      "step": 1
    },
    {
      "epoch": 0.0003001256776275065,
      "grad_norm": 6.7071757316589355,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 33.3956,
      "step": 2
    },
    {
      "epoch": 0.0004501885164412598,
      "grad_norm": 7.008890628814697,
      "learning_rate": 1.5e-06,
      "loss": 33.3792,
      "step": 3
    },
    {
      "epoch": 0.000600251355255013,
      "grad_norm": 7.096307754516602,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 35.099,
      "step": 4
    },
    {
      "epoch": 0.0007503141940687663,
      "grad_norm": 7.1125569343566895,
      "learning_rate": 2.5e-06,
      "loss": 35.264,
      "step": 5
    },
    {
      "epoch": 0.0009003770328825196,
      "grad_norm": 7.323802947998047,
      "learning_rate": 3e-06,
      "loss": 34.7285,
      "step": 6
    },
    {
      "epoch": 0.001050439871696273,
      "grad_norm": 7.22035026550293,
      "learning_rate": 3.5e-06,
      "loss": 35.6589,
      "step": 7
    },
    {
      "epoch": 0.001200502710510026,
      "grad_norm": 7.584096908569336,
      "learning_rate": 4.000000000000001e-06,
      "loss": 36.3612,
      "step": 8
    },
    {
      "epoch": 0.0013505655493237794,
      "grad_norm": 8.085339546203613,
      "learning_rate": 4.5e-06,
      "loss": 35.9618,
      "step": 9
    },
    {
      "epoch": 0.0015006283881375327,
      "grad_norm": 9.453437805175781,
      "learning_rate": 5e-06,
      "loss": 37.0509,
      "step": 10
    },
    {
      "epoch": 0.0016506912269512858,
      "grad_norm": 10.464935302734375,
      "learning_rate": 4.99847706754774e-06,
      "loss": 39.5601,
      "step": 11
    },
    {
      "epoch": 0.0018007540657650391,
      "grad_norm": 9.660285949707031,
      "learning_rate": 4.993910125649561e-06,
      "loss": 37.7704,
      "step": 12
    },
    {
      "epoch": 0.0019508169045787923,
      "grad_norm": 11.434601783752441,
      "learning_rate": 4.986304738420684e-06,
      "loss": 39.5423,
      "step": 13
    },
    {
      "epoch": 0.002100879743392546,
      "grad_norm": 12.925939559936523,
      "learning_rate": 4.975670171853926e-06,
      "loss": 40.1121,
      "step": 14
    },
    {
      "epoch": 0.002250942582206299,
      "grad_norm": 13.035168647766113,
      "learning_rate": 4.962019382530521e-06,
      "loss": 39.6252,
      "step": 15
    },
    {
      "epoch": 0.002401005421020052,
      "grad_norm": 11.976016998291016,
      "learning_rate": 4.9453690018345144e-06,
      "loss": 39.7855,
      "step": 16
    },
    {
      "epoch": 0.0025510682598338056,
      "grad_norm": 14.432867050170898,
      "learning_rate": 4.925739315689991e-06,
      "loss": 43.1844,
      "step": 17
    },
    {
      "epoch": 0.0027011310986475587,
      "grad_norm": 13.167061805725098,
      "learning_rate": 4.903154239845798e-06,
      "loss": 42.9399,
      "step": 18
    },
    {
      "epoch": 0.002851193937461312,
      "grad_norm": 16.086027145385742,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 43.4687,
      "step": 19
    },
    {
      "epoch": 0.0030012567762750654,
      "grad_norm": 16.82833480834961,
      "learning_rate": 4.849231551964771e-06,
      "loss": 43.5851,
      "step": 20
    },
    {
      "epoch": 0.0031513196150888185,
      "grad_norm": 16.73269271850586,
      "learning_rate": 4.817959636416969e-06,
      "loss": 44.3198,
      "step": 21
    },
    {
      "epoch": 0.0033013824539025716,
      "grad_norm": 22.951553344726562,
      "learning_rate": 4.783863644106502e-06,
      "loss": 45.8731,
      "step": 22
    },
    {
      "epoch": 0.003451445292716325,
      "grad_norm": 19.36189842224121,
      "learning_rate": 4.746985115747918e-06,
      "loss": 45.582,
      "step": 23
    },
    {
      "epoch": 0.0036015081315300783,
      "grad_norm": 20.003419876098633,
      "learning_rate": 4.707368982147318e-06,
      "loss": 46.9365,
      "step": 24
    },
    {
      "epoch": 0.0037515709703438314,
      "grad_norm": 13.010815620422363,
      "learning_rate": 4.665063509461098e-06,
      "loss": 35.3997,
      "step": 25
    },
    {
      "epoch": 0.0039016338091575845,
      "grad_norm": 12.633563995361328,
      "learning_rate": 4.620120240391065e-06,
      "loss": 33.8044,
      "step": 26
    },
    {
      "epoch": 0.004051696647971338,
      "grad_norm": 12.934494972229004,
      "learning_rate": 4.572593931387604e-06,
      "loss": 34.0807,
      "step": 27
    },
    {
      "epoch": 0.004201759486785092,
      "grad_norm": 12.416427612304688,
      "learning_rate": 4.522542485937369e-06,
      "loss": 31.6505,
      "step": 28
    },
    {
      "epoch": 0.004351822325598845,
      "grad_norm": 12.711592674255371,
      "learning_rate": 4.470026884016805e-06,
      "loss": 33.807,
      "step": 29
    },
    {
      "epoch": 0.004501885164412598,
      "grad_norm": 13.012476921081543,
      "learning_rate": 4.415111107797445e-06,
      "loss": 33.2907,
      "step": 30
    },
    {
      "epoch": 0.004651948003226351,
      "grad_norm": 13.100297927856445,
      "learning_rate": 4.357862063693486e-06,
      "loss": 32.955,
      "step": 31
    },
    {
      "epoch": 0.004802010842040104,
      "grad_norm": 12.629734992980957,
      "learning_rate": 4.2983495008466285e-06,
      "loss": 33.4184,
      "step": 32
    },
    {
      "epoch": 0.004952073680853857,
      "grad_norm": 13.031803131103516,
      "learning_rate": 4.236645926147493e-06,
      "loss": 32.4434,
      "step": 33
    },
    {
      "epoch": 0.005102136519667611,
      "grad_norm": 13.07551383972168,
      "learning_rate": 4.172826515897146e-06,
      "loss": 31.8428,
      "step": 34
    },
    {
      "epoch": 0.005252199358481364,
      "grad_norm": 13.340009689331055,
      "learning_rate": 4.106969024216348e-06,
      "loss": 30.6754,
      "step": 35
    },
    {
      "epoch": 0.005402262197295117,
      "grad_norm": 13.459136962890625,
      "learning_rate": 4.039153688314146e-06,
      "loss": 32.3325,
      "step": 36
    },
    {
      "epoch": 0.0055523250361088705,
      "grad_norm": 13.509114265441895,
      "learning_rate": 3.969463130731183e-06,
      "loss": 30.3527,
      "step": 37
    },
    {
      "epoch": 0.005702387874922624,
      "grad_norm": 14.133980751037598,
      "learning_rate": 3.897982258676867e-06,
      "loss": 29.5306,
      "step": 38
    },
    {
      "epoch": 0.005852450713736377,
      "grad_norm": 13.122635841369629,
      "learning_rate": 3.824798160583012e-06,
      "loss": 27.8162,
      "step": 39
    },
    {
      "epoch": 0.006002513552550131,
      "grad_norm": 13.173026084899902,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 28.7461,
      "step": 40
    },
    {
      "epoch": 0.006152576391363884,
      "grad_norm": 15.021635055541992,
      "learning_rate": 3.6736789069647273e-06,
      "loss": 29.4984,
      "step": 41
    },
    {
      "epoch": 0.006302639230177637,
      "grad_norm": 14.209413528442383,
      "learning_rate": 3.595927866972694e-06,
      "loss": 30.0559,
      "step": 42
    },
    {
      "epoch": 0.00645270206899139,
      "grad_norm": 14.6431884765625,
      "learning_rate": 3.516841607689501e-06,
      "loss": 29.7791,
      "step": 43
    },
    {
      "epoch": 0.006602764907805143,
      "grad_norm": 12.084189414978027,
      "learning_rate": 3.436516483539781e-06,
      "loss": 28.6705,
      "step": 44
    },
    {
      "epoch": 0.006752827746618896,
      "grad_norm": 13.18972396850586,
      "learning_rate": 3.3550503583141726e-06,
      "loss": 28.7666,
      "step": 45
    },
    {
      "epoch": 0.00690289058543265,
      "grad_norm": 13.173684120178223,
      "learning_rate": 3.272542485937369e-06,
      "loss": 27.7091,
      "step": 46
    },
    {
      "epoch": 0.0070529534242464035,
      "grad_norm": 14.174872398376465,
      "learning_rate": 3.189093389542498e-06,
      "loss": 28.4409,
      "step": 47
    },
    {
      "epoch": 0.007203016263060157,
      "grad_norm": 13.488956451416016,
      "learning_rate": 3.1048047389991693e-06,
      "loss": 25.6084,
      "step": 48
    },
    {
      "epoch": 0.00735307910187391,
      "grad_norm": 15.413603782653809,
      "learning_rate": 3.019779227044398e-06,
      "loss": 26.7375,
      "step": 49
    },
    {
      "epoch": 0.007503141940687663,
      "grad_norm": 14.66285228729248,
      "learning_rate": 2.9341204441673267e-06,
      "loss": 28.3319,
      "step": 50
    },
    {
      "epoch": 0.007503141940687663,
      "eval_loss": 4.124388217926025,
      "eval_runtime": 1172.9983,
      "eval_samples_per_second": 9.569,
      "eval_steps_per_second": 2.392,
      "step": 50
    },
    {
      "epoch": 0.007653204779501416,
      "grad_norm": 9.979507446289062,
      "learning_rate": 2.847932752400164e-06,
      "loss": 30.6154,
      "step": 51
    },
    {
      "epoch": 0.007803267618315169,
      "grad_norm": 10.939770698547363,
      "learning_rate": 2.761321158169134e-06,
      "loss": 31.6489,
      "step": 52
    },
    {
      "epoch": 0.007953330457128923,
      "grad_norm": 11.191797256469727,
      "learning_rate": 2.6743911843603134e-06,
      "loss": 30.9841,
      "step": 53
    },
    {
      "epoch": 0.008103393295942675,
      "grad_norm": 11.720529556274414,
      "learning_rate": 2.587248741756253e-06,
      "loss": 32.4647,
      "step": 54
    },
    {
      "epoch": 0.00825345613475643,
      "grad_norm": 13.322903633117676,
      "learning_rate": 2.5e-06,
      "loss": 33.2152,
      "step": 55
    },
    {
      "epoch": 0.008403518973570183,
      "grad_norm": 12.972123146057129,
      "learning_rate": 2.4127512582437486e-06,
      "loss": 32.6633,
      "step": 56
    },
    {
      "epoch": 0.008553581812383936,
      "grad_norm": 12.695440292358398,
      "learning_rate": 2.325608815639687e-06,
      "loss": 33.9742,
      "step": 57
    },
    {
      "epoch": 0.00870364465119769,
      "grad_norm": 13.083794593811035,
      "learning_rate": 2.238678841830867e-06,
      "loss": 34.3758,
      "step": 58
    },
    {
      "epoch": 0.008853707490011442,
      "grad_norm": 14.709982872009277,
      "learning_rate": 2.1520672475998374e-06,
      "loss": 33.0617,
      "step": 59
    },
    {
      "epoch": 0.009003770328825196,
      "grad_norm": 15.33502197265625,
      "learning_rate": 2.0658795558326745e-06,
      "loss": 32.3498,
      "step": 60
    },
    {
      "epoch": 0.009153833167638948,
      "grad_norm": 16.43514060974121,
      "learning_rate": 1.9802207729556023e-06,
      "loss": 35.4804,
      "step": 61
    },
    {
      "epoch": 0.009303896006452702,
      "grad_norm": 16.312402725219727,
      "learning_rate": 1.895195261000831e-06,
      "loss": 36.8484,
      "step": 62
    },
    {
      "epoch": 0.009453958845266456,
      "grad_norm": 18.906774520874023,
      "learning_rate": 1.8109066104575023e-06,
      "loss": 37.3672,
      "step": 63
    },
    {
      "epoch": 0.009604021684080208,
      "grad_norm": 17.955244064331055,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 38.5029,
      "step": 64
    },
    {
      "epoch": 0.009754084522893962,
      "grad_norm": 20.66045379638672,
      "learning_rate": 1.6449496416858285e-06,
      "loss": 35.5722,
      "step": 65
    },
    {
      "epoch": 0.009904147361707714,
      "grad_norm": 20.17714500427246,
      "learning_rate": 1.56348351646022e-06,
      "loss": 37.8927,
      "step": 66
    },
    {
      "epoch": 0.010054210200521468,
      "grad_norm": 22.08597755432129,
      "learning_rate": 1.4831583923105e-06,
      "loss": 38.4573,
      "step": 67
    },
    {
      "epoch": 0.010204273039335222,
      "grad_norm": 24.34529685974121,
      "learning_rate": 1.4040721330273063e-06,
      "loss": 40.1156,
      "step": 68
    },
    {
      "epoch": 0.010354335878148975,
      "grad_norm": 22.071252822875977,
      "learning_rate": 1.3263210930352737e-06,
      "loss": 40.0515,
      "step": 69
    },
    {
      "epoch": 0.010504398716962729,
      "grad_norm": 23.233043670654297,
      "learning_rate": 1.2500000000000007e-06,
      "loss": 38.8181,
      "step": 70
    },
    {
      "epoch": 0.010654461555776481,
      "grad_norm": 24.044532775878906,
      "learning_rate": 1.1752018394169882e-06,
      "loss": 38.3733,
      "step": 71
    },
    {
      "epoch": 0.010804524394590235,
      "grad_norm": 28.727855682373047,
      "learning_rate": 1.1020177413231334e-06,
      "loss": 38.9866,
      "step": 72
    },
    {
      "epoch": 0.010954587233403987,
      "grad_norm": 31.710817337036133,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 43.0757,
      "step": 73
    },
    {
      "epoch": 0.011104650072217741,
      "grad_norm": 29.618091583251953,
      "learning_rate": 9.608463116858544e-07,
      "loss": 40.1635,
      "step": 74
    },
    {
      "epoch": 0.011254712911031495,
      "grad_norm": 15.594468116760254,
      "learning_rate": 8.930309757836517e-07,
      "loss": 31.7493,
      "step": 75
    },
    {
      "epoch": 0.011404775749845247,
      "grad_norm": 16.501638412475586,
      "learning_rate": 8.271734841028553e-07,
      "loss": 30.6078,
      "step": 76
    },
    {
      "epoch": 0.011554838588659001,
      "grad_norm": 16.97194480895996,
      "learning_rate": 7.633540738525066e-07,
      "loss": 30.2159,
      "step": 77
    },
    {
      "epoch": 0.011704901427472754,
      "grad_norm": 17.44843864440918,
      "learning_rate": 7.016504991533727e-07,
      "loss": 29.8036,
      "step": 78
    },
    {
      "epoch": 0.011854964266286508,
      "grad_norm": 15.254544258117676,
      "learning_rate": 6.421379363065142e-07,
      "loss": 28.1706,
      "step": 79
    },
    {
      "epoch": 0.012005027105100262,
      "grad_norm": 18.066022872924805,
      "learning_rate": 5.848888922025553e-07,
      "loss": 29.3522,
      "step": 80
    },
    {
      "epoch": 0.012155089943914014,
      "grad_norm": 17.23478126525879,
      "learning_rate": 5.299731159831953e-07,
      "loss": 30.1829,
      "step": 81
    },
    {
      "epoch": 0.012305152782727768,
      "grad_norm": 16.398849487304688,
      "learning_rate": 4.774575140626317e-07,
      "loss": 28.8294,
      "step": 82
    },
    {
      "epoch": 0.01245521562154152,
      "grad_norm": 16.836814880371094,
      "learning_rate": 4.27406068612396e-07,
      "loss": 29.5157,
      "step": 83
    },
    {
      "epoch": 0.012605278460355274,
      "grad_norm": 14.805627822875977,
      "learning_rate": 3.798797596089351e-07,
      "loss": 30.2987,
      "step": 84
    },
    {
      "epoch": 0.012755341299169026,
      "grad_norm": 16.60382080078125,
      "learning_rate": 3.3493649053890325e-07,
      "loss": 30.1055,
      "step": 85
    },
    {
      "epoch": 0.01290540413798278,
      "grad_norm": 14.74989128112793,
      "learning_rate": 2.9263101785268253e-07,
      "loss": 28.499,
      "step": 86
    },
    {
      "epoch": 0.013055466976796534,
      "grad_norm": 18.09991455078125,
      "learning_rate": 2.53014884252083e-07,
      "loss": 31.4409,
      "step": 87
    },
    {
      "epoch": 0.013205529815610286,
      "grad_norm": 15.617703437805176,
      "learning_rate": 2.1613635589349756e-07,
      "loss": 29.3455,
      "step": 88
    },
    {
      "epoch": 0.01335559265442404,
      "grad_norm": 17.055313110351562,
      "learning_rate": 1.8204036358303173e-07,
      "loss": 26.5924,
      "step": 89
    },
    {
      "epoch": 0.013505655493237793,
      "grad_norm": 15.88111686706543,
      "learning_rate": 1.507684480352292e-07,
      "loss": 26.878,
      "step": 90
    },
    {
      "epoch": 0.013655718332051547,
      "grad_norm": 16.348384857177734,
      "learning_rate": 1.223587092621162e-07,
      "loss": 28.8989,
      "step": 91
    },
    {
      "epoch": 0.0138057811708653,
      "grad_norm": 15.886968612670898,
      "learning_rate": 9.684576015420277e-08,
      "loss": 26.7125,
      "step": 92
    },
    {
      "epoch": 0.013955844009679053,
      "grad_norm": 16.663654327392578,
      "learning_rate": 7.426068431000883e-08,
      "loss": 27.2911,
      "step": 93
    },
    {
      "epoch": 0.014105906848492807,
      "grad_norm": 17.79175567626953,
      "learning_rate": 5.463099816548578e-08,
      "loss": 27.798,
      "step": 94
    },
    {
      "epoch": 0.01425596968730656,
      "grad_norm": 15.905275344848633,
      "learning_rate": 3.798061746947995e-08,
      "loss": 26.0851,
      "step": 95
    },
    {
      "epoch": 0.014406032526120313,
      "grad_norm": 16.322898864746094,
      "learning_rate": 2.4329828146074096e-08,
      "loss": 27.9597,
      "step": 96
    },
    {
      "epoch": 0.014556095364934065,
      "grad_norm": 16.844186782836914,
      "learning_rate": 1.3695261579316776e-08,
      "loss": 25.2095,
      "step": 97
    },
    {
      "epoch": 0.01470615820374782,
      "grad_norm": 16.607568740844727,
      "learning_rate": 6.089874350439507e-09,
      "loss": 24.5913,
      "step": 98
    },
    {
      "epoch": 0.014856221042561573,
      "grad_norm": 15.835559844970703,
      "learning_rate": 1.5229324522605949e-09,
      "loss": 24.0955,
      "step": 99
    },
    {
      "epoch": 0.015006283881375326,
      "grad_norm": 18.256946563720703,
      "learning_rate": 0.0,
      "loss": 25.9992,
      "step": 100
    },
    {
      "epoch": 0.015006283881375326,
      "eval_loss": 3.9525041580200195,
      "eval_runtime": 1173.1028,
      "eval_samples_per_second": 9.568,
      "eval_steps_per_second": 2.392,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.9392388208787456e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}