{
  "best_metric": 11.932085037231445,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.16214025131738954,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0016214025131738954,
      "grad_norm": 0.06148737668991089,
      "learning_rate": 5.000000000000001e-07,
      "loss": 11.937,
      "step": 1
    },
    {
      "epoch": 0.0016214025131738954,
      "eval_loss": 11.932759284973145,
      "eval_runtime": 5.5329,
      "eval_samples_per_second": 187.787,
      "eval_steps_per_second": 23.496,
      "step": 1
    },
    {
      "epoch": 0.0032428050263477908,
      "grad_norm": 0.04782642051577568,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 11.9386,
      "step": 2
    },
    {
      "epoch": 0.004864207539521687,
      "grad_norm": 0.06458375602960587,
      "learning_rate": 1.5e-06,
      "loss": 11.9403,
      "step": 3
    },
    {
      "epoch": 0.0064856100526955816,
      "grad_norm": 0.0695745050907135,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 11.9382,
      "step": 4
    },
    {
      "epoch": 0.008107012565869477,
      "grad_norm": 0.07331092655658722,
      "learning_rate": 2.5e-06,
      "loss": 11.9409,
      "step": 5
    },
    {
      "epoch": 0.009728415079043373,
      "grad_norm": 0.06527193635702133,
      "learning_rate": 3e-06,
      "loss": 11.9391,
      "step": 6
    },
    {
      "epoch": 0.011349817592217268,
      "grad_norm": 0.06476838886737823,
      "learning_rate": 3.5e-06,
      "loss": 11.9388,
      "step": 7
    },
    {
      "epoch": 0.012971220105391163,
      "grad_norm": 0.08535204827785492,
      "learning_rate": 4.000000000000001e-06,
      "loss": 11.9395,
      "step": 8
    },
    {
      "epoch": 0.014592622618565058,
      "grad_norm": 0.08116604387760162,
      "learning_rate": 4.5e-06,
      "loss": 11.9395,
      "step": 9
    },
    {
      "epoch": 0.016214025131738953,
      "grad_norm": 0.07606469094753265,
      "learning_rate": 5e-06,
      "loss": 11.9402,
      "step": 10
    },
    {
      "epoch": 0.01783542764491285,
      "grad_norm": 0.06500118225812912,
      "learning_rate": 4.99847706754774e-06,
      "loss": 11.9375,
      "step": 11
    },
    {
      "epoch": 0.019456830158086746,
      "grad_norm": 0.04997660592198372,
      "learning_rate": 4.993910125649561e-06,
      "loss": 11.9373,
      "step": 12
    },
    {
      "epoch": 0.02107823267126064,
      "grad_norm": 0.0678182914853096,
      "learning_rate": 4.986304738420684e-06,
      "loss": 11.9364,
      "step": 13
    },
    {
      "epoch": 0.022699635184434536,
      "grad_norm": 0.0676642581820488,
      "learning_rate": 4.975670171853926e-06,
      "loss": 11.9381,
      "step": 14
    },
    {
      "epoch": 0.02432103769760843,
      "grad_norm": 0.07154196500778198,
      "learning_rate": 4.962019382530521e-06,
      "loss": 11.9372,
      "step": 15
    },
    {
      "epoch": 0.025942440210782326,
      "grad_norm": 0.05947857350111008,
      "learning_rate": 4.9453690018345144e-06,
      "loss": 11.9374,
      "step": 16
    },
    {
      "epoch": 0.027563842723956223,
      "grad_norm": 0.0858941599726677,
      "learning_rate": 4.925739315689991e-06,
      "loss": 11.9382,
      "step": 17
    },
    {
      "epoch": 0.029185245237130116,
      "grad_norm": 0.08636289834976196,
      "learning_rate": 4.903154239845798e-06,
      "loss": 11.9327,
      "step": 18
    },
    {
      "epoch": 0.030806647750304013,
      "grad_norm": 0.09262266755104065,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 11.9362,
      "step": 19
    },
    {
      "epoch": 0.032428050263477906,
      "grad_norm": 0.07391153275966644,
      "learning_rate": 4.849231551964771e-06,
      "loss": 11.9341,
      "step": 20
    },
    {
      "epoch": 0.034049452776651806,
      "grad_norm": 0.0751374289393425,
      "learning_rate": 4.817959636416969e-06,
      "loss": 11.9347,
      "step": 21
    },
    {
      "epoch": 0.0356708552898257,
      "grad_norm": 0.0785205066204071,
      "learning_rate": 4.783863644106502e-06,
      "loss": 11.9299,
      "step": 22
    },
    {
      "epoch": 0.03729225780299959,
      "grad_norm": 0.05841010808944702,
      "learning_rate": 4.746985115747918e-06,
      "loss": 11.9302,
      "step": 23
    },
    {
      "epoch": 0.03891366031617349,
      "grad_norm": 0.062468521296978,
      "learning_rate": 4.707368982147318e-06,
      "loss": 11.9343,
      "step": 24
    },
    {
      "epoch": 0.040535062829347386,
      "grad_norm": 0.06350651383399963,
      "learning_rate": 4.665063509461098e-06,
      "loss": 11.9273,
      "step": 25
    },
    {
      "epoch": 0.04215646534252128,
      "grad_norm": 0.07266425341367722,
      "learning_rate": 4.620120240391065e-06,
      "loss": 11.9273,
      "step": 26
    },
    {
      "epoch": 0.04377786785569518,
      "grad_norm": 0.07261718064546585,
      "learning_rate": 4.572593931387604e-06,
      "loss": 11.9324,
      "step": 27
    },
    {
      "epoch": 0.04539927036886907,
      "grad_norm": 0.08380614221096039,
      "learning_rate": 4.522542485937369e-06,
      "loss": 11.9287,
      "step": 28
    },
    {
      "epoch": 0.047020672882042966,
      "grad_norm": 0.08229350298643112,
      "learning_rate": 4.470026884016805e-06,
      "loss": 11.9283,
      "step": 29
    },
    {
      "epoch": 0.04864207539521686,
      "grad_norm": 0.07520421594381332,
      "learning_rate": 4.415111107797445e-06,
      "loss": 11.9224,
      "step": 30
    },
    {
      "epoch": 0.05026347790839076,
      "grad_norm": 0.11293678730726242,
      "learning_rate": 4.357862063693486e-06,
      "loss": 11.9268,
      "step": 31
    },
    {
      "epoch": 0.05188488042156465,
      "grad_norm": 0.09202712029218674,
      "learning_rate": 4.2983495008466285e-06,
      "loss": 11.9304,
      "step": 32
    },
    {
      "epoch": 0.053506282934738546,
      "grad_norm": 0.09367135912179947,
      "learning_rate": 4.236645926147493e-06,
      "loss": 11.923,
      "step": 33
    },
    {
      "epoch": 0.055127685447912446,
      "grad_norm": 0.08824901282787323,
      "learning_rate": 4.172826515897146e-06,
      "loss": 11.9329,
      "step": 34
    },
    {
      "epoch": 0.05674908796108634,
      "grad_norm": 0.08749401569366455,
      "learning_rate": 4.106969024216348e-06,
      "loss": 11.9262,
      "step": 35
    },
    {
      "epoch": 0.05837049047426023,
      "grad_norm": 0.08584518730640411,
      "learning_rate": 4.039153688314146e-06,
      "loss": 11.9275,
      "step": 36
    },
    {
      "epoch": 0.05999189298743413,
      "grad_norm": 0.08267547190189362,
      "learning_rate": 3.969463130731183e-06,
      "loss": 11.9278,
      "step": 37
    },
    {
      "epoch": 0.061613295500608026,
      "grad_norm": 0.08701630681753159,
      "learning_rate": 3.897982258676867e-06,
      "loss": 11.9249,
      "step": 38
    },
    {
      "epoch": 0.06323469801378193,
      "grad_norm": 0.08931490033864975,
      "learning_rate": 3.824798160583012e-06,
      "loss": 11.9325,
      "step": 39
    },
    {
      "epoch": 0.06485610052695581,
      "grad_norm": 0.11539775133132935,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 11.9238,
      "step": 40
    },
    {
      "epoch": 0.06647750304012971,
      "grad_norm": 0.10175476223230362,
      "learning_rate": 3.6736789069647273e-06,
      "loss": 11.9282,
      "step": 41
    },
    {
      "epoch": 0.06809890555330361,
      "grad_norm": 0.11972854286432266,
      "learning_rate": 3.595927866972694e-06,
      "loss": 11.9298,
      "step": 42
    },
    {
      "epoch": 0.0697203080664775,
      "grad_norm": 0.11182477325201035,
      "learning_rate": 3.516841607689501e-06,
      "loss": 11.934,
      "step": 43
    },
    {
      "epoch": 0.0713417105796514,
      "grad_norm": 0.09574752300977707,
      "learning_rate": 3.436516483539781e-06,
      "loss": 11.9356,
      "step": 44
    },
    {
      "epoch": 0.0729631130928253,
      "grad_norm": 0.13956184685230255,
      "learning_rate": 3.3550503583141726e-06,
      "loss": 11.9262,
      "step": 45
    },
    {
      "epoch": 0.07458451560599919,
      "grad_norm": 0.1027691513299942,
      "learning_rate": 3.272542485937369e-06,
      "loss": 11.9351,
      "step": 46
    },
    {
      "epoch": 0.07620591811917309,
      "grad_norm": 0.13854406774044037,
      "learning_rate": 3.189093389542498e-06,
      "loss": 11.9253,
      "step": 47
    },
    {
      "epoch": 0.07782732063234699,
      "grad_norm": 0.12041836977005005,
      "learning_rate": 3.1048047389991693e-06,
      "loss": 11.9411,
      "step": 48
    },
    {
      "epoch": 0.07944872314552087,
      "grad_norm": 0.11676981300115585,
      "learning_rate": 3.019779227044398e-06,
      "loss": 11.939,
      "step": 49
    },
    {
      "epoch": 0.08107012565869477,
      "grad_norm": 0.13951873779296875,
      "learning_rate": 2.9341204441673267e-06,
      "loss": 11.9339,
      "step": 50
    },
    {
      "epoch": 0.08107012565869477,
      "eval_loss": 11.932246208190918,
      "eval_runtime": 5.5126,
      "eval_samples_per_second": 188.476,
      "eval_steps_per_second": 23.582,
      "step": 50
    },
    {
      "epoch": 0.08269152817186867,
      "grad_norm": 0.06557369232177734,
      "learning_rate": 2.847932752400164e-06,
      "loss": 11.9359,
      "step": 51
    },
    {
      "epoch": 0.08431293068504256,
      "grad_norm": 0.054183050990104675,
      "learning_rate": 2.761321158169134e-06,
      "loss": 11.9378,
      "step": 52
    },
    {
      "epoch": 0.08593433319821646,
      "grad_norm": 0.06125367805361748,
      "learning_rate": 2.6743911843603134e-06,
      "loss": 11.9401,
      "step": 53
    },
    {
      "epoch": 0.08755573571139036,
      "grad_norm": 0.07734334468841553,
      "learning_rate": 2.587248741756253e-06,
      "loss": 11.9367,
      "step": 54
    },
    {
      "epoch": 0.08917713822456425,
      "grad_norm": 0.07164815068244934,
      "learning_rate": 2.5e-06,
      "loss": 11.9412,
      "step": 55
    },
    {
      "epoch": 0.09079854073773815,
      "grad_norm": 0.07205943763256073,
      "learning_rate": 2.4127512582437486e-06,
      "loss": 11.9377,
      "step": 56
    },
    {
      "epoch": 0.09241994325091205,
      "grad_norm": 0.06834626942873001,
      "learning_rate": 2.325608815639687e-06,
      "loss": 11.9395,
      "step": 57
    },
    {
      "epoch": 0.09404134576408593,
      "grad_norm": 0.07087212055921555,
      "learning_rate": 2.238678841830867e-06,
      "loss": 11.9404,
      "step": 58
    },
    {
      "epoch": 0.09566274827725983,
      "grad_norm": 0.06267399340867996,
      "learning_rate": 2.1520672475998374e-06,
      "loss": 11.9416,
      "step": 59
    },
    {
      "epoch": 0.09728415079043372,
      "grad_norm": 0.05978712439537048,
      "learning_rate": 2.0658795558326745e-06,
      "loss": 11.9396,
      "step": 60
    },
    {
      "epoch": 0.09890555330360762,
      "grad_norm": 0.06278885900974274,
      "learning_rate": 1.9802207729556023e-06,
      "loss": 11.938,
      "step": 61
    },
    {
      "epoch": 0.10052695581678152,
      "grad_norm": 0.06936422735452652,
      "learning_rate": 1.895195261000831e-06,
      "loss": 11.9391,
      "step": 62
    },
    {
      "epoch": 0.1021483583299554,
      "grad_norm": 0.06972584128379822,
      "learning_rate": 1.8109066104575023e-06,
      "loss": 11.9379,
      "step": 63
    },
    {
      "epoch": 0.1037697608431293,
      "grad_norm": 0.06474191695451736,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 11.9415,
      "step": 64
    },
    {
      "epoch": 0.1053911633563032,
      "grad_norm": 0.0643126517534256,
      "learning_rate": 1.6449496416858285e-06,
      "loss": 11.9368,
      "step": 65
    },
    {
      "epoch": 0.10701256586947709,
      "grad_norm": 0.060131411999464035,
      "learning_rate": 1.56348351646022e-06,
      "loss": 11.9366,
      "step": 66
    },
    {
      "epoch": 0.10863396838265099,
      "grad_norm": 0.07099177688360214,
      "learning_rate": 1.4831583923105e-06,
      "loss": 11.9369,
      "step": 67
    },
    {
      "epoch": 0.11025537089582489,
      "grad_norm": 0.0853051170706749,
      "learning_rate": 1.4040721330273063e-06,
      "loss": 11.9387,
      "step": 68
    },
    {
      "epoch": 0.11187677340899878,
      "grad_norm": 0.07782307267189026,
      "learning_rate": 1.3263210930352737e-06,
      "loss": 11.9346,
      "step": 69
    },
    {
      "epoch": 0.11349817592217268,
      "grad_norm": 0.07038868218660355,
      "learning_rate": 1.2500000000000007e-06,
      "loss": 11.9369,
      "step": 70
    },
    {
      "epoch": 0.11511957843534658,
      "grad_norm": 0.06611315160989761,
      "learning_rate": 1.1752018394169882e-06,
      "loss": 11.9369,
      "step": 71
    },
    {
      "epoch": 0.11674098094852046,
      "grad_norm": 0.06813161075115204,
      "learning_rate": 1.1020177413231334e-06,
      "loss": 11.9355,
      "step": 72
    },
    {
      "epoch": 0.11836238346169436,
      "grad_norm": 0.07219278067350388,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 11.9318,
      "step": 73
    },
    {
      "epoch": 0.11998378597486827,
      "grad_norm": 0.07824105024337769,
      "learning_rate": 9.608463116858544e-07,
      "loss": 11.9312,
      "step": 74
    },
    {
      "epoch": 0.12160518848804215,
      "grad_norm": 0.08129618316888809,
      "learning_rate": 8.930309757836517e-07,
      "loss": 11.9319,
      "step": 75
    },
    {
      "epoch": 0.12322659100121605,
      "grad_norm": 0.06890939921140671,
      "learning_rate": 8.271734841028553e-07,
      "loss": 11.9365,
      "step": 76
    },
    {
      "epoch": 0.12484799351438995,
      "grad_norm": 0.08247758448123932,
      "learning_rate": 7.633540738525066e-07,
      "loss": 11.9334,
      "step": 77
    },
    {
      "epoch": 0.12646939602756385,
      "grad_norm": 0.09313575178384781,
      "learning_rate": 7.016504991533727e-07,
      "loss": 11.9232,
      "step": 78
    },
    {
      "epoch": 0.12809079854073774,
      "grad_norm": 0.05831461772322655,
      "learning_rate": 6.421379363065142e-07,
      "loss": 11.9322,
      "step": 79
    },
    {
      "epoch": 0.12971220105391162,
      "grad_norm": 0.06037887558341026,
      "learning_rate": 5.848888922025553e-07,
      "loss": 11.9308,
      "step": 80
    },
    {
      "epoch": 0.13133360356708554,
      "grad_norm": 0.07506049424409866,
      "learning_rate": 5.299731159831953e-07,
      "loss": 11.9268,
      "step": 81
    },
    {
      "epoch": 0.13295500608025942,
      "grad_norm": 0.0684242770075798,
      "learning_rate": 4.774575140626317e-07,
      "loss": 11.9232,
      "step": 82
    },
    {
      "epoch": 0.1345764085934333,
      "grad_norm": 0.07387068122625351,
      "learning_rate": 4.27406068612396e-07,
      "loss": 11.9302,
      "step": 83
    },
    {
      "epoch": 0.13619781110660723,
      "grad_norm": 0.09172695875167847,
      "learning_rate": 3.798797596089351e-07,
      "loss": 11.9247,
      "step": 84
    },
    {
      "epoch": 0.1378192136197811,
      "grad_norm": 0.07359951734542847,
      "learning_rate": 3.3493649053890325e-07,
      "loss": 11.9305,
      "step": 85
    },
    {
      "epoch": 0.139440616132955,
      "grad_norm": 0.09183831512928009,
      "learning_rate": 2.9263101785268253e-07,
      "loss": 11.9322,
      "step": 86
    },
    {
      "epoch": 0.1410620186461289,
      "grad_norm": 0.0872773602604866,
      "learning_rate": 2.53014884252083e-07,
      "loss": 11.9244,
      "step": 87
    },
    {
      "epoch": 0.1426834211593028,
      "grad_norm": 0.10315699130296707,
      "learning_rate": 2.1613635589349756e-07,
      "loss": 11.9244,
      "step": 88
    },
    {
      "epoch": 0.14430482367247668,
      "grad_norm": 0.10300548374652863,
      "learning_rate": 1.8204036358303173e-07,
      "loss": 11.9305,
      "step": 89
    },
    {
      "epoch": 0.1459262261856506,
      "grad_norm": 0.09078492969274521,
      "learning_rate": 1.507684480352292e-07,
      "loss": 11.9307,
      "step": 90
    },
    {
      "epoch": 0.14754762869882448,
      "grad_norm": 0.08630198240280151,
      "learning_rate": 1.223587092621162e-07,
      "loss": 11.9279,
      "step": 91
    },
    {
      "epoch": 0.14916903121199837,
      "grad_norm": 0.11776189506053925,
      "learning_rate": 9.684576015420277e-08,
      "loss": 11.928,
      "step": 92
    },
    {
      "epoch": 0.15079043372517228,
      "grad_norm": 0.11010502278804779,
      "learning_rate": 7.426068431000883e-08,
      "loss": 11.9303,
      "step": 93
    },
    {
      "epoch": 0.15241183623834617,
      "grad_norm": 0.10045027732849121,
      "learning_rate": 5.463099816548578e-08,
      "loss": 11.9332,
      "step": 94
    },
    {
      "epoch": 0.15403323875152006,
      "grad_norm": 0.10060346871614456,
      "learning_rate": 3.798061746947995e-08,
      "loss": 11.9345,
      "step": 95
    },
    {
      "epoch": 0.15565464126469397,
      "grad_norm": 0.092365562915802,
      "learning_rate": 2.4329828146074096e-08,
      "loss": 11.9306,
      "step": 96
    },
    {
      "epoch": 0.15727604377786786,
      "grad_norm": 0.13334056735038757,
      "learning_rate": 1.3695261579316776e-08,
      "loss": 11.9357,
      "step": 97
    },
    {
      "epoch": 0.15889744629104174,
      "grad_norm": 0.1075684055685997,
      "learning_rate": 6.089874350439507e-09,
      "loss": 11.9331,
      "step": 98
    },
    {
      "epoch": 0.16051884880421566,
      "grad_norm": 0.1268085241317749,
      "learning_rate": 1.5229324522605949e-09,
      "loss": 11.936,
      "step": 99
    },
    {
      "epoch": 0.16214025131738954,
      "grad_norm": 0.13361109793186188,
      "learning_rate": 0.0,
      "loss": 11.9422,
      "step": 100
    },
    {
      "epoch": 0.16214025131738954,
      "eval_loss": 11.932085037231445,
      "eval_runtime": 5.5292,
      "eval_samples_per_second": 187.91,
      "eval_steps_per_second": 23.511,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 484493230080.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}