{
  "best_metric": 0.03529293090105057,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 1.5584415584415585,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01038961038961039,
      "grad_norm": 2.5287673473358154,
      "learning_rate": 1e-05,
      "loss": 0.3939,
      "step": 1
    },
    {
      "epoch": 0.01038961038961039,
      "eval_loss": 0.863274872303009,
      "eval_runtime": 3.0094,
      "eval_samples_per_second": 53.832,
      "eval_steps_per_second": 13.624,
      "step": 1
    },
    {
      "epoch": 0.02077922077922078,
      "grad_norm": 2.515570878982544,
      "learning_rate": 2e-05,
      "loss": 0.3581,
      "step": 2
    },
    {
      "epoch": 0.03116883116883117,
      "grad_norm": 2.9448156356811523,
      "learning_rate": 3e-05,
      "loss": 0.3876,
      "step": 3
    },
    {
      "epoch": 0.04155844155844156,
      "grad_norm": 3.0535359382629395,
      "learning_rate": 4e-05,
      "loss": 0.3763,
      "step": 4
    },
    {
      "epoch": 0.05194805194805195,
      "grad_norm": 2.8496756553649902,
      "learning_rate": 5e-05,
      "loss": 0.2602,
      "step": 5
    },
    {
      "epoch": 0.06233766233766234,
      "grad_norm": 1.6722943782806396,
      "learning_rate": 6e-05,
      "loss": 0.2136,
      "step": 6
    },
    {
      "epoch": 0.07272727272727272,
      "grad_norm": 1.7854785919189453,
      "learning_rate": 7e-05,
      "loss": 0.1442,
      "step": 7
    },
    {
      "epoch": 0.08311688311688312,
      "grad_norm": 1.660981297492981,
      "learning_rate": 8e-05,
      "loss": 0.1745,
      "step": 8
    },
    {
      "epoch": 0.09350649350649351,
      "grad_norm": 1.4793494939804077,
      "learning_rate": 9e-05,
      "loss": 0.1098,
      "step": 9
    },
    {
      "epoch": 0.1038961038961039,
      "grad_norm": 1.1051626205444336,
      "learning_rate": 0.0001,
      "loss": 0.1041,
      "step": 10
    },
    {
      "epoch": 0.11428571428571428,
      "grad_norm": 1.1426607370376587,
      "learning_rate": 9.999316524962345e-05,
      "loss": 0.1,
      "step": 11
    },
    {
      "epoch": 0.12467532467532468,
      "grad_norm": 1.4962877035140991,
      "learning_rate": 9.997266286704631e-05,
      "loss": 0.098,
      "step": 12
    },
    {
      "epoch": 0.13506493506493505,
      "grad_norm": 0.8829529285430908,
      "learning_rate": 9.993849845741524e-05,
      "loss": 0.0939,
      "step": 13
    },
    {
      "epoch": 0.14545454545454545,
      "grad_norm": 0.9724581837654114,
      "learning_rate": 9.989068136093873e-05,
      "loss": 0.0733,
      "step": 14
    },
    {
      "epoch": 0.15584415584415584,
      "grad_norm": 1.5023114681243896,
      "learning_rate": 9.98292246503335e-05,
      "loss": 0.1164,
      "step": 15
    },
    {
      "epoch": 0.16623376623376623,
      "grad_norm": 0.8281694054603577,
      "learning_rate": 9.975414512725057e-05,
      "loss": 0.0674,
      "step": 16
    },
    {
      "epoch": 0.17662337662337663,
      "grad_norm": 1.2017107009887695,
      "learning_rate": 9.966546331768191e-05,
      "loss": 0.0987,
      "step": 17
    },
    {
      "epoch": 0.18701298701298702,
      "grad_norm": 1.3279019594192505,
      "learning_rate": 9.956320346634876e-05,
      "loss": 0.0972,
      "step": 18
    },
    {
      "epoch": 0.1974025974025974,
      "grad_norm": 1.3757394552230835,
      "learning_rate": 9.944739353007344e-05,
      "loss": 0.0886,
      "step": 19
    },
    {
      "epoch": 0.2077922077922078,
      "grad_norm": 1.4151781797409058,
      "learning_rate": 9.931806517013612e-05,
      "loss": 0.0918,
      "step": 20
    },
    {
      "epoch": 0.21818181818181817,
      "grad_norm": 1.22770357131958,
      "learning_rate": 9.917525374361912e-05,
      "loss": 0.067,
      "step": 21
    },
    {
      "epoch": 0.22857142857142856,
      "grad_norm": 2.3439786434173584,
      "learning_rate": 9.901899829374047e-05,
      "loss": 0.1662,
      "step": 22
    },
    {
      "epoch": 0.23896103896103896,
      "grad_norm": 1.457555890083313,
      "learning_rate": 9.884934153917997e-05,
      "loss": 0.0722,
      "step": 23
    },
    {
      "epoch": 0.24935064935064935,
      "grad_norm": 1.6406139135360718,
      "learning_rate": 9.86663298624003e-05,
      "loss": 0.0891,
      "step": 24
    },
    {
      "epoch": 0.2597402597402597,
      "grad_norm": 0.8812335133552551,
      "learning_rate": 9.847001329696653e-05,
      "loss": 0.116,
      "step": 25
    },
    {
      "epoch": 0.2701298701298701,
      "grad_norm": 0.6572224497795105,
      "learning_rate": 9.826044551386744e-05,
      "loss": 0.0987,
      "step": 26
    },
    {
      "epoch": 0.2805194805194805,
      "grad_norm": 0.3550848066806793,
      "learning_rate": 9.803768380684242e-05,
      "loss": 0.0432,
      "step": 27
    },
    {
      "epoch": 0.2909090909090909,
      "grad_norm": 0.6117568016052246,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.0882,
      "step": 28
    },
    {
      "epoch": 0.3012987012987013,
      "grad_norm": 0.6368898153305054,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.0664,
      "step": 29
    },
    {
      "epoch": 0.3116883116883117,
      "grad_norm": 0.66350919008255,
      "learning_rate": 9.729086208503174e-05,
      "loss": 0.0514,
      "step": 30
    },
    {
      "epoch": 0.3220779220779221,
      "grad_norm": 0.48356160521507263,
      "learning_rate": 9.701596950580806e-05,
      "loss": 0.0431,
      "step": 31
    },
    {
      "epoch": 0.33246753246753247,
      "grad_norm": 0.45327234268188477,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.0452,
      "step": 32
    },
    {
      "epoch": 0.34285714285714286,
      "grad_norm": 0.5258204340934753,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.0499,
      "step": 33
    },
    {
      "epoch": 0.35324675324675325,
      "grad_norm": 0.6655260324478149,
      "learning_rate": 9.611448774886924e-05,
      "loss": 0.0515,
      "step": 34
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 0.43133464455604553,
      "learning_rate": 9.578866633275288e-05,
      "loss": 0.0428,
      "step": 35
    },
    {
      "epoch": 0.37402597402597404,
      "grad_norm": 0.470022976398468,
      "learning_rate": 9.545032675245813e-05,
      "loss": 0.0404,
      "step": 36
    },
    {
      "epoch": 0.38441558441558443,
      "grad_norm": 0.6083030104637146,
      "learning_rate": 9.509956150664796e-05,
      "loss": 0.0354,
      "step": 37
    },
    {
      "epoch": 0.3948051948051948,
      "grad_norm": 0.7614169716835022,
      "learning_rate": 9.473646649103818e-05,
      "loss": 0.0723,
      "step": 38
    },
    {
      "epoch": 0.4051948051948052,
      "grad_norm": 0.5447736978530884,
      "learning_rate": 9.43611409721806e-05,
      "loss": 0.048,
      "step": 39
    },
    {
      "epoch": 0.4155844155844156,
      "grad_norm": 1.0038998126983643,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.0762,
      "step": 40
    },
    {
      "epoch": 0.42597402597402595,
      "grad_norm": 0.6833203434944153,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.0451,
      "step": 41
    },
    {
      "epoch": 0.43636363636363634,
      "grad_norm": 0.7687332630157471,
      "learning_rate": 9.316282404787871e-05,
      "loss": 0.069,
      "step": 42
    },
    {
      "epoch": 0.44675324675324674,
      "grad_norm": 1.1317681074142456,
      "learning_rate": 9.273963562927695e-05,
      "loss": 0.099,
      "step": 43
    },
    {
      "epoch": 0.45714285714285713,
      "grad_norm": 0.8176448941230774,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.0691,
      "step": 44
    },
    {
      "epoch": 0.4675324675324675,
      "grad_norm": 0.7559006214141846,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.0228,
      "step": 45
    },
    {
      "epoch": 0.4779220779220779,
      "grad_norm": 1.1316182613372803,
      "learning_rate": 9.140044155740101e-05,
      "loss": 0.0376,
      "step": 46
    },
    {
      "epoch": 0.4883116883116883,
      "grad_norm": 1.1144359111785889,
      "learning_rate": 9.093124073433463e-05,
      "loss": 0.0485,
      "step": 47
    },
    {
      "epoch": 0.4987012987012987,
      "grad_norm": 0.7029817700386047,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.0272,
      "step": 48
    },
    {
      "epoch": 0.509090909090909,
      "grad_norm": 0.5952286124229431,
      "learning_rate": 8.995939984474624e-05,
      "loss": 0.0727,
      "step": 49
    },
    {
      "epoch": 0.5194805194805194,
      "grad_norm": 0.31108468770980835,
      "learning_rate": 8.945702546981969e-05,
      "loss": 0.0345,
      "step": 50
    },
    {
      "epoch": 0.5194805194805194,
      "eval_loss": 0.05142683908343315,
      "eval_runtime": 3.0213,
      "eval_samples_per_second": 53.619,
      "eval_steps_per_second": 13.57,
      "step": 50
    },
    {
      "epoch": 0.5298701298701298,
      "grad_norm": 0.5121963620185852,
      "learning_rate": 8.894386393810563e-05,
      "loss": 0.0875,
      "step": 51
    },
    {
      "epoch": 0.5402597402597402,
      "grad_norm": 0.4110298156738281,
      "learning_rate": 8.842005554284296e-05,
      "loss": 0.0579,
      "step": 52
    },
    {
      "epoch": 0.5506493506493506,
      "grad_norm": 0.3747623860836029,
      "learning_rate": 8.788574348801675e-05,
      "loss": 0.0419,
      "step": 53
    },
    {
      "epoch": 0.561038961038961,
      "grad_norm": 0.25905701518058777,
      "learning_rate": 8.73410738492077e-05,
      "loss": 0.0362,
      "step": 54
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.43626976013183594,
      "learning_rate": 8.678619553365659e-05,
      "loss": 0.0554,
      "step": 55
    },
    {
      "epoch": 0.5818181818181818,
      "grad_norm": 0.40240776538848877,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.0356,
      "step": 56
    },
    {
      "epoch": 0.5922077922077922,
      "grad_norm": 0.5400237441062927,
      "learning_rate": 8.564642241456986e-05,
      "loss": 0.0575,
      "step": 57
    },
    {
      "epoch": 0.6025974025974026,
      "grad_norm": 0.386448472738266,
      "learning_rate": 8.506183921362443e-05,
      "loss": 0.0323,
      "step": 58
    },
    {
      "epoch": 0.612987012987013,
      "grad_norm": 0.40781375765800476,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.0436,
      "step": 59
    },
    {
      "epoch": 0.6233766233766234,
      "grad_norm": 0.59367835521698,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.0499,
      "step": 60
    },
    {
      "epoch": 0.6337662337662338,
      "grad_norm": 0.4403911530971527,
      "learning_rate": 8.32512286056924e-05,
      "loss": 0.0427,
      "step": 61
    },
    {
      "epoch": 0.6441558441558441,
      "grad_norm": 0.38498446345329285,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.0415,
      "step": 62
    },
    {
      "epoch": 0.6545454545454545,
      "grad_norm": 0.5406155586242676,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.0493,
      "step": 63
    },
    {
      "epoch": 0.6649350649350649,
      "grad_norm": 0.45152103900909424,
      "learning_rate": 8.135881792367686e-05,
      "loss": 0.0281,
      "step": 64
    },
    {
      "epoch": 0.6753246753246753,
      "grad_norm": 0.5759983062744141,
      "learning_rate": 8.07106356344834e-05,
      "loss": 0.0381,
      "step": 65
    },
    {
      "epoch": 0.6857142857142857,
      "grad_norm": 0.612442135810852,
      "learning_rate": 8.005405736415126e-05,
      "loss": 0.0646,
      "step": 66
    },
    {
      "epoch": 0.6961038961038961,
      "grad_norm": 0.7097983956336975,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.0343,
      "step": 67
    },
    {
      "epoch": 0.7064935064935065,
      "grad_norm": 0.7125105857849121,
      "learning_rate": 7.871643313414718e-05,
      "loss": 0.0305,
      "step": 68
    },
    {
      "epoch": 0.7168831168831169,
      "grad_norm": 1.0859618186950684,
      "learning_rate": 7.803575286758364e-05,
      "loss": 0.0663,
      "step": 69
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 1.1588716506958008,
      "learning_rate": 7.734740790612136e-05,
      "loss": 0.0816,
      "step": 70
    },
    {
      "epoch": 0.7376623376623377,
      "grad_norm": 1.3379589319229126,
      "learning_rate": 7.66515864363997e-05,
      "loss": 0.0485,
      "step": 71
    },
    {
      "epoch": 0.7480519480519481,
      "grad_norm": 0.8509228825569153,
      "learning_rate": 7.594847868906076e-05,
      "loss": 0.0244,
      "step": 72
    },
    {
      "epoch": 0.7584415584415585,
      "grad_norm": 0.3408161997795105,
      "learning_rate": 7.52382768867422e-05,
      "loss": 0.0693,
      "step": 73
    },
    {
      "epoch": 0.7688311688311689,
      "grad_norm": 0.39925113320350647,
      "learning_rate": 7.452117519152542e-05,
      "loss": 0.0349,
      "step": 74
    },
    {
      "epoch": 0.7792207792207793,
      "grad_norm": 0.46203750371932983,
      "learning_rate": 7.379736965185368e-05,
      "loss": 0.0479,
      "step": 75
    },
    {
      "epoch": 0.7896103896103897,
      "grad_norm": 0.32868289947509766,
      "learning_rate": 7.30670581489344e-05,
      "loss": 0.0377,
      "step": 76
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.4701736867427826,
      "learning_rate": 7.233044034264034e-05,
      "loss": 0.0538,
      "step": 77
    },
    {
      "epoch": 0.8103896103896104,
      "grad_norm": 0.3084734380245209,
      "learning_rate": 7.158771761692464e-05,
      "loss": 0.0266,
      "step": 78
    },
    {
      "epoch": 0.8207792207792208,
      "grad_norm": 0.2898675501346588,
      "learning_rate": 7.083909302476453e-05,
      "loss": 0.0273,
      "step": 79
    },
    {
      "epoch": 0.8311688311688312,
      "grad_norm": 0.4772960841655731,
      "learning_rate": 7.008477123264848e-05,
      "loss": 0.0411,
      "step": 80
    },
    {
      "epoch": 0.8415584415584415,
      "grad_norm": 0.41926559805870056,
      "learning_rate": 6.932495846462261e-05,
      "loss": 0.0357,
      "step": 81
    },
    {
      "epoch": 0.8519480519480519,
      "grad_norm": 0.4771142899990082,
      "learning_rate": 6.855986244591104e-05,
      "loss": 0.0355,
      "step": 82
    },
    {
      "epoch": 0.8623376623376623,
      "grad_norm": 0.40837517380714417,
      "learning_rate": 6.778969234612584e-05,
      "loss": 0.0477,
      "step": 83
    },
    {
      "epoch": 0.8727272727272727,
      "grad_norm": 0.3554452657699585,
      "learning_rate": 6.701465872208216e-05,
      "loss": 0.0279,
      "step": 84
    },
    {
      "epoch": 0.8831168831168831,
      "grad_norm": 0.5350756645202637,
      "learning_rate": 6.623497346023418e-05,
      "loss": 0.0522,
      "step": 85
    },
    {
      "epoch": 0.8935064935064935,
      "grad_norm": 0.29785236716270447,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.0256,
      "step": 86
    },
    {
      "epoch": 0.9038961038961039,
      "grad_norm": 0.34512704610824585,
      "learning_rate": 6.466250186922325e-05,
      "loss": 0.0174,
      "step": 87
    },
    {
      "epoch": 0.9142857142857143,
      "grad_norm": 0.5753390192985535,
      "learning_rate": 6.387014543809223e-05,
      "loss": 0.0507,
      "step": 88
    },
    {
      "epoch": 0.9246753246753247,
      "grad_norm": 0.4211437404155731,
      "learning_rate": 6.307399704769099e-05,
      "loss": 0.0258,
      "step": 89
    },
    {
      "epoch": 0.935064935064935,
      "grad_norm": 0.837975800037384,
      "learning_rate": 6.227427435703997e-05,
      "loss": 0.0715,
      "step": 90
    },
    {
      "epoch": 0.9454545454545454,
      "grad_norm": 0.7448059916496277,
      "learning_rate": 6.147119600233758e-05,
      "loss": 0.0461,
      "step": 91
    },
    {
      "epoch": 0.9558441558441558,
      "grad_norm": 0.9707018136978149,
      "learning_rate": 6.066498153718735e-05,
      "loss": 0.0633,
      "step": 92
    },
    {
      "epoch": 0.9662337662337662,
      "grad_norm": 0.775959849357605,
      "learning_rate": 5.985585137257401e-05,
      "loss": 0.0466,
      "step": 93
    },
    {
      "epoch": 0.9766233766233766,
      "grad_norm": 1.0919564962387085,
      "learning_rate": 5.90440267166055e-05,
      "loss": 0.0635,
      "step": 94
    },
    {
      "epoch": 0.987012987012987,
      "grad_norm": 0.6115449070930481,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 0.0233,
      "step": 95
    },
    {
      "epoch": 0.9974025974025974,
      "grad_norm": 0.8659562468528748,
      "learning_rate": 5.74131823855921e-05,
      "loss": 0.0386,
      "step": 96
    },
    {
      "epoch": 1.0077922077922077,
      "grad_norm": 1.6310852766036987,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 0.0751,
      "step": 97
    },
    {
      "epoch": 1.018181818181818,
      "grad_norm": 0.3640834093093872,
      "learning_rate": 5.577423184847932e-05,
      "loss": 0.038,
      "step": 98
    },
    {
      "epoch": 1.0285714285714285,
      "grad_norm": 0.2505420744419098,
      "learning_rate": 5.495227651252315e-05,
      "loss": 0.0199,
      "step": 99
    },
    {
      "epoch": 1.0389610389610389,
      "grad_norm": 0.3508344292640686,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 0.0363,
      "step": 100
    },
    {
      "epoch": 1.0389610389610389,
      "eval_loss": 0.03529293090105057,
      "eval_runtime": 2.9677,
      "eval_samples_per_second": 54.587,
      "eval_steps_per_second": 13.815,
      "step": 100
    },
    {
      "epoch": 1.0493506493506493,
      "grad_norm": 0.20951582491397858,
      "learning_rate": 5.330452921628497e-05,
      "loss": 0.0204,
      "step": 101
    },
    {
      "epoch": 1.0597402597402596,
      "grad_norm": 0.29206031560897827,
      "learning_rate": 5.247918773366112e-05,
      "loss": 0.0325,
      "step": 102
    },
    {
      "epoch": 1.07012987012987,
      "grad_norm": 0.281766802072525,
      "learning_rate": 5.165316846586541e-05,
      "loss": 0.0255,
      "step": 103
    },
    {
      "epoch": 1.0805194805194804,
      "grad_norm": 0.1848578006029129,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 0.0128,
      "step": 104
    },
    {
      "epoch": 1.0909090909090908,
      "grad_norm": 0.2879084050655365,
      "learning_rate": 5e-05,
      "loss": 0.0152,
      "step": 105
    },
    {
      "epoch": 1.1012987012987012,
      "grad_norm": 0.4555622935295105,
      "learning_rate": 4.917330276168208e-05,
      "loss": 0.0222,
      "step": 106
    },
    {
      "epoch": 1.1116883116883116,
      "grad_norm": 0.27269798517227173,
      "learning_rate": 4.834683153413459e-05,
      "loss": 0.012,
      "step": 107
    },
    {
      "epoch": 1.122077922077922,
      "grad_norm": 0.13104607164859772,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 0.0057,
      "step": 108
    },
    {
      "epoch": 1.1324675324675324,
      "grad_norm": 0.14721709489822388,
      "learning_rate": 4.669547078371504e-05,
      "loss": 0.0084,
      "step": 109
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 0.15247803926467896,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 0.0083,
      "step": 110
    },
    {
      "epoch": 1.1532467532467532,
      "grad_norm": 0.18574222922325134,
      "learning_rate": 4.504772348747687e-05,
      "loss": 0.0102,
      "step": 111
    },
    {
      "epoch": 1.1636363636363636,
      "grad_norm": 0.20942085981369019,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 0.0143,
      "step": 112
    },
    {
      "epoch": 1.174025974025974,
      "grad_norm": 0.18828628957271576,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 0.0096,
      "step": 113
    },
    {
      "epoch": 1.1844155844155844,
      "grad_norm": 0.5457435250282288,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 0.0412,
      "step": 114
    },
    {
      "epoch": 1.1948051948051948,
      "grad_norm": 0.34286874532699585,
      "learning_rate": 4.17702704859633e-05,
      "loss": 0.0152,
      "step": 115
    },
    {
      "epoch": 1.2051948051948052,
      "grad_norm": 0.609895646572113,
      "learning_rate": 4.095597328339452e-05,
      "loss": 0.024,
      "step": 116
    },
    {
      "epoch": 1.2155844155844155,
      "grad_norm": 0.33236628770828247,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 0.0145,
      "step": 117
    },
    {
      "epoch": 1.225974025974026,
      "grad_norm": 0.5444545745849609,
      "learning_rate": 3.933501846281267e-05,
      "loss": 0.0132,
      "step": 118
    },
    {
      "epoch": 1.2363636363636363,
      "grad_norm": 1.0387717485427856,
      "learning_rate": 3.852880399766243e-05,
      "loss": 0.0281,
      "step": 119
    },
    {
      "epoch": 1.2467532467532467,
      "grad_norm": 0.9034120440483093,
      "learning_rate": 3.772572564296005e-05,
      "loss": 0.0114,
      "step": 120
    },
    {
      "epoch": 1.2571428571428571,
      "grad_norm": 0.37075698375701904,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 0.0628,
      "step": 121
    },
    {
      "epoch": 1.2675324675324675,
      "grad_norm": 0.28893956542015076,
      "learning_rate": 3.612985456190778e-05,
      "loss": 0.0349,
      "step": 122
    },
    {
      "epoch": 1.277922077922078,
      "grad_norm": 0.23542575538158417,
      "learning_rate": 3.533749813077677e-05,
      "loss": 0.02,
      "step": 123
    },
    {
      "epoch": 1.2883116883116883,
      "grad_norm": 0.16930247843265533,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.0137,
      "step": 124
    },
    {
      "epoch": 1.2987012987012987,
      "grad_norm": 0.28762128949165344,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 0.0339,
      "step": 125
    },
    {
      "epoch": 1.309090909090909,
      "grad_norm": 0.2217499017715454,
      "learning_rate": 3.298534127791785e-05,
      "loss": 0.0168,
      "step": 126
    },
    {
      "epoch": 1.3194805194805195,
      "grad_norm": 0.19746264815330505,
      "learning_rate": 3.221030765387417e-05,
      "loss": 0.012,
      "step": 127
    },
    {
      "epoch": 1.3298701298701299,
      "grad_norm": 0.32650619745254517,
      "learning_rate": 3.144013755408895e-05,
      "loss": 0.0135,
      "step": 128
    },
    {
      "epoch": 1.3402597402597403,
      "grad_norm": 0.13054144382476807,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 0.0096,
      "step": 129
    },
    {
      "epoch": 1.3506493506493507,
      "grad_norm": 0.23127451539039612,
      "learning_rate": 2.991522876735154e-05,
      "loss": 0.0136,
      "step": 130
    },
    {
      "epoch": 1.361038961038961,
      "grad_norm": 0.4423576891422272,
      "learning_rate": 2.916090697523549e-05,
      "loss": 0.0179,
      "step": 131
    },
    {
      "epoch": 1.3714285714285714,
      "grad_norm": 0.4975621998310089,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 0.0197,
      "step": 132
    },
    {
      "epoch": 1.3818181818181818,
      "grad_norm": 0.40659210085868835,
      "learning_rate": 2.766955965735968e-05,
      "loss": 0.0269,
      "step": 133
    },
    {
      "epoch": 1.3922077922077922,
      "grad_norm": 0.2875227630138397,
      "learning_rate": 2.693294185106562e-05,
      "loss": 0.0111,
      "step": 134
    },
    {
      "epoch": 1.4025974025974026,
      "grad_norm": 0.39700427651405334,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 0.0191,
      "step": 135
    },
    {
      "epoch": 1.412987012987013,
      "grad_norm": 0.5649827718734741,
      "learning_rate": 2.547882480847461e-05,
      "loss": 0.0138,
      "step": 136
    },
    {
      "epoch": 1.4233766233766234,
      "grad_norm": 0.33105403184890747,
      "learning_rate": 2.476172311325783e-05,
      "loss": 0.0094,
      "step": 137
    },
    {
      "epoch": 1.4337662337662338,
      "grad_norm": 0.2734118402004242,
      "learning_rate": 2.405152131093926e-05,
      "loss": 0.0098,
      "step": 138
    },
    {
      "epoch": 1.4441558441558442,
      "grad_norm": 0.31457558274269104,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 0.0129,
      "step": 139
    },
    {
      "epoch": 1.4545454545454546,
      "grad_norm": 0.14777958393096924,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 0.0069,
      "step": 140
    },
    {
      "epoch": 1.464935064935065,
      "grad_norm": 0.13043536245822906,
      "learning_rate": 2.196424713241637e-05,
      "loss": 0.0037,
      "step": 141
    },
    {
      "epoch": 1.4753246753246754,
      "grad_norm": 0.5898758172988892,
      "learning_rate": 2.128356686585282e-05,
      "loss": 0.0288,
      "step": 142
    },
    {
      "epoch": 1.4857142857142858,
      "grad_norm": 0.21063728630542755,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.0049,
      "step": 143
    },
    {
      "epoch": 1.4961038961038962,
      "grad_norm": 0.15052427351474762,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 0.0041,
      "step": 144
    },
    {
      "epoch": 1.5064935064935066,
      "grad_norm": 0.22741664946079254,
      "learning_rate": 1.928936436551661e-05,
      "loss": 0.0263,
      "step": 145
    },
    {
      "epoch": 1.516883116883117,
      "grad_norm": 0.2770758867263794,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 0.0277,
      "step": 146
    },
    {
      "epoch": 1.5272727272727273,
      "grad_norm": 0.30176201462745667,
      "learning_rate": 1.800157297483417e-05,
      "loss": 0.0392,
      "step": 147
    },
    {
      "epoch": 1.5376623376623377,
      "grad_norm": 0.24852502346038818,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 0.0304,
      "step": 148
    },
    {
      "epoch": 1.5480519480519481,
      "grad_norm": 0.22542378306388855,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 0.0181,
      "step": 149
    },
    {
      "epoch": 1.5584415584415585,
      "grad_norm": 0.2441638559103012,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 0.0238,
      "step": 150
    },
    {
      "epoch": 1.5584415584415585,
      "eval_loss": 0.036358579993247986,
      "eval_runtime": 3.0235,
      "eval_samples_per_second": 53.581,
      "eval_steps_per_second": 13.561,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 1
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.2082999251959808e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}