{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.9741935483870967,
  "eval_steps": 500,
  "global_step": 308,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
"log_history": [ |
|
{ |
|
"epoch": 0.025806451612903226, |
|
"grad_norm": 3.5142855644226074, |
|
"learning_rate": 0.00019870129870129872, |
|
"loss": 8.9214, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.05161290322580645, |
|
"grad_norm": 3.772717237472534, |
|
"learning_rate": 0.00019740259740259742, |
|
"loss": 8.3405, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.07741935483870968, |
|
"grad_norm": 6.7029924392700195, |
|
"learning_rate": 0.00019610389610389613, |
|
"loss": 8.1379, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.1032258064516129, |
|
"grad_norm": 3.7924954891204834, |
|
"learning_rate": 0.0001948051948051948, |
|
"loss": 5.913, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.12903225806451613, |
|
"grad_norm": 4.214364051818848, |
|
"learning_rate": 0.00019350649350649354, |
|
"loss": 7.0624, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.15483870967741936, |
|
"grad_norm": 3.0830295085906982, |
|
"learning_rate": 0.00019220779220779222, |
|
"loss": 6.6031, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.18064516129032257, |
|
"grad_norm": 2.293950080871582, |
|
"learning_rate": 0.00019090909090909092, |
|
"loss": 6.1543, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.2064516129032258, |
|
"grad_norm": 2.8121275901794434, |
|
"learning_rate": 0.00018961038961038963, |
|
"loss": 5.6952, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.23225806451612904, |
|
"grad_norm": 3.2042553424835205, |
|
"learning_rate": 0.00018831168831168833, |
|
"loss": 5.6916, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.25806451612903225, |
|
"grad_norm": 3.4788341522216797, |
|
"learning_rate": 0.000187012987012987, |
|
"loss": 5.575, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.2838709677419355, |
|
"grad_norm": 1.1461117267608643, |
|
"learning_rate": 0.00018571428571428572, |
|
"loss": 5.7922, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.3096774193548387, |
|
"grad_norm": 1.7745267152786255, |
|
"learning_rate": 0.00018441558441558442, |
|
"loss": 5.5945, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.33548387096774196, |
|
"grad_norm": 1.0893975496292114, |
|
"learning_rate": 0.00018311688311688313, |
|
"loss": 6.1593, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.36129032258064514, |
|
"grad_norm": 2.169384241104126, |
|
"learning_rate": 0.00018181818181818183, |
|
"loss": 5.8243, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.3870967741935484, |
|
"grad_norm": 1.8578031063079834, |
|
"learning_rate": 0.00018051948051948054, |
|
"loss": 5.5041, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.4129032258064516, |
|
"grad_norm": 1.448111653327942, |
|
"learning_rate": 0.00017922077922077922, |
|
"loss": 5.3471, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.43870967741935485, |
|
"grad_norm": 1.828591227531433, |
|
"learning_rate": 0.00017792207792207792, |
|
"loss": 5.0863, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.4645161290322581, |
|
"grad_norm": 4.83806848526001, |
|
"learning_rate": 0.00017662337662337663, |
|
"loss": 6.5823, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.49032258064516127, |
|
"grad_norm": 2.1270275115966797, |
|
"learning_rate": 0.00017532467532467534, |
|
"loss": 5.3268, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.5161290322580645, |
|
"grad_norm": 1.42393958568573, |
|
"learning_rate": 0.00017402597402597401, |
|
"loss": 5.4203, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.5419354838709678, |
|
"grad_norm": 2.768852472305298, |
|
"learning_rate": 0.00017272727272727275, |
|
"loss": 5.3403, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.567741935483871, |
|
"grad_norm": 0.7816085815429688, |
|
"learning_rate": 0.00017142857142857143, |
|
"loss": 5.6736, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.5935483870967742, |
|
"grad_norm": 0.8368009924888611, |
|
"learning_rate": 0.00017012987012987013, |
|
"loss": 5.4177, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.6193548387096774, |
|
"grad_norm": 1.7966578006744385, |
|
"learning_rate": 0.00016883116883116884, |
|
"loss": 5.8706, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.6451612903225806, |
|
"grad_norm": 0.694948673248291, |
|
"learning_rate": 0.00016753246753246754, |
|
"loss": 5.2186, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.6709677419354839, |
|
"grad_norm": 2.0851364135742188, |
|
"learning_rate": 0.00016623376623376625, |
|
"loss": 5.452, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.6967741935483871, |
|
"grad_norm": 1.705665946006775, |
|
"learning_rate": 0.00016493506493506495, |
|
"loss": 5.5373, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.7225806451612903, |
|
"grad_norm": 1.5386658906936646, |
|
"learning_rate": 0.00016363636363636366, |
|
"loss": 5.8638, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.7483870967741936, |
|
"grad_norm": 1.3625737428665161, |
|
"learning_rate": 0.00016233766233766234, |
|
"loss": 5.2913, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.7741935483870968, |
|
"grad_norm": 1.357426643371582, |
|
"learning_rate": 0.00016103896103896104, |
|
"loss": 5.7317, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 1.1882686614990234, |
|
"learning_rate": 0.00015974025974025975, |
|
"loss": 5.9749, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.8258064516129032, |
|
"grad_norm": 1.0777816772460938, |
|
"learning_rate": 0.00015844155844155845, |
|
"loss": 5.4791, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.8516129032258064, |
|
"grad_norm": 0.7732660174369812, |
|
"learning_rate": 0.00015714285714285716, |
|
"loss": 5.9732, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.8774193548387097, |
|
"grad_norm": 1.0730479955673218, |
|
"learning_rate": 0.00015584415584415587, |
|
"loss": 5.4338, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.9032258064516129, |
|
"grad_norm": 1.4834214448928833, |
|
"learning_rate": 0.00015454545454545454, |
|
"loss": 5.1802, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.9290322580645162, |
|
"grad_norm": 2.2870066165924072, |
|
"learning_rate": 0.00015324675324675325, |
|
"loss": 6.3573, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.9548387096774194, |
|
"grad_norm": 0.935196578502655, |
|
"learning_rate": 0.00015194805194805196, |
|
"loss": 5.0633, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.9806451612903225, |
|
"grad_norm": 0.994687020778656, |
|
"learning_rate": 0.00015064935064935066, |
|
"loss": 5.8048, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 1.0064516129032257, |
|
"grad_norm": 2.274411201477051, |
|
"learning_rate": 0.00014935064935064934, |
|
"loss": 4.6835, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 1.032258064516129, |
|
"grad_norm": 1.2067323923110962, |
|
"learning_rate": 0.00014805194805194807, |
|
"loss": 5.4713, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 1.0580645161290323, |
|
"grad_norm": 0.9473636150360107, |
|
"learning_rate": 0.00014675324675324675, |
|
"loss": 5.8508, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 1.0838709677419356, |
|
"grad_norm": 5.7191996574401855, |
|
"learning_rate": 0.00014545454545454546, |
|
"loss": 4.9011, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 1.1096774193548387, |
|
"grad_norm": 2.201740026473999, |
|
"learning_rate": 0.00014415584415584416, |
|
"loss": 5.4717, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 1.135483870967742, |
|
"grad_norm": 1.0441229343414307, |
|
"learning_rate": 0.00014285714285714287, |
|
"loss": 5.5206, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 1.1612903225806452, |
|
"grad_norm": 1.0501593351364136, |
|
"learning_rate": 0.00014155844155844155, |
|
"loss": 5.2199, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 1.1870967741935483, |
|
"grad_norm": 1.1907446384429932, |
|
"learning_rate": 0.00014025974025974028, |
|
"loss": 4.8319, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 1.2129032258064516, |
|
"grad_norm": 4.133155345916748, |
|
"learning_rate": 0.00013896103896103896, |
|
"loss": 5.1049, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 1.238709677419355, |
|
"grad_norm": 1.399916172027588, |
|
"learning_rate": 0.00013766233766233766, |
|
"loss": 5.5704, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 1.2645161290322582, |
|
"grad_norm": 1.0567469596862793, |
|
"learning_rate": 0.00013636363636363637, |
|
"loss": 5.3263, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 1.2903225806451613, |
|
"grad_norm": 1.2196253538131714, |
|
"learning_rate": 0.00013506493506493507, |
|
"loss": 5.2236, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 1.3161290322580645, |
|
"grad_norm": 2.465505838394165, |
|
"learning_rate": 0.00013376623376623375, |
|
"loss": 5.0547, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 1.3419354838709676, |
|
"grad_norm": 0.9022129774093628, |
|
"learning_rate": 0.00013246753246753249, |
|
"loss": 5.8387, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 1.367741935483871, |
|
"grad_norm": 1.6524704694747925, |
|
"learning_rate": 0.0001311688311688312, |
|
"loss": 5.4351, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 1.3935483870967742, |
|
"grad_norm": 1.2133516073226929, |
|
"learning_rate": 0.00012987012987012987, |
|
"loss": 5.4708, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 1.4193548387096775, |
|
"grad_norm": 1.019903302192688, |
|
"learning_rate": 0.00012857142857142858, |
|
"loss": 5.8655, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.4451612903225808, |
|
"grad_norm": 1.1822901964187622, |
|
"learning_rate": 0.00012727272727272728, |
|
"loss": 5.3089, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 1.4709677419354839, |
|
"grad_norm": 0.8840041160583496, |
|
"learning_rate": 0.000125974025974026, |
|
"loss": 5.0657, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 1.4967741935483871, |
|
"grad_norm": 0.6745492219924927, |
|
"learning_rate": 0.00012467532467532467, |
|
"loss": 4.8004, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 1.5225806451612902, |
|
"grad_norm": 1.4586549997329712, |
|
"learning_rate": 0.0001233766233766234, |
|
"loss": 5.0572, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 1.5483870967741935, |
|
"grad_norm": 1.5153924226760864, |
|
"learning_rate": 0.00012207792207792208, |
|
"loss": 4.4382, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.5741935483870968, |
|
"grad_norm": 0.9981886148452759, |
|
"learning_rate": 0.0001207792207792208, |
|
"loss": 4.8087, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"grad_norm": 0.7403278350830078, |
|
"learning_rate": 0.00011948051948051949, |
|
"loss": 5.2025, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 1.6258064516129034, |
|
"grad_norm": 1.1075459718704224, |
|
"learning_rate": 0.0001181818181818182, |
|
"loss": 4.946, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 1.6516129032258065, |
|
"grad_norm": 0.6300385594367981, |
|
"learning_rate": 0.00011688311688311689, |
|
"loss": 5.6817, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 1.6774193548387095, |
|
"grad_norm": 0.8701953291893005, |
|
"learning_rate": 0.00011558441558441559, |
|
"loss": 5.6561, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.7032258064516128, |
|
"grad_norm": 1.133817434310913, |
|
"learning_rate": 0.00011428571428571428, |
|
"loss": 5.6528, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 1.729032258064516, |
|
"grad_norm": 1.6048352718353271, |
|
"learning_rate": 0.000112987012987013, |
|
"loss": 4.8602, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 1.7548387096774194, |
|
"grad_norm": 1.2783055305480957, |
|
"learning_rate": 0.00011168831168831168, |
|
"loss": 4.1662, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 1.7806451612903227, |
|
"grad_norm": 1.6137133836746216, |
|
"learning_rate": 0.0001103896103896104, |
|
"loss": 5.2295, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 1.8064516129032258, |
|
"grad_norm": 0.6374461054801941, |
|
"learning_rate": 0.00010909090909090909, |
|
"loss": 5.3992, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.832258064516129, |
|
"grad_norm": 0.8323061466217041, |
|
"learning_rate": 0.0001077922077922078, |
|
"loss": 5.1712, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 1.8580645161290321, |
|
"grad_norm": 0.6804484724998474, |
|
"learning_rate": 0.00010649350649350649, |
|
"loss": 5.3369, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 1.8838709677419354, |
|
"grad_norm": 0.9051455855369568, |
|
"learning_rate": 0.0001051948051948052, |
|
"loss": 5.3726, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 1.9096774193548387, |
|
"grad_norm": 0.5890415906906128, |
|
"learning_rate": 0.00010389610389610389, |
|
"loss": 5.4903, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 1.935483870967742, |
|
"grad_norm": 0.5433252453804016, |
|
"learning_rate": 0.00010259740259740261, |
|
"loss": 5.4437, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.9612903225806453, |
|
"grad_norm": 1.353121042251587, |
|
"learning_rate": 0.0001012987012987013, |
|
"loss": 4.1137, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 1.9870967741935484, |
|
"grad_norm": 1.0199609994888306, |
|
"learning_rate": 0.0001, |
|
"loss": 5.2558, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 2.0129032258064514, |
|
"grad_norm": 0.6021209359169006, |
|
"learning_rate": 9.870129870129871e-05, |
|
"loss": 5.5338, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 2.0387096774193547, |
|
"grad_norm": 0.9102515578269958, |
|
"learning_rate": 9.74025974025974e-05, |
|
"loss": 4.5988, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 2.064516129032258, |
|
"grad_norm": 0.7101506590843201, |
|
"learning_rate": 9.610389610389611e-05, |
|
"loss": 5.2622, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 2.0903225806451613, |
|
"grad_norm": 0.631308913230896, |
|
"learning_rate": 9.480519480519481e-05, |
|
"loss": 5.4718, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 2.1161290322580646, |
|
"grad_norm": 0.7462102770805359, |
|
"learning_rate": 9.35064935064935e-05, |
|
"loss": 5.0117, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 2.141935483870968, |
|
"grad_norm": 0.6367154717445374, |
|
"learning_rate": 9.220779220779221e-05, |
|
"loss": 4.9968, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 2.167741935483871, |
|
"grad_norm": 0.6959227323532104, |
|
"learning_rate": 9.090909090909092e-05, |
|
"loss": 5.6022, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 2.193548387096774, |
|
"grad_norm": 0.6119377613067627, |
|
"learning_rate": 8.961038961038961e-05, |
|
"loss": 4.9405, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 2.2193548387096773, |
|
"grad_norm": 0.6411863565444946, |
|
"learning_rate": 8.831168831168831e-05, |
|
"loss": 5.1413, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 2.2451612903225806, |
|
"grad_norm": 1.615324854850769, |
|
"learning_rate": 8.701298701298701e-05, |
|
"loss": 4.7308, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 2.270967741935484, |
|
"grad_norm": 0.5708146691322327, |
|
"learning_rate": 8.571428571428571e-05, |
|
"loss": 5.4497, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 2.296774193548387, |
|
"grad_norm": 0.8313891291618347, |
|
"learning_rate": 8.441558441558442e-05, |
|
"loss": 5.3687, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 2.3225806451612905, |
|
"grad_norm": 1.1731419563293457, |
|
"learning_rate": 8.311688311688312e-05, |
|
"loss": 5.3314, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 2.3483870967741938, |
|
"grad_norm": 0.7381497025489807, |
|
"learning_rate": 8.181818181818183e-05, |
|
"loss": 5.1178, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 2.3741935483870966, |
|
"grad_norm": 0.6883618831634521, |
|
"learning_rate": 8.051948051948052e-05, |
|
"loss": 5.9266, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"grad_norm": 0.735080361366272, |
|
"learning_rate": 7.922077922077923e-05, |
|
"loss": 4.7453, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 2.425806451612903, |
|
"grad_norm": 0.6184589862823486, |
|
"learning_rate": 7.792207792207793e-05, |
|
"loss": 5.0924, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 2.4516129032258065, |
|
"grad_norm": 0.6305899024009705, |
|
"learning_rate": 7.662337662337662e-05, |
|
"loss": 5.1167, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 2.47741935483871, |
|
"grad_norm": 0.815334677696228, |
|
"learning_rate": 7.532467532467533e-05, |
|
"loss": 5.4719, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 2.5032258064516126, |
|
"grad_norm": 0.6805923581123352, |
|
"learning_rate": 7.402597402597404e-05, |
|
"loss": 4.4392, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 2.5290322580645164, |
|
"grad_norm": 0.6633741855621338, |
|
"learning_rate": 7.272727272727273e-05, |
|
"loss": 5.1926, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 2.554838709677419, |
|
"grad_norm": 0.6042450666427612, |
|
"learning_rate": 7.142857142857143e-05, |
|
"loss": 4.7079, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 2.5806451612903225, |
|
"grad_norm": 0.5892207622528076, |
|
"learning_rate": 7.012987012987014e-05, |
|
"loss": 4.9938, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 2.606451612903226, |
|
"grad_norm": 0.6753908395767212, |
|
"learning_rate": 6.883116883116883e-05, |
|
"loss": 5.0097, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 2.632258064516129, |
|
"grad_norm": 0.5345973968505859, |
|
"learning_rate": 6.753246753246754e-05, |
|
"loss": 5.1074, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 2.6580645161290324, |
|
"grad_norm": 0.4924313724040985, |
|
"learning_rate": 6.623376623376624e-05, |
|
"loss": 5.0817, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 2.6838709677419352, |
|
"grad_norm": 0.5989976525306702, |
|
"learning_rate": 6.493506493506494e-05, |
|
"loss": 4.2039, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 2.709677419354839, |
|
"grad_norm": 1.0730255842208862, |
|
"learning_rate": 6.363636363636364e-05, |
|
"loss": 4.2224, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 2.735483870967742, |
|
"grad_norm": 0.991038978099823, |
|
"learning_rate": 6.233766233766233e-05, |
|
"loss": 4.8869, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 2.761290322580645, |
|
"grad_norm": 0.5952357053756714, |
|
"learning_rate": 6.103896103896104e-05, |
|
"loss": 5.1753, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 2.7870967741935484, |
|
"grad_norm": 0.6798732876777649, |
|
"learning_rate": 5.9740259740259744e-05, |
|
"loss": 4.3396, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 2.8129032258064517, |
|
"grad_norm": 0.5361295342445374, |
|
"learning_rate": 5.844155844155844e-05, |
|
"loss": 5.2857, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 2.838709677419355, |
|
"grad_norm": 0.6326772570610046, |
|
"learning_rate": 5.714285714285714e-05, |
|
"loss": 4.7385, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.864516129032258, |
|
"grad_norm": 0.639589786529541, |
|
"learning_rate": 5.584415584415584e-05, |
|
"loss": 4.3803, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 2.8903225806451616, |
|
"grad_norm": 0.7248474955558777, |
|
"learning_rate": 5.4545454545454546e-05, |
|
"loss": 4.6668, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 2.9161290322580644, |
|
"grad_norm": 0.7551538348197937, |
|
"learning_rate": 5.3246753246753245e-05, |
|
"loss": 5.016, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 2.9419354838709677, |
|
"grad_norm": 0.4990728497505188, |
|
"learning_rate": 5.1948051948051944e-05, |
|
"loss": 5.0843, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 2.967741935483871, |
|
"grad_norm": 0.5729503035545349, |
|
"learning_rate": 5.064935064935065e-05, |
|
"loss": 4.573, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.9935483870967743, |
|
"grad_norm": 1.4705737829208374, |
|
"learning_rate": 4.9350649350649355e-05, |
|
"loss": 4.9812, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 3.0193548387096776, |
|
"grad_norm": 0.7053755521774292, |
|
"learning_rate": 4.8051948051948054e-05, |
|
"loss": 4.7651, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 3.0451612903225804, |
|
"grad_norm": 0.6120907664299011, |
|
"learning_rate": 4.675324675324675e-05, |
|
"loss": 5.2057, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 3.0709677419354837, |
|
"grad_norm": 0.6173492074012756, |
|
"learning_rate": 4.545454545454546e-05, |
|
"loss": 5.0342, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 3.096774193548387, |
|
"grad_norm": 0.5435605049133301, |
|
"learning_rate": 4.415584415584416e-05, |
|
"loss": 4.9538, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 3.1225806451612903, |
|
"grad_norm": 1.4004778861999512, |
|
"learning_rate": 4.2857142857142856e-05, |
|
"loss": 4.3856, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 3.1483870967741936, |
|
"grad_norm": 1.1300957202911377, |
|
"learning_rate": 4.155844155844156e-05, |
|
"loss": 5.0085, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 3.174193548387097, |
|
"grad_norm": 0.6826758980751038, |
|
"learning_rate": 4.025974025974026e-05, |
|
"loss": 5.3525, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"grad_norm": 0.6162336468696594, |
|
"learning_rate": 3.8961038961038966e-05, |
|
"loss": 5.1639, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 3.225806451612903, |
|
"grad_norm": 0.7551366686820984, |
|
"learning_rate": 3.7662337662337665e-05, |
|
"loss": 5.1913, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 3.2516129032258063, |
|
"grad_norm": 0.5899360775947571, |
|
"learning_rate": 3.6363636363636364e-05, |
|
"loss": 5.022, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 3.2774193548387096, |
|
"grad_norm": 0.6666110157966614, |
|
"learning_rate": 3.506493506493507e-05, |
|
"loss": 4.697, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 3.303225806451613, |
|
"grad_norm": 0.5686184167861938, |
|
"learning_rate": 3.376623376623377e-05, |
|
"loss": 4.8758, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 3.329032258064516, |
|
"grad_norm": 0.7564727663993835, |
|
"learning_rate": 3.246753246753247e-05, |
|
"loss": 4.9468, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 3.3548387096774195, |
|
"grad_norm": 0.5773691534996033, |
|
"learning_rate": 3.1168831168831166e-05, |
|
"loss": 5.1122, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 3.3806451612903228, |
|
"grad_norm": 0.642393171787262, |
|
"learning_rate": 2.9870129870129872e-05, |
|
"loss": 5.233, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 3.4064516129032256, |
|
"grad_norm": 0.6513245701789856, |
|
"learning_rate": 2.857142857142857e-05, |
|
"loss": 4.2779, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 3.432258064516129, |
|
"grad_norm": 0.8267136812210083, |
|
"learning_rate": 2.7272727272727273e-05, |
|
"loss": 5.2998, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 3.458064516129032, |
|
"grad_norm": 0.5494163632392883, |
|
"learning_rate": 2.5974025974025972e-05, |
|
"loss": 5.4667, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 3.4838709677419355, |
|
"grad_norm": 0.7057967782020569, |
|
"learning_rate": 2.4675324675324678e-05, |
|
"loss": 5.1893, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 3.509677419354839, |
|
"grad_norm": 0.5400364398956299, |
|
"learning_rate": 2.3376623376623376e-05, |
|
"loss": 4.9972, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 3.535483870967742, |
|
"grad_norm": 0.4732670485973358, |
|
"learning_rate": 2.207792207792208e-05, |
|
"loss": 4.896, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 3.5612903225806454, |
|
"grad_norm": 0.5432953834533691, |
|
"learning_rate": 2.077922077922078e-05, |
|
"loss": 5.0452, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 3.587096774193548, |
|
"grad_norm": 0.5461270213127136, |
|
"learning_rate": 1.9480519480519483e-05, |
|
"loss": 4.7124, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 3.6129032258064515, |
|
"grad_norm": 0.6231604218482971, |
|
"learning_rate": 1.8181818181818182e-05, |
|
"loss": 4.3152, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 3.638709677419355, |
|
"grad_norm": 0.42820078134536743, |
|
"learning_rate": 1.6883116883116884e-05, |
|
"loss": 4.9029, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 3.664516129032258, |
|
"grad_norm": 0.7605751752853394, |
|
"learning_rate": 1.5584415584415583e-05, |
|
"loss": 4.8563, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 3.6903225806451614, |
|
"grad_norm": 0.8210684061050415, |
|
"learning_rate": 1.4285714285714285e-05, |
|
"loss": 3.7461, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 3.7161290322580647, |
|
"grad_norm": 0.6016200184822083, |
|
"learning_rate": 1.2987012987012986e-05, |
|
"loss": 4.7194, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 3.741935483870968, |
|
"grad_norm": 0.7171183228492737, |
|
"learning_rate": 1.1688311688311688e-05, |
|
"loss": 4.5957, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 3.767741935483871, |
|
"grad_norm": 0.6558433175086975, |
|
"learning_rate": 1.038961038961039e-05, |
|
"loss": 4.5994, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 3.793548387096774, |
|
"grad_norm": 0.5295835733413696, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 5.5671, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 3.8193548387096774, |
|
"grad_norm": 0.6055201292037964, |
|
"learning_rate": 7.792207792207792e-06, |
|
"loss": 5.0694, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 3.8451612903225807, |
|
"grad_norm": 0.6041186451911926, |
|
"learning_rate": 6.493506493506493e-06, |
|
"loss": 5.0007, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 3.870967741935484, |
|
"grad_norm": 0.7281818985939026, |
|
"learning_rate": 5.194805194805195e-06, |
|
"loss": 4.3153, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 3.896774193548387, |
|
"grad_norm": 0.4925851821899414, |
|
"learning_rate": 3.896103896103896e-06, |
|
"loss": 5.5631, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 3.9225806451612906, |
|
"grad_norm": 0.6916934847831726, |
|
"learning_rate": 2.5974025974025976e-06, |
|
"loss": 5.3932, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 3.9483870967741934, |
|
"grad_norm": 0.6561426520347595, |
|
"learning_rate": 1.2987012987012988e-06, |
|
"loss": 5.3373, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 3.9741935483870967, |
|
"grad_norm": 0.6102042198181152, |
|
"learning_rate": 0.0, |
|
"loss": 5.1064, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 3.9741935483870967, |
|
"step": 308, |
|
"total_flos": 723765039226044.0, |
|
"train_loss": 5.270187915145577, |
|
"train_runtime": 746.2778, |
|
"train_samples_per_second": 3.318, |
|
"train_steps_per_second": 0.413 |
|
} |
|
], |
  "logging_steps": 2,
  "max_steps": 308,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 723765039226044.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}