diff --git "a/trainer_state.json" "b/trainer_state.json" new file mode 100644--- /dev/null +++ "b/trainer_state.json" @@ -0,0 +1,40831 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 1.0, + "eval_steps": 500, + "global_step": 5827, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.00017161489617298782, + "grad_norm": 30.668089447930665, + "learning_rate": 1.142857142857143e-07, + "loss": 4.5603, + "step": 1 + }, + { + "epoch": 0.00034322979234597563, + "grad_norm": 25.93890164506215, + "learning_rate": 2.285714285714286e-07, + "loss": 3.8759, + "step": 2 + }, + { + "epoch": 0.0005148446885189634, + "grad_norm": 36.96142326314757, + "learning_rate": 3.428571428571429e-07, + "loss": 4.7173, + "step": 3 + }, + { + "epoch": 0.0006864595846919513, + "grad_norm": 34.785801715634655, + "learning_rate": 4.571428571428572e-07, + "loss": 4.8091, + "step": 4 + }, + { + "epoch": 0.000858074480864939, + "grad_norm": 28.830290581224215, + "learning_rate": 5.714285714285715e-07, + "loss": 4.1964, + "step": 5 + }, + { + "epoch": 0.0010296893770379268, + "grad_norm": 22.707707298395775, + "learning_rate": 6.857142857142858e-07, + "loss": 4.1035, + "step": 6 + }, + { + "epoch": 0.0012013042732109147, + "grad_norm": 16.854320496736147, + "learning_rate": 8.000000000000001e-07, + "loss": 3.5957, + "step": 7 + }, + { + "epoch": 0.0013729191693839025, + "grad_norm": 26.88002151772228, + "learning_rate": 9.142857142857144e-07, + "loss": 4.4156, + "step": 8 + }, + { + "epoch": 0.0015445340655568904, + "grad_norm": 30.606793141390057, + "learning_rate": 1.0285714285714286e-06, + "loss": 4.5317, + "step": 9 + }, + { + "epoch": 0.001716148961729878, + "grad_norm": 29.070794542104583, + "learning_rate": 1.142857142857143e-06, + "loss": 4.4011, + "step": 10 + }, + { + "epoch": 0.001887763857902866, + "grad_norm": 29.849593762767654, + "learning_rate": 1.2571428571428573e-06, + "loss": 4.5002, + "step": 11 + }, + { + "epoch": 0.0020593787540758536, + "grad_norm": 24.231850340704394, + "learning_rate": 1.3714285714285717e-06, + "loss": 4.1261, + "step": 12 + }, + { + "epoch": 0.0022309936502488414, + "grad_norm": 21.178239012309003, + "learning_rate": 1.4857142857142858e-06, + "loss": 4.0331, + "step": 13 + }, + { + "epoch": 0.0024026085464218293, + "grad_norm": 17.404912868999176, + "learning_rate": 1.6000000000000001e-06, + "loss": 4.0249, + "step": 14 + }, + { + "epoch": 0.002574223442594817, + "grad_norm": 18.87587660804914, + "learning_rate": 1.7142857142857145e-06, + "loss": 3.9221, + "step": 15 + }, + { + "epoch": 0.002745838338767805, + "grad_norm": 13.678354367731275, + "learning_rate": 1.8285714285714288e-06, + "loss": 3.5854, + "step": 16 + }, + { + "epoch": 0.002917453234940793, + "grad_norm": 17.11526727983393, + "learning_rate": 1.942857142857143e-06, + "loss": 3.6071, + "step": 17 + }, + { + "epoch": 0.003089068131113781, + "grad_norm": 23.592418185912088, + "learning_rate": 2.0571428571428573e-06, + "loss": 3.444, + "step": 18 + }, + { + "epoch": 0.0032606830272867687, + "grad_norm": 14.012203304419085, + "learning_rate": 2.1714285714285716e-06, + "loss": 3.4874, + "step": 19 + }, + { + "epoch": 0.003432297923459756, + "grad_norm": 15.458808624656582, + "learning_rate": 2.285714285714286e-06, + "loss": 3.5889, + "step": 20 + }, + { + "epoch": 0.003603912819632744, + "grad_norm": 11.584259212284152, + "learning_rate": 2.4000000000000003e-06, + "loss": 3.3634, + "step": 21 + }, + { + "epoch": 
0.003775527715805732, + "grad_norm": 14.249117085121204, + "learning_rate": 2.5142857142857147e-06, + "loss": 3.6045, + "step": 22 + }, + { + "epoch": 0.00394714261197872, + "grad_norm": 14.355055747286821, + "learning_rate": 2.6285714285714286e-06, + "loss": 3.4743, + "step": 23 + }, + { + "epoch": 0.004118757508151707, + "grad_norm": 15.986607989434727, + "learning_rate": 2.7428571428571433e-06, + "loss": 3.5071, + "step": 24 + }, + { + "epoch": 0.004290372404324695, + "grad_norm": 15.009222302006576, + "learning_rate": 2.8571428571428573e-06, + "loss": 3.4385, + "step": 25 + }, + { + "epoch": 0.004461987300497683, + "grad_norm": 15.00054408070156, + "learning_rate": 2.9714285714285716e-06, + "loss": 3.6082, + "step": 26 + }, + { + "epoch": 0.004633602196670671, + "grad_norm": 26.73604290038376, + "learning_rate": 3.085714285714286e-06, + "loss": 3.5343, + "step": 27 + }, + { + "epoch": 0.004805217092843659, + "grad_norm": 14.263684436385656, + "learning_rate": 3.2000000000000003e-06, + "loss": 3.1143, + "step": 28 + }, + { + "epoch": 0.0049768319890166465, + "grad_norm": 16.56950614762184, + "learning_rate": 3.314285714285714e-06, + "loss": 3.035, + "step": 29 + }, + { + "epoch": 0.005148446885189634, + "grad_norm": 14.087675339145806, + "learning_rate": 3.428571428571429e-06, + "loss": 3.1071, + "step": 30 + }, + { + "epoch": 0.005320061781362622, + "grad_norm": 12.840712423623462, + "learning_rate": 3.542857142857143e-06, + "loss": 3.0384, + "step": 31 + }, + { + "epoch": 0.00549167667753561, + "grad_norm": 17.39947494853309, + "learning_rate": 3.6571428571428576e-06, + "loss": 3.0405, + "step": 32 + }, + { + "epoch": 0.005663291573708598, + "grad_norm": 11.505629537032268, + "learning_rate": 3.771428571428572e-06, + "loss": 3.0336, + "step": 33 + }, + { + "epoch": 0.005834906469881586, + "grad_norm": 21.545273331248218, + "learning_rate": 3.885714285714286e-06, + "loss": 2.8799, + "step": 34 + }, + { + "epoch": 0.006006521366054574, + "grad_norm": 9.353883606209362, + "learning_rate": 4.000000000000001e-06, + "loss": 2.9076, + "step": 35 + }, + { + "epoch": 0.006178136262227562, + "grad_norm": 10.617631469620065, + "learning_rate": 4.114285714285715e-06, + "loss": 2.8172, + "step": 36 + }, + { + "epoch": 0.0063497511584005495, + "grad_norm": 11.770045423370412, + "learning_rate": 4.228571428571429e-06, + "loss": 2.8444, + "step": 37 + }, + { + "epoch": 0.006521366054573537, + "grad_norm": 11.645109020898436, + "learning_rate": 4.342857142857143e-06, + "loss": 2.8348, + "step": 38 + }, + { + "epoch": 0.006692980950746525, + "grad_norm": 10.90549455003319, + "learning_rate": 4.457142857142858e-06, + "loss": 2.6774, + "step": 39 + }, + { + "epoch": 0.006864595846919512, + "grad_norm": 14.637239016287888, + "learning_rate": 4.571428571428572e-06, + "loss": 2.7251, + "step": 40 + }, + { + "epoch": 0.0070362107430925, + "grad_norm": 13.431102067923531, + "learning_rate": 4.685714285714286e-06, + "loss": 2.6214, + "step": 41 + }, + { + "epoch": 0.007207825639265488, + "grad_norm": 8.653917935373755, + "learning_rate": 4.800000000000001e-06, + "loss": 2.6324, + "step": 42 + }, + { + "epoch": 0.007379440535438476, + "grad_norm": 16.187652389258293, + "learning_rate": 4.9142857142857145e-06, + "loss": 2.5056, + "step": 43 + }, + { + "epoch": 0.007551055431611464, + "grad_norm": 13.482647329846788, + "learning_rate": 5.028571428571429e-06, + "loss": 2.5724, + "step": 44 + }, + { + "epoch": 0.0077226703277844515, + "grad_norm": 12.801695593411296, + "learning_rate": 5.142857142857142e-06, + 
"loss": 2.2592, + "step": 45 + }, + { + "epoch": 0.00789428522395744, + "grad_norm": 9.137131346103367, + "learning_rate": 5.257142857142857e-06, + "loss": 2.5445, + "step": 46 + }, + { + "epoch": 0.008065900120130427, + "grad_norm": 33.6158256121922, + "learning_rate": 5.371428571428572e-06, + "loss": 2.3522, + "step": 47 + }, + { + "epoch": 0.008237515016303414, + "grad_norm": 25.862161525403106, + "learning_rate": 5.485714285714287e-06, + "loss": 2.4477, + "step": 48 + }, + { + "epoch": 0.008409129912476403, + "grad_norm": 12.286103253639418, + "learning_rate": 5.600000000000001e-06, + "loss": 2.407, + "step": 49 + }, + { + "epoch": 0.00858074480864939, + "grad_norm": 17.697613851103835, + "learning_rate": 5.7142857142857145e-06, + "loss": 2.4449, + "step": 50 + }, + { + "epoch": 0.008752359704822379, + "grad_norm": 15.87446154785113, + "learning_rate": 5.828571428571429e-06, + "loss": 2.5336, + "step": 51 + }, + { + "epoch": 0.008923974600995366, + "grad_norm": 8.762927637790245, + "learning_rate": 5.942857142857143e-06, + "loss": 2.3772, + "step": 52 + }, + { + "epoch": 0.009095589497168355, + "grad_norm": 13.553911318253906, + "learning_rate": 6.057142857142858e-06, + "loss": 2.512, + "step": 53 + }, + { + "epoch": 0.009267204393341342, + "grad_norm": 13.707503059814044, + "learning_rate": 6.171428571428572e-06, + "loss": 2.7997, + "step": 54 + }, + { + "epoch": 0.00943881928951433, + "grad_norm": 6.5694772116241476, + "learning_rate": 6.285714285714286e-06, + "loss": 2.552, + "step": 55 + }, + { + "epoch": 0.009610434185687317, + "grad_norm": 7.882295102416172, + "learning_rate": 6.4000000000000006e-06, + "loss": 2.6071, + "step": 56 + }, + { + "epoch": 0.009782049081860306, + "grad_norm": 6.763526523613429, + "learning_rate": 6.514285714285715e-06, + "loss": 2.3997, + "step": 57 + }, + { + "epoch": 0.009953663978033293, + "grad_norm": 7.0746264269142385, + "learning_rate": 6.628571428571428e-06, + "loss": 2.2753, + "step": 58 + }, + { + "epoch": 0.010125278874206282, + "grad_norm": 4.734415443054102, + "learning_rate": 6.742857142857143e-06, + "loss": 2.4382, + "step": 59 + }, + { + "epoch": 0.010296893770379269, + "grad_norm": 32.23294843067819, + "learning_rate": 6.857142857142858e-06, + "loss": 2.4524, + "step": 60 + }, + { + "epoch": 0.010468508666552257, + "grad_norm": 35.15987114461738, + "learning_rate": 6.971428571428573e-06, + "loss": 2.26, + "step": 61 + }, + { + "epoch": 0.010640123562725244, + "grad_norm": 11.266343217208767, + "learning_rate": 7.085714285714286e-06, + "loss": 2.5029, + "step": 62 + }, + { + "epoch": 0.010811738458898233, + "grad_norm": 6.319958231301672, + "learning_rate": 7.2000000000000005e-06, + "loss": 2.4196, + "step": 63 + }, + { + "epoch": 0.01098335335507122, + "grad_norm": 8.529824841325285, + "learning_rate": 7.314285714285715e-06, + "loss": 2.637, + "step": 64 + }, + { + "epoch": 0.011154968251244207, + "grad_norm": 8.812275589036922, + "learning_rate": 7.428571428571429e-06, + "loss": 2.4787, + "step": 65 + }, + { + "epoch": 0.011326583147417196, + "grad_norm": 9.688015408033584, + "learning_rate": 7.542857142857144e-06, + "loss": 2.2467, + "step": 66 + }, + { + "epoch": 0.011498198043590183, + "grad_norm": 9.16959427521578, + "learning_rate": 7.657142857142858e-06, + "loss": 2.4296, + "step": 67 + }, + { + "epoch": 0.011669812939763172, + "grad_norm": 7.62012774598611, + "learning_rate": 7.771428571428572e-06, + "loss": 2.3844, + "step": 68 + }, + { + "epoch": 0.011841427835936159, + "grad_norm": 7.769904677161206, + "learning_rate": 
7.885714285714286e-06, + "loss": 2.372, + "step": 69 + }, + { + "epoch": 0.012013042732109147, + "grad_norm": 12.849978359761737, + "learning_rate": 8.000000000000001e-06, + "loss": 2.3955, + "step": 70 + }, + { + "epoch": 0.012184657628282134, + "grad_norm": 12.718571975350164, + "learning_rate": 8.114285714285715e-06, + "loss": 2.4096, + "step": 71 + }, + { + "epoch": 0.012356272524455123, + "grad_norm": 7.981698156226107, + "learning_rate": 8.22857142857143e-06, + "loss": 2.517, + "step": 72 + }, + { + "epoch": 0.01252788742062811, + "grad_norm": 5.642451140300902, + "learning_rate": 8.342857142857143e-06, + "loss": 2.5422, + "step": 73 + }, + { + "epoch": 0.012699502316801099, + "grad_norm": 14.312535444678016, + "learning_rate": 8.457142857142859e-06, + "loss": 2.4205, + "step": 74 + }, + { + "epoch": 0.012871117212974086, + "grad_norm": 8.508921817132261, + "learning_rate": 8.571428571428571e-06, + "loss": 2.4116, + "step": 75 + }, + { + "epoch": 0.013042732109147075, + "grad_norm": 6.281352630353262, + "learning_rate": 8.685714285714287e-06, + "loss": 2.315, + "step": 76 + }, + { + "epoch": 0.013214347005320062, + "grad_norm": 5.748376635296371, + "learning_rate": 8.8e-06, + "loss": 2.4858, + "step": 77 + }, + { + "epoch": 0.01338596190149305, + "grad_norm": 5.939096917118166, + "learning_rate": 8.914285714285716e-06, + "loss": 2.6016, + "step": 78 + }, + { + "epoch": 0.013557576797666037, + "grad_norm": 5.907623734872398, + "learning_rate": 9.028571428571428e-06, + "loss": 2.3556, + "step": 79 + }, + { + "epoch": 0.013729191693839024, + "grad_norm": 5.028447859126793, + "learning_rate": 9.142857142857144e-06, + "loss": 2.2946, + "step": 80 + }, + { + "epoch": 0.013900806590012013, + "grad_norm": 8.371317073248003, + "learning_rate": 9.257142857142858e-06, + "loss": 2.1278, + "step": 81 + }, + { + "epoch": 0.014072421486185, + "grad_norm": 4.831054716031191, + "learning_rate": 9.371428571428572e-06, + "loss": 2.4365, + "step": 82 + }, + { + "epoch": 0.014244036382357989, + "grad_norm": 5.63478432227459, + "learning_rate": 9.485714285714287e-06, + "loss": 2.4448, + "step": 83 + }, + { + "epoch": 0.014415651278530976, + "grad_norm": 4.044331484487414, + "learning_rate": 9.600000000000001e-06, + "loss": 2.152, + "step": 84 + }, + { + "epoch": 0.014587266174703965, + "grad_norm": 6.395798824205685, + "learning_rate": 9.714285714285715e-06, + "loss": 2.0984, + "step": 85 + }, + { + "epoch": 0.014758881070876952, + "grad_norm": 9.457886889171613, + "learning_rate": 9.828571428571429e-06, + "loss": 2.1834, + "step": 86 + }, + { + "epoch": 0.01493049596704994, + "grad_norm": 6.977054550828756, + "learning_rate": 9.942857142857145e-06, + "loss": 2.0698, + "step": 87 + }, + { + "epoch": 0.015102110863222927, + "grad_norm": 4.6302943156432965, + "learning_rate": 1.0057142857142859e-05, + "loss": 2.2912, + "step": 88 + }, + { + "epoch": 0.015273725759395916, + "grad_norm": 4.637121728781503, + "learning_rate": 1.0171428571428573e-05, + "loss": 2.4046, + "step": 89 + }, + { + "epoch": 0.015445340655568903, + "grad_norm": 4.969664179177465, + "learning_rate": 1.0285714285714285e-05, + "loss": 2.479, + "step": 90 + }, + { + "epoch": 0.015616955551741892, + "grad_norm": 4.676793232734069, + "learning_rate": 1.04e-05, + "loss": 2.4297, + "step": 91 + }, + { + "epoch": 0.01578857044791488, + "grad_norm": 9.070247616235994, + "learning_rate": 1.0514285714285714e-05, + "loss": 2.2559, + "step": 92 + }, + { + "epoch": 0.015960185344087868, + "grad_norm": 4.5134907813658955, + "learning_rate": 
1.062857142857143e-05, + "loss": 2.2815, + "step": 93 + }, + { + "epoch": 0.016131800240260855, + "grad_norm": 4.632550177557325, + "learning_rate": 1.0742857142857144e-05, + "loss": 2.2171, + "step": 94 + }, + { + "epoch": 0.01630341513643384, + "grad_norm": 4.483767211266106, + "learning_rate": 1.0857142857142858e-05, + "loss": 2.0609, + "step": 95 + }, + { + "epoch": 0.01647503003260683, + "grad_norm": 5.813321926149965, + "learning_rate": 1.0971428571428573e-05, + "loss": 2.0531, + "step": 96 + }, + { + "epoch": 0.01664664492877982, + "grad_norm": 4.691478793006599, + "learning_rate": 1.1085714285714286e-05, + "loss": 2.4327, + "step": 97 + }, + { + "epoch": 0.016818259824952806, + "grad_norm": 16.13534090304662, + "learning_rate": 1.1200000000000001e-05, + "loss": 2.2145, + "step": 98 + }, + { + "epoch": 0.016989874721125793, + "grad_norm": 4.619099358239232, + "learning_rate": 1.1314285714285715e-05, + "loss": 2.2148, + "step": 99 + }, + { + "epoch": 0.01716148961729878, + "grad_norm": 6.83011343785692, + "learning_rate": 1.1428571428571429e-05, + "loss": 2.2016, + "step": 100 + }, + { + "epoch": 0.01733310451347177, + "grad_norm": 4.285660770520661, + "learning_rate": 1.1542857142857145e-05, + "loss": 2.2128, + "step": 101 + }, + { + "epoch": 0.017504719409644758, + "grad_norm": 6.34310824249887, + "learning_rate": 1.1657142857142859e-05, + "loss": 2.2913, + "step": 102 + }, + { + "epoch": 0.017676334305817745, + "grad_norm": 5.574044081010341, + "learning_rate": 1.177142857142857e-05, + "loss": 2.5, + "step": 103 + }, + { + "epoch": 0.01784794920199073, + "grad_norm": 7.788022677364951, + "learning_rate": 1.1885714285714286e-05, + "loss": 1.9828, + "step": 104 + }, + { + "epoch": 0.018019564098163722, + "grad_norm": 7.23517441529495, + "learning_rate": 1.2e-05, + "loss": 2.3067, + "step": 105 + }, + { + "epoch": 0.01819117899433671, + "grad_norm": 7.056350065099166, + "learning_rate": 1.2114285714285716e-05, + "loss": 2.1428, + "step": 106 + }, + { + "epoch": 0.018362793890509696, + "grad_norm": 5.066019952689693, + "learning_rate": 1.222857142857143e-05, + "loss": 2.3834, + "step": 107 + }, + { + "epoch": 0.018534408786682683, + "grad_norm": 5.033652563830539, + "learning_rate": 1.2342857142857144e-05, + "loss": 2.4106, + "step": 108 + }, + { + "epoch": 0.018706023682855673, + "grad_norm": 8.82881241227772, + "learning_rate": 1.245714285714286e-05, + "loss": 2.1689, + "step": 109 + }, + { + "epoch": 0.01887763857902866, + "grad_norm": 4.020974319459271, + "learning_rate": 1.2571428571428572e-05, + "loss": 2.0145, + "step": 110 + }, + { + "epoch": 0.019049253475201647, + "grad_norm": 5.151370367980264, + "learning_rate": 1.2685714285714286e-05, + "loss": 2.5154, + "step": 111 + }, + { + "epoch": 0.019220868371374634, + "grad_norm": 11.56478613792912, + "learning_rate": 1.2800000000000001e-05, + "loss": 2.1864, + "step": 112 + }, + { + "epoch": 0.01939248326754762, + "grad_norm": 5.167901030570373, + "learning_rate": 1.2914285714285715e-05, + "loss": 2.3358, + "step": 113 + }, + { + "epoch": 0.019564098163720612, + "grad_norm": 9.787831322651368, + "learning_rate": 1.302857142857143e-05, + "loss": 2.1912, + "step": 114 + }, + { + "epoch": 0.0197357130598936, + "grad_norm": 5.234233774052135, + "learning_rate": 1.3142857142857145e-05, + "loss": 2.1578, + "step": 115 + }, + { + "epoch": 0.019907327956066586, + "grad_norm": 5.664095721805089, + "learning_rate": 1.3257142857142857e-05, + "loss": 2.381, + "step": 116 + }, + { + "epoch": 0.020078942852239573, + "grad_norm": 
5.977125695283194, + "learning_rate": 1.3371428571428572e-05, + "loss": 2.1459, + "step": 117 + }, + { + "epoch": 0.020250557748412563, + "grad_norm": 5.007933101525984, + "learning_rate": 1.3485714285714286e-05, + "loss": 2.283, + "step": 118 + }, + { + "epoch": 0.02042217264458555, + "grad_norm": 7.801138402645364, + "learning_rate": 1.3600000000000002e-05, + "loss": 2.169, + "step": 119 + }, + { + "epoch": 0.020593787540758537, + "grad_norm": 9.627003751448669, + "learning_rate": 1.3714285714285716e-05, + "loss": 2.1238, + "step": 120 + }, + { + "epoch": 0.020765402436931524, + "grad_norm": 6.201321750691995, + "learning_rate": 1.382857142857143e-05, + "loss": 2.2845, + "step": 121 + }, + { + "epoch": 0.020937017333104515, + "grad_norm": 6.739802120345924, + "learning_rate": 1.3942857142857145e-05, + "loss": 2.0094, + "step": 122 + }, + { + "epoch": 0.021108632229277502, + "grad_norm": 6.140016667915861, + "learning_rate": 1.4057142857142858e-05, + "loss": 1.9595, + "step": 123 + }, + { + "epoch": 0.02128024712545049, + "grad_norm": 9.847414600678896, + "learning_rate": 1.4171428571428572e-05, + "loss": 2.1787, + "step": 124 + }, + { + "epoch": 0.021451862021623476, + "grad_norm": 4.69911054437477, + "learning_rate": 1.4285714285714287e-05, + "loss": 2.2823, + "step": 125 + }, + { + "epoch": 0.021623476917796466, + "grad_norm": 7.500875628785198, + "learning_rate": 1.4400000000000001e-05, + "loss": 2.42, + "step": 126 + }, + { + "epoch": 0.021795091813969453, + "grad_norm": 5.860447483933667, + "learning_rate": 1.4514285714285717e-05, + "loss": 2.3998, + "step": 127 + }, + { + "epoch": 0.02196670671014244, + "grad_norm": 5.0042507863008625, + "learning_rate": 1.462857142857143e-05, + "loss": 2.1521, + "step": 128 + }, + { + "epoch": 0.022138321606315427, + "grad_norm": 10.48288589489657, + "learning_rate": 1.4742857142857143e-05, + "loss": 2.2169, + "step": 129 + }, + { + "epoch": 0.022309936502488414, + "grad_norm": 4.170799239900871, + "learning_rate": 1.4857142857142858e-05, + "loss": 2.0158, + "step": 130 + }, + { + "epoch": 0.022481551398661405, + "grad_norm": 5.252861584712857, + "learning_rate": 1.4971428571428572e-05, + "loss": 2.5714, + "step": 131 + }, + { + "epoch": 0.022653166294834392, + "grad_norm": 4.7504203282586595, + "learning_rate": 1.5085714285714288e-05, + "loss": 2.1937, + "step": 132 + }, + { + "epoch": 0.02282478119100738, + "grad_norm": 3.8862421876989717, + "learning_rate": 1.5200000000000002e-05, + "loss": 2.4138, + "step": 133 + }, + { + "epoch": 0.022996396087180366, + "grad_norm": 5.52634558713055, + "learning_rate": 1.5314285714285716e-05, + "loss": 2.2333, + "step": 134 + }, + { + "epoch": 0.023168010983353356, + "grad_norm": 6.031380436251491, + "learning_rate": 1.542857142857143e-05, + "loss": 2.2482, + "step": 135 + }, + { + "epoch": 0.023339625879526343, + "grad_norm": 5.846899422230628, + "learning_rate": 1.5542857142857144e-05, + "loss": 2.2351, + "step": 136 + }, + { + "epoch": 0.02351124077569933, + "grad_norm": 4.499833447260017, + "learning_rate": 1.5657142857142856e-05, + "loss": 2.0721, + "step": 137 + }, + { + "epoch": 0.023682855671872317, + "grad_norm": 5.5578739267495925, + "learning_rate": 1.577142857142857e-05, + "loss": 2.3898, + "step": 138 + }, + { + "epoch": 0.023854470568045308, + "grad_norm": 4.507883690316529, + "learning_rate": 1.5885714285714287e-05, + "loss": 2.1942, + "step": 139 + }, + { + "epoch": 0.024026085464218295, + "grad_norm": 5.465098089669164, + "learning_rate": 1.6000000000000003e-05, + "loss": 2.2732, + "step": 
140 + }, + { + "epoch": 0.024197700360391282, + "grad_norm": 3.606924457210559, + "learning_rate": 1.6114285714285715e-05, + "loss": 2.4358, + "step": 141 + }, + { + "epoch": 0.02436931525656427, + "grad_norm": 7.609974907749964, + "learning_rate": 1.622857142857143e-05, + "loss": 2.3202, + "step": 142 + }, + { + "epoch": 0.024540930152737256, + "grad_norm": 8.3992014193753, + "learning_rate": 1.6342857142857146e-05, + "loss": 2.3107, + "step": 143 + }, + { + "epoch": 0.024712545048910246, + "grad_norm": 7.943361068023809, + "learning_rate": 1.645714285714286e-05, + "loss": 2.5093, + "step": 144 + }, + { + "epoch": 0.024884159945083233, + "grad_norm": 9.683401712750092, + "learning_rate": 1.6571428571428574e-05, + "loss": 2.0652, + "step": 145 + }, + { + "epoch": 0.02505577484125622, + "grad_norm": 4.851349498462054, + "learning_rate": 1.6685714285714286e-05, + "loss": 2.1429, + "step": 146 + }, + { + "epoch": 0.025227389737429207, + "grad_norm": 6.116749168180479, + "learning_rate": 1.6800000000000002e-05, + "loss": 2.2171, + "step": 147 + }, + { + "epoch": 0.025399004633602198, + "grad_norm": 21.619649174097454, + "learning_rate": 1.6914285714285717e-05, + "loss": 2.2228, + "step": 148 + }, + { + "epoch": 0.025570619529775185, + "grad_norm": 6.191470229031899, + "learning_rate": 1.702857142857143e-05, + "loss": 1.9655, + "step": 149 + }, + { + "epoch": 0.025742234425948172, + "grad_norm": 6.523677989815507, + "learning_rate": 1.7142857142857142e-05, + "loss": 2.1148, + "step": 150 + }, + { + "epoch": 0.02591384932212116, + "grad_norm": 3.698357238067846, + "learning_rate": 1.7257142857142857e-05, + "loss": 2.0033, + "step": 151 + }, + { + "epoch": 0.02608546421829415, + "grad_norm": 5.04695114971071, + "learning_rate": 1.7371428571428573e-05, + "loss": 1.9735, + "step": 152 + }, + { + "epoch": 0.026257079114467136, + "grad_norm": 5.282682948320752, + "learning_rate": 1.748571428571429e-05, + "loss": 2.0038, + "step": 153 + }, + { + "epoch": 0.026428694010640123, + "grad_norm": 4.800801803810252, + "learning_rate": 1.76e-05, + "loss": 2.1585, + "step": 154 + }, + { + "epoch": 0.02660030890681311, + "grad_norm": 9.028618098248373, + "learning_rate": 1.7714285714285717e-05, + "loss": 2.1533, + "step": 155 + }, + { + "epoch": 0.0267719238029861, + "grad_norm": 12.347764899908109, + "learning_rate": 1.7828571428571432e-05, + "loss": 2.323, + "step": 156 + }, + { + "epoch": 0.026943538699159088, + "grad_norm": 12.727288499723587, + "learning_rate": 1.7942857142857144e-05, + "loss": 2.1587, + "step": 157 + }, + { + "epoch": 0.027115153595332075, + "grad_norm": 8.45403493722998, + "learning_rate": 1.8057142857142857e-05, + "loss": 2.2363, + "step": 158 + }, + { + "epoch": 0.027286768491505062, + "grad_norm": 3.6371063335499394, + "learning_rate": 1.8171428571428572e-05, + "loss": 2.2642, + "step": 159 + }, + { + "epoch": 0.02745838338767805, + "grad_norm": 8.503641705960693, + "learning_rate": 1.8285714285714288e-05, + "loss": 2.0775, + "step": 160 + }, + { + "epoch": 0.02762999828385104, + "grad_norm": 3.9593201776203903, + "learning_rate": 1.8400000000000003e-05, + "loss": 2.0017, + "step": 161 + }, + { + "epoch": 0.027801613180024026, + "grad_norm": 4.047122537768316, + "learning_rate": 1.8514285714285716e-05, + "loss": 2.0842, + "step": 162 + }, + { + "epoch": 0.027973228076197013, + "grad_norm": 4.133796081789339, + "learning_rate": 1.8628571428571428e-05, + "loss": 2.2323, + "step": 163 + }, + { + "epoch": 0.02814484297237, + "grad_norm": 3.5505724791516142, + "learning_rate": 
1.8742857142857143e-05, + "loss": 2.4431, + "step": 164 + }, + { + "epoch": 0.02831645786854299, + "grad_norm": 6.368793705040636, + "learning_rate": 1.885714285714286e-05, + "loss": 2.0516, + "step": 165 + }, + { + "epoch": 0.028488072764715978, + "grad_norm": 5.05590285307787, + "learning_rate": 1.8971428571428575e-05, + "loss": 2.0304, + "step": 166 + }, + { + "epoch": 0.028659687660888965, + "grad_norm": 4.610753004740335, + "learning_rate": 1.9085714285714287e-05, + "loss": 2.1065, + "step": 167 + }, + { + "epoch": 0.028831302557061952, + "grad_norm": 4.7447497256392, + "learning_rate": 1.9200000000000003e-05, + "loss": 2.4162, + "step": 168 + }, + { + "epoch": 0.029002917453234942, + "grad_norm": 3.856569889357456, + "learning_rate": 1.9314285714285718e-05, + "loss": 2.2449, + "step": 169 + }, + { + "epoch": 0.02917453234940793, + "grad_norm": 4.010537453686156, + "learning_rate": 1.942857142857143e-05, + "loss": 2.267, + "step": 170 + }, + { + "epoch": 0.029346147245580916, + "grad_norm": 3.895556359814577, + "learning_rate": 1.9542857142857143e-05, + "loss": 2.1048, + "step": 171 + }, + { + "epoch": 0.029517762141753903, + "grad_norm": 3.945479769866957, + "learning_rate": 1.9657142857142858e-05, + "loss": 1.9937, + "step": 172 + }, + { + "epoch": 0.029689377037926894, + "grad_norm": 3.2695958663509788, + "learning_rate": 1.9771428571428574e-05, + "loss": 1.9953, + "step": 173 + }, + { + "epoch": 0.02986099193409988, + "grad_norm": 3.9714084675208574, + "learning_rate": 1.988571428571429e-05, + "loss": 2.1219, + "step": 174 + }, + { + "epoch": 0.030032606830272868, + "grad_norm": 3.662956223801034, + "learning_rate": 2e-05, + "loss": 2.0746, + "step": 175 + }, + { + "epoch": 0.030204221726445855, + "grad_norm": 4.011090108652777, + "learning_rate": 1.999999845522429e-05, + "loss": 2.2858, + "step": 176 + }, + { + "epoch": 0.03037583662261884, + "grad_norm": 4.066209701080919, + "learning_rate": 1.9999993820897636e-05, + "loss": 1.7863, + "step": 177 + }, + { + "epoch": 0.030547451518791832, + "grad_norm": 5.472757350285933, + "learning_rate": 1.9999986097021468e-05, + "loss": 2.2069, + "step": 178 + }, + { + "epoch": 0.03071906641496482, + "grad_norm": 5.834262069284485, + "learning_rate": 1.9999975283598174e-05, + "loss": 2.2528, + "step": 179 + }, + { + "epoch": 0.030890681311137806, + "grad_norm": 3.3155284708163912, + "learning_rate": 1.99999613806311e-05, + "loss": 1.9184, + "step": 180 + }, + { + "epoch": 0.031062296207310793, + "grad_norm": 4.14717746097735, + "learning_rate": 1.999994438812453e-05, + "loss": 2.1259, + "step": 181 + }, + { + "epoch": 0.031233911103483784, + "grad_norm": 3.263252511795784, + "learning_rate": 1.999992430608372e-05, + "loss": 1.9938, + "step": 182 + }, + { + "epoch": 0.03140552599965677, + "grad_norm": 3.4793563126214995, + "learning_rate": 1.9999901134514877e-05, + "loss": 2.0116, + "step": 183 + }, + { + "epoch": 0.03157714089582976, + "grad_norm": 4.728527397407396, + "learning_rate": 1.999987487342516e-05, + "loss": 1.9359, + "step": 184 + }, + { + "epoch": 0.031748755792002745, + "grad_norm": 6.228075711112214, + "learning_rate": 1.9999845522822675e-05, + "loss": 2.32, + "step": 185 + }, + { + "epoch": 0.031920370688175735, + "grad_norm": 8.62147957543855, + "learning_rate": 1.9999813082716498e-05, + "loss": 2.1879, + "step": 186 + }, + { + "epoch": 0.03209198558434872, + "grad_norm": 5.94206466512737, + "learning_rate": 1.999977755311665e-05, + "loss": 2.2037, + "step": 187 + }, + { + "epoch": 0.03226360048052171, + "grad_norm": 
4.4989390981166135, + "learning_rate": 1.99997389340341e-05, + "loss": 2.2205, + "step": 188 + }, + { + "epoch": 0.0324352153766947, + "grad_norm": 3.182881699543917, + "learning_rate": 1.9999697225480793e-05, + "loss": 2.1409, + "step": 189 + }, + { + "epoch": 0.03260683027286768, + "grad_norm": 5.806182784255027, + "learning_rate": 1.999965242746961e-05, + "loss": 2.0681, + "step": 190 + }, + { + "epoch": 0.032778445169040674, + "grad_norm": 3.9949497913052134, + "learning_rate": 1.9999604540014385e-05, + "loss": 1.9099, + "step": 191 + }, + { + "epoch": 0.03295006006521366, + "grad_norm": 4.264121839402924, + "learning_rate": 1.999955356312992e-05, + "loss": 2.3339, + "step": 192 + }, + { + "epoch": 0.03312167496138665, + "grad_norm": 3.6493480360975665, + "learning_rate": 1.9999499496831964e-05, + "loss": 2.1846, + "step": 193 + }, + { + "epoch": 0.03329328985755964, + "grad_norm": 3.9791360047438102, + "learning_rate": 1.9999442341137216e-05, + "loss": 1.7587, + "step": 194 + }, + { + "epoch": 0.03346490475373262, + "grad_norm": 4.739405910717689, + "learning_rate": 1.999938209606334e-05, + "loss": 2.1572, + "step": 195 + }, + { + "epoch": 0.03363651964990561, + "grad_norm": 4.698002599709917, + "learning_rate": 1.9999318761628946e-05, + "loss": 2.0548, + "step": 196 + }, + { + "epoch": 0.0338081345460786, + "grad_norm": 4.8135736509450755, + "learning_rate": 1.9999252337853605e-05, + "loss": 2.2919, + "step": 197 + }, + { + "epoch": 0.033979749442251586, + "grad_norm": 4.717374624286988, + "learning_rate": 1.9999182824757835e-05, + "loss": 2.1388, + "step": 198 + }, + { + "epoch": 0.03415136433842458, + "grad_norm": 5.443390808863331, + "learning_rate": 1.9999110222363117e-05, + "loss": 1.9442, + "step": 199 + }, + { + "epoch": 0.03432297923459756, + "grad_norm": 3.5416871939501346, + "learning_rate": 1.9999034530691873e-05, + "loss": 2.3343, + "step": 200 + }, + { + "epoch": 0.03449459413077055, + "grad_norm": 3.928628846334232, + "learning_rate": 1.99989557497675e-05, + "loss": 2.0561, + "step": 201 + }, + { + "epoch": 0.03466620902694354, + "grad_norm": 3.8231391715035294, + "learning_rate": 1.999887387961433e-05, + "loss": 1.8915, + "step": 202 + }, + { + "epoch": 0.034837823923116525, + "grad_norm": 5.148709447014063, + "learning_rate": 1.9998788920257658e-05, + "loss": 1.9244, + "step": 203 + }, + { + "epoch": 0.035009438819289515, + "grad_norm": 5.225707198161678, + "learning_rate": 1.9998700871723736e-05, + "loss": 2.3055, + "step": 204 + }, + { + "epoch": 0.035181053715462506, + "grad_norm": 3.964128867071377, + "learning_rate": 1.999860973403976e-05, + "loss": 2.2706, + "step": 205 + }, + { + "epoch": 0.03535266861163549, + "grad_norm": 4.439156598745672, + "learning_rate": 1.99985155072339e-05, + "loss": 1.9415, + "step": 206 + }, + { + "epoch": 0.03552428350780848, + "grad_norm": 3.747634015520336, + "learning_rate": 1.9998418191335257e-05, + "loss": 1.9203, + "step": 207 + }, + { + "epoch": 0.03569589840398146, + "grad_norm": 9.971101569080222, + "learning_rate": 1.99983177863739e-05, + "loss": 2.036, + "step": 208 + }, + { + "epoch": 0.035867513300154454, + "grad_norm": 3.3921949604506754, + "learning_rate": 1.999821429238085e-05, + "loss": 2.1034, + "step": 209 + }, + { + "epoch": 0.036039128196327444, + "grad_norm": 3.6407447317392494, + "learning_rate": 1.9998107709388084e-05, + "loss": 2.1111, + "step": 210 + }, + { + "epoch": 0.03621074309250043, + "grad_norm": 4.485329884784654, + "learning_rate": 1.9997998037428528e-05, + "loss": 2.275, + "step": 211 + }, + { + 
"epoch": 0.03638235798867342, + "grad_norm": 4.190281083470862, + "learning_rate": 1.9997885276536067e-05, + "loss": 2.0305, + "step": 212 + }, + { + "epoch": 0.0365539728848464, + "grad_norm": 3.8190741377340967, + "learning_rate": 1.9997769426745538e-05, + "loss": 2.0532, + "step": 213 + }, + { + "epoch": 0.03672558778101939, + "grad_norm": 5.295757263330625, + "learning_rate": 1.9997650488092737e-05, + "loss": 2.3212, + "step": 214 + }, + { + "epoch": 0.03689720267719238, + "grad_norm": 4.7603003206845695, + "learning_rate": 1.999752846061441e-05, + "loss": 2.0086, + "step": 215 + }, + { + "epoch": 0.037068817573365366, + "grad_norm": 5.2877008090048605, + "learning_rate": 1.9997403344348255e-05, + "loss": 2.0967, + "step": 216 + }, + { + "epoch": 0.037240432469538357, + "grad_norm": 5.746606057083038, + "learning_rate": 1.9997275139332926e-05, + "loss": 2.3167, + "step": 217 + }, + { + "epoch": 0.03741204736571135, + "grad_norm": 4.914828526948254, + "learning_rate": 1.9997143845608038e-05, + "loss": 1.9359, + "step": 218 + }, + { + "epoch": 0.03758366226188433, + "grad_norm": 4.192150027643096, + "learning_rate": 1.999700946321415e-05, + "loss": 2.0423, + "step": 219 + }, + { + "epoch": 0.03775527715805732, + "grad_norm": 7.885046348932198, + "learning_rate": 1.9996871992192784e-05, + "loss": 1.9538, + "step": 220 + }, + { + "epoch": 0.037926892054230305, + "grad_norm": 5.930802630560254, + "learning_rate": 1.999673143258641e-05, + "loss": 2.2622, + "step": 221 + }, + { + "epoch": 0.038098506950403295, + "grad_norm": 6.7277426797522875, + "learning_rate": 1.9996587784438458e-05, + "loss": 1.9707, + "step": 222 + }, + { + "epoch": 0.038270121846576285, + "grad_norm": 5.08161908508106, + "learning_rate": 1.99964410477933e-05, + "loss": 2.1961, + "step": 223 + }, + { + "epoch": 0.03844173674274927, + "grad_norm": 7.548243152560011, + "learning_rate": 1.9996291222696284e-05, + "loss": 1.9572, + "step": 224 + }, + { + "epoch": 0.03861335163892226, + "grad_norm": 4.8097191364480185, + "learning_rate": 1.9996138309193687e-05, + "loss": 1.8955, + "step": 225 + }, + { + "epoch": 0.03878496653509524, + "grad_norm": 4.373817752431658, + "learning_rate": 1.9995982307332763e-05, + "loss": 2.168, + "step": 226 + }, + { + "epoch": 0.038956581431268233, + "grad_norm": 3.5792006126973126, + "learning_rate": 1.99958232171617e-05, + "loss": 2.0136, + "step": 227 + }, + { + "epoch": 0.039128196327441224, + "grad_norm": 3.3180076759778605, + "learning_rate": 1.9995661038729656e-05, + "loss": 2.0724, + "step": 228 + }, + { + "epoch": 0.03929981122361421, + "grad_norm": 4.413172277841362, + "learning_rate": 1.9995495772086735e-05, + "loss": 2.0184, + "step": 229 + }, + { + "epoch": 0.0394714261197872, + "grad_norm": 3.50017394385066, + "learning_rate": 1.9995327417283997e-05, + "loss": 1.9525, + "step": 230 + }, + { + "epoch": 0.03964304101596019, + "grad_norm": 4.104073070939531, + "learning_rate": 1.9995155974373456e-05, + "loss": 2.0843, + "step": 231 + }, + { + "epoch": 0.03981465591213317, + "grad_norm": 3.5664780974854615, + "learning_rate": 1.999498144340808e-05, + "loss": 1.8218, + "step": 232 + }, + { + "epoch": 0.03998627080830616, + "grad_norm": 5.408943361359781, + "learning_rate": 1.9994803824441794e-05, + "loss": 2.2513, + "step": 233 + }, + { + "epoch": 0.040157885704479146, + "grad_norm": 3.6523538502640216, + "learning_rate": 1.9994623117529467e-05, + "loss": 2.0021, + "step": 234 + }, + { + "epoch": 0.040329500600652136, + "grad_norm": 8.254834921731893, + "learning_rate": 
1.999443932272694e-05, + "loss": 2.2235, + "step": 235 + }, + { + "epoch": 0.04050111549682513, + "grad_norm": 3.7270234847996426, + "learning_rate": 1.9994252440090985e-05, + "loss": 1.9841, + "step": 236 + }, + { + "epoch": 0.04067273039299811, + "grad_norm": 5.974287742095654, + "learning_rate": 1.999406246967935e-05, + "loss": 2.3204, + "step": 237 + }, + { + "epoch": 0.0408443452891711, + "grad_norm": 3.394315816956387, + "learning_rate": 1.9993869411550724e-05, + "loss": 2.1603, + "step": 238 + }, + { + "epoch": 0.041015960185344084, + "grad_norm": 2.592530619481467, + "learning_rate": 1.9993673265764753e-05, + "loss": 1.8293, + "step": 239 + }, + { + "epoch": 0.041187575081517075, + "grad_norm": 3.0827127181917815, + "learning_rate": 1.9993474032382035e-05, + "loss": 2.0716, + "step": 240 + }, + { + "epoch": 0.041359189977690065, + "grad_norm": 3.6407775794491193, + "learning_rate": 1.9993271711464133e-05, + "loss": 2.0632, + "step": 241 + }, + { + "epoch": 0.04153080487386305, + "grad_norm": 3.539435874588333, + "learning_rate": 1.9993066303073546e-05, + "loss": 2.1099, + "step": 242 + }, + { + "epoch": 0.04170241977003604, + "grad_norm": 4.533093018347158, + "learning_rate": 1.9992857807273738e-05, + "loss": 2.0409, + "step": 243 + }, + { + "epoch": 0.04187403466620903, + "grad_norm": 3.417663569586204, + "learning_rate": 1.9992646224129127e-05, + "loss": 2.1697, + "step": 244 + }, + { + "epoch": 0.04204564956238201, + "grad_norm": 4.287451168460292, + "learning_rate": 1.9992431553705082e-05, + "loss": 2.0607, + "step": 245 + }, + { + "epoch": 0.042217264458555004, + "grad_norm": 3.847626705709783, + "learning_rate": 1.9992213796067924e-05, + "loss": 2.0346, + "step": 246 + }, + { + "epoch": 0.04238887935472799, + "grad_norm": 6.781181795874853, + "learning_rate": 1.999199295128493e-05, + "loss": 2.1162, + "step": 247 + }, + { + "epoch": 0.04256049425090098, + "grad_norm": 3.8423922077002626, + "learning_rate": 1.999176901942434e-05, + "loss": 2.1391, + "step": 248 + }, + { + "epoch": 0.04273210914707397, + "grad_norm": 2.944651891036526, + "learning_rate": 1.999154200055533e-05, + "loss": 2.1784, + "step": 249 + }, + { + "epoch": 0.04290372404324695, + "grad_norm": 3.6257027439944634, + "learning_rate": 1.9991311894748037e-05, + "loss": 2.1781, + "step": 250 + }, + { + "epoch": 0.04307533893941994, + "grad_norm": 3.153539075259829, + "learning_rate": 1.999107870207356e-05, + "loss": 1.8551, + "step": 251 + }, + { + "epoch": 0.04324695383559293, + "grad_norm": 4.125627135993159, + "learning_rate": 1.9990842422603945e-05, + "loss": 1.9995, + "step": 252 + }, + { + "epoch": 0.043418568731765916, + "grad_norm": 6.0088583399602555, + "learning_rate": 1.9990603056412187e-05, + "loss": 2.0258, + "step": 253 + }, + { + "epoch": 0.04359018362793891, + "grad_norm": 3.5960539809680356, + "learning_rate": 1.9990360603572244e-05, + "loss": 2.1001, + "step": 254 + }, + { + "epoch": 0.04376179852411189, + "grad_norm": 3.0334651823685213, + "learning_rate": 1.9990115064159017e-05, + "loss": 2.2487, + "step": 255 + }, + { + "epoch": 0.04393341342028488, + "grad_norm": 3.5943226010838436, + "learning_rate": 1.9989866438248372e-05, + "loss": 2.1614, + "step": 256 + }, + { + "epoch": 0.04410502831645787, + "grad_norm": 3.051716911652185, + "learning_rate": 1.998961472591712e-05, + "loss": 1.936, + "step": 257 + }, + { + "epoch": 0.044276643212630855, + "grad_norm": 3.3852311554081944, + "learning_rate": 1.9989359927243034e-05, + "loss": 2.0865, + "step": 258 + }, + { + "epoch": 0.044448258108803845, 
+ "grad_norm": 5.102975445206884, + "learning_rate": 1.998910204230483e-05, + "loss": 2.0388, + "step": 259 + }, + { + "epoch": 0.04461987300497683, + "grad_norm": 4.013708890038843, + "learning_rate": 1.9988841071182182e-05, + "loss": 2.1261, + "step": 260 + }, + { + "epoch": 0.04479148790114982, + "grad_norm": 3.391615244801847, + "learning_rate": 1.9988577013955726e-05, + "loss": 2.0906, + "step": 261 + }, + { + "epoch": 0.04496310279732281, + "grad_norm": 3.8082378978033837, + "learning_rate": 1.9988309870707035e-05, + "loss": 1.8132, + "step": 262 + }, + { + "epoch": 0.04513471769349579, + "grad_norm": 3.9267193517957324, + "learning_rate": 1.998803964151865e-05, + "loss": 2.0355, + "step": 263 + }, + { + "epoch": 0.045306332589668784, + "grad_norm": 3.20937664441744, + "learning_rate": 1.998776632647406e-05, + "loss": 2.1182, + "step": 264 + }, + { + "epoch": 0.045477947485841774, + "grad_norm": 4.126309440434599, + "learning_rate": 1.99874899256577e-05, + "loss": 2.184, + "step": 265 + }, + { + "epoch": 0.04564956238201476, + "grad_norm": 3.137038546242247, + "learning_rate": 1.9987210439154973e-05, + "loss": 2.0854, + "step": 266 + }, + { + "epoch": 0.04582117727818775, + "grad_norm": 3.7249125051993626, + "learning_rate": 1.9986927867052226e-05, + "loss": 2.2648, + "step": 267 + }, + { + "epoch": 0.04599279217436073, + "grad_norm": 5.210752851368842, + "learning_rate": 1.9986642209436758e-05, + "loss": 1.932, + "step": 268 + }, + { + "epoch": 0.04616440707053372, + "grad_norm": 3.5378426429474272, + "learning_rate": 1.998635346639683e-05, + "loss": 2.1835, + "step": 269 + }, + { + "epoch": 0.04633602196670671, + "grad_norm": 2.8272341094270814, + "learning_rate": 1.9986061638021643e-05, + "loss": 1.932, + "step": 270 + }, + { + "epoch": 0.046507636862879696, + "grad_norm": 15.551161972920074, + "learning_rate": 1.9985766724401365e-05, + "loss": 1.7904, + "step": 271 + }, + { + "epoch": 0.04667925175905269, + "grad_norm": 3.4232639279297934, + "learning_rate": 1.998546872562711e-05, + "loss": 2.2103, + "step": 272 + }, + { + "epoch": 0.04685086665522567, + "grad_norm": 3.7729142339088333, + "learning_rate": 1.9985167641790948e-05, + "loss": 1.7944, + "step": 273 + }, + { + "epoch": 0.04702248155139866, + "grad_norm": 3.3560768641924814, + "learning_rate": 1.9984863472985892e-05, + "loss": 1.744, + "step": 274 + }, + { + "epoch": 0.04719409644757165, + "grad_norm": 4.2064486421624245, + "learning_rate": 1.998455621930593e-05, + "loss": 2.0007, + "step": 275 + }, + { + "epoch": 0.047365711343744635, + "grad_norm": 3.193568853483708, + "learning_rate": 1.9984245880845975e-05, + "loss": 1.9463, + "step": 276 + }, + { + "epoch": 0.047537326239917625, + "grad_norm": 3.410498022019692, + "learning_rate": 1.9983932457701918e-05, + "loss": 1.7809, + "step": 277 + }, + { + "epoch": 0.047708941136090616, + "grad_norm": 3.480581671341832, + "learning_rate": 1.998361594997059e-05, + "loss": 2.1692, + "step": 278 + }, + { + "epoch": 0.0478805560322636, + "grad_norm": 3.5109612424418626, + "learning_rate": 1.9983296357749774e-05, + "loss": 1.9474, + "step": 279 + }, + { + "epoch": 0.04805217092843659, + "grad_norm": 3.6369447796663796, + "learning_rate": 1.9982973681138215e-05, + "loss": 2.198, + "step": 280 + }, + { + "epoch": 0.04822378582460957, + "grad_norm": 4.494218086331917, + "learning_rate": 1.9982647920235604e-05, + "loss": 2.093, + "step": 281 + }, + { + "epoch": 0.048395400720782564, + "grad_norm": 4.5341565606847976, + "learning_rate": 1.9982319075142585e-05, + "loss": 2.0615, + 
"step": 282 + }, + { + "epoch": 0.048567015616955554, + "grad_norm": 3.775401733957357, + "learning_rate": 1.998198714596076e-05, + "loss": 2.0879, + "step": 283 + }, + { + "epoch": 0.04873863051312854, + "grad_norm": 3.224281406562129, + "learning_rate": 1.998165213279267e-05, + "loss": 2.3028, + "step": 284 + }, + { + "epoch": 0.04891024540930153, + "grad_norm": 3.1701862227332374, + "learning_rate": 1.9981314035741835e-05, + "loss": 2.2169, + "step": 285 + }, + { + "epoch": 0.04908186030547451, + "grad_norm": 3.8078015062216886, + "learning_rate": 1.99809728549127e-05, + "loss": 1.9385, + "step": 286 + }, + { + "epoch": 0.0492534752016475, + "grad_norm": 3.4956049530811146, + "learning_rate": 1.9980628590410675e-05, + "loss": 1.7499, + "step": 287 + }, + { + "epoch": 0.04942509009782049, + "grad_norm": 3.1104021548104503, + "learning_rate": 1.9980281242342127e-05, + "loss": 2.1662, + "step": 288 + }, + { + "epoch": 0.049596704993993476, + "grad_norm": 3.5602611180813244, + "learning_rate": 1.997993081081437e-05, + "loss": 1.7943, + "step": 289 + }, + { + "epoch": 0.04976831989016647, + "grad_norm": 2.9550996231707765, + "learning_rate": 1.997957729593567e-05, + "loss": 2.0344, + "step": 290 + }, + { + "epoch": 0.04993993478633946, + "grad_norm": 12.10802116117018, + "learning_rate": 1.997922069781525e-05, + "loss": 2.2949, + "step": 291 + }, + { + "epoch": 0.05011154968251244, + "grad_norm": 3.991348866827493, + "learning_rate": 1.997886101656328e-05, + "loss": 2.0757, + "step": 292 + }, + { + "epoch": 0.05028316457868543, + "grad_norm": 2.9214814563011853, + "learning_rate": 1.9978498252290887e-05, + "loss": 2.1655, + "step": 293 + }, + { + "epoch": 0.050454779474858415, + "grad_norm": 10.328640309606229, + "learning_rate": 1.9978132405110148e-05, + "loss": 2.0256, + "step": 294 + }, + { + "epoch": 0.050626394371031405, + "grad_norm": 3.0822074962729933, + "learning_rate": 1.997776347513409e-05, + "loss": 1.981, + "step": 295 + }, + { + "epoch": 0.050798009267204396, + "grad_norm": 3.7977003070847384, + "learning_rate": 1.9977391462476704e-05, + "loss": 2.0104, + "step": 296 + }, + { + "epoch": 0.05096962416337738, + "grad_norm": 3.129260857004221, + "learning_rate": 1.9977016367252916e-05, + "loss": 2.0055, + "step": 297 + }, + { + "epoch": 0.05114123905955037, + "grad_norm": 2.49936465622327, + "learning_rate": 1.9976638189578623e-05, + "loss": 2.1314, + "step": 298 + }, + { + "epoch": 0.05131285395572336, + "grad_norm": 5.627534615826663, + "learning_rate": 1.9976256929570657e-05, + "loss": 1.9054, + "step": 299 + }, + { + "epoch": 0.051484468851896344, + "grad_norm": 6.644104933532107, + "learning_rate": 1.997587258734681e-05, + "loss": 2.0553, + "step": 300 + }, + { + "epoch": 0.051656083748069334, + "grad_norm": 6.20781166333256, + "learning_rate": 1.9975485163025837e-05, + "loss": 1.7756, + "step": 301 + }, + { + "epoch": 0.05182769864424232, + "grad_norm": 4.777678957881222, + "learning_rate": 1.9975094656727424e-05, + "loss": 1.9228, + "step": 302 + }, + { + "epoch": 0.05199931354041531, + "grad_norm": 3.2413464766698676, + "learning_rate": 1.9974701068572223e-05, + "loss": 1.9268, + "step": 303 + }, + { + "epoch": 0.0521709284365883, + "grad_norm": 3.4243989209979513, + "learning_rate": 1.9974304398681837e-05, + "loss": 2.0327, + "step": 304 + }, + { + "epoch": 0.05234254333276128, + "grad_norm": 3.6407848281373316, + "learning_rate": 1.9973904647178816e-05, + "loss": 1.8295, + "step": 305 + }, + { + "epoch": 0.05251415822893427, + "grad_norm": 3.277014675061404, + 
"learning_rate": 1.997350181418667e-05, + "loss": 1.7963, + "step": 306 + }, + { + "epoch": 0.052685773125107256, + "grad_norm": 4.215647349610302, + "learning_rate": 1.997309589982985e-05, + "loss": 1.8516, + "step": 307 + }, + { + "epoch": 0.05285738802128025, + "grad_norm": 3.546260309218347, + "learning_rate": 1.997268690423377e-05, + "loss": 2.0199, + "step": 308 + }, + { + "epoch": 0.05302900291745324, + "grad_norm": 5.221183858099176, + "learning_rate": 1.997227482752479e-05, + "loss": 1.976, + "step": 309 + }, + { + "epoch": 0.05320061781362622, + "grad_norm": 3.7676627879817124, + "learning_rate": 1.997185966983022e-05, + "loss": 1.9, + "step": 310 + }, + { + "epoch": 0.05337223270979921, + "grad_norm": 3.52804983582057, + "learning_rate": 1.9971441431278337e-05, + "loss": 1.9173, + "step": 311 + }, + { + "epoch": 0.0535438476059722, + "grad_norm": 6.530673481853173, + "learning_rate": 1.9971020111998342e-05, + "loss": 2.0977, + "step": 312 + }, + { + "epoch": 0.053715462502145185, + "grad_norm": 5.0170849453421855, + "learning_rate": 1.9970595712120414e-05, + "loss": 2.0448, + "step": 313 + }, + { + "epoch": 0.053887077398318176, + "grad_norm": 4.130987581333207, + "learning_rate": 1.997016823177567e-05, + "loss": 1.8148, + "step": 314 + }, + { + "epoch": 0.05405869229449116, + "grad_norm": 6.215053078586563, + "learning_rate": 1.9969737671096182e-05, + "loss": 1.944, + "step": 315 + }, + { + "epoch": 0.05423030719066415, + "grad_norm": 3.8873415625275602, + "learning_rate": 1.9969304030214976e-05, + "loss": 1.9012, + "step": 316 + }, + { + "epoch": 0.05440192208683714, + "grad_norm": 3.9648633375059834, + "learning_rate": 1.9968867309266025e-05, + "loss": 1.834, + "step": 317 + }, + { + "epoch": 0.054573536983010124, + "grad_norm": 3.260714173660746, + "learning_rate": 1.9968427508384256e-05, + "loss": 2.0917, + "step": 318 + }, + { + "epoch": 0.054745151879183114, + "grad_norm": 3.3986200126057033, + "learning_rate": 1.996798462770555e-05, + "loss": 2.0721, + "step": 319 + }, + { + "epoch": 0.0549167667753561, + "grad_norm": 2.8063111215512357, + "learning_rate": 1.9967538667366738e-05, + "loss": 1.7492, + "step": 320 + }, + { + "epoch": 0.05508838167152909, + "grad_norm": 3.148377573614974, + "learning_rate": 1.99670896275056e-05, + "loss": 1.7018, + "step": 321 + }, + { + "epoch": 0.05525999656770208, + "grad_norm": 3.036364302057122, + "learning_rate": 1.996663750826087e-05, + "loss": 1.9847, + "step": 322 + }, + { + "epoch": 0.05543161146387506, + "grad_norm": 3.4340609311578767, + "learning_rate": 1.996618230977223e-05, + "loss": 1.9873, + "step": 323 + }, + { + "epoch": 0.05560322636004805, + "grad_norm": 3.483425040286511, + "learning_rate": 1.996572403218032e-05, + "loss": 2.2108, + "step": 324 + }, + { + "epoch": 0.05577484125622104, + "grad_norm": 2.6320552294886586, + "learning_rate": 1.9965262675626726e-05, + "loss": 1.977, + "step": 325 + }, + { + "epoch": 0.055946456152394027, + "grad_norm": 3.7381712119779547, + "learning_rate": 1.996479824025398e-05, + "loss": 1.9211, + "step": 326 + }, + { + "epoch": 0.05611807104856702, + "grad_norm": 3.014971799857585, + "learning_rate": 1.9964330726205585e-05, + "loss": 2.0162, + "step": 327 + }, + { + "epoch": 0.05628968594474, + "grad_norm": 3.7417585676111815, + "learning_rate": 1.9963860133625967e-05, + "loss": 2.0456, + "step": 328 + }, + { + "epoch": 0.05646130084091299, + "grad_norm": 3.517090546394605, + "learning_rate": 1.996338646266053e-05, + "loss": 2.041, + "step": 329 + }, + { + "epoch": 0.05663291573708598, + 
"grad_norm": 3.51522607791768, + "learning_rate": 1.996290971345561e-05, + "loss": 1.9018, + "step": 330 + }, + { + "epoch": 0.056804530633258965, + "grad_norm": 4.245594218887946, + "learning_rate": 1.9962429886158503e-05, + "loss": 2.1062, + "step": 331 + }, + { + "epoch": 0.056976145529431955, + "grad_norm": 3.9658374350384107, + "learning_rate": 1.9961946980917457e-05, + "loss": 2.0405, + "step": 332 + }, + { + "epoch": 0.057147760425604946, + "grad_norm": 2.798536101245012, + "learning_rate": 1.9961460997881664e-05, + "loss": 1.7389, + "step": 333 + }, + { + "epoch": 0.05731937532177793, + "grad_norm": 2.622222621230687, + "learning_rate": 1.9960971937201273e-05, + "loss": 1.8965, + "step": 334 + }, + { + "epoch": 0.05749099021795092, + "grad_norm": 3.5051816941479963, + "learning_rate": 1.996047979902738e-05, + "loss": 1.9965, + "step": 335 + }, + { + "epoch": 0.057662605114123903, + "grad_norm": 4.023153275223512, + "learning_rate": 1.9959984583512034e-05, + "loss": 2.0692, + "step": 336 + }, + { + "epoch": 0.057834220010296894, + "grad_norm": 3.3095562234726668, + "learning_rate": 1.995948629080824e-05, + "loss": 2.1669, + "step": 337 + }, + { + "epoch": 0.058005834906469884, + "grad_norm": 3.250292635314978, + "learning_rate": 1.995898492106994e-05, + "loss": 1.963, + "step": 338 + }, + { + "epoch": 0.05817744980264287, + "grad_norm": 3.7160346187443776, + "learning_rate": 1.9958480474452038e-05, + "loss": 2.0932, + "step": 339 + }, + { + "epoch": 0.05834906469881586, + "grad_norm": 5.363449367366593, + "learning_rate": 1.9957972951110384e-05, + "loss": 1.8988, + "step": 340 + }, + { + "epoch": 0.05852067959498884, + "grad_norm": 2.9097716754228724, + "learning_rate": 1.9957462351201787e-05, + "loss": 2.0416, + "step": 341 + }, + { + "epoch": 0.05869229449116183, + "grad_norm": 3.998268814523747, + "learning_rate": 1.995694867488399e-05, + "loss": 1.9339, + "step": 342 + }, + { + "epoch": 0.05886390938733482, + "grad_norm": 3.263953790011657, + "learning_rate": 1.9956431922315696e-05, + "loss": 1.8995, + "step": 343 + }, + { + "epoch": 0.059035524283507806, + "grad_norm": 3.567779664486138, + "learning_rate": 1.995591209365657e-05, + "loss": 1.9859, + "step": 344 + }, + { + "epoch": 0.0592071391796808, + "grad_norm": 3.2861151704697247, + "learning_rate": 1.9955389189067205e-05, + "loss": 1.9084, + "step": 345 + }, + { + "epoch": 0.05937875407585379, + "grad_norm": 3.5321753686438653, + "learning_rate": 1.9954863208709155e-05, + "loss": 2.0222, + "step": 346 + }, + { + "epoch": 0.05955036897202677, + "grad_norm": 3.4816258808995637, + "learning_rate": 1.995433415274493e-05, + "loss": 1.8883, + "step": 347 + }, + { + "epoch": 0.05972198386819976, + "grad_norm": 2.9457399292289317, + "learning_rate": 1.9953802021337984e-05, + "loss": 1.8481, + "step": 348 + }, + { + "epoch": 0.059893598764372745, + "grad_norm": 3.0190464376361485, + "learning_rate": 1.9953266814652715e-05, + "loss": 1.9166, + "step": 349 + }, + { + "epoch": 0.060065213660545735, + "grad_norm": 2.74366104722361, + "learning_rate": 1.9952728532854488e-05, + "loss": 2.1952, + "step": 350 + }, + { + "epoch": 0.060236828556718726, + "grad_norm": 4.406679756642423, + "learning_rate": 1.9952187176109598e-05, + "loss": 2.3529, + "step": 351 + }, + { + "epoch": 0.06040844345289171, + "grad_norm": 3.489249349264093, + "learning_rate": 1.9951642744585307e-05, + "loss": 2.0213, + "step": 352 + }, + { + "epoch": 0.0605800583490647, + "grad_norm": 4.451462316787464, + "learning_rate": 1.9951095238449816e-05, + "loss": 1.9524, + 
"step": 353 + }, + { + "epoch": 0.06075167324523768, + "grad_norm": 3.346050256969279, + "learning_rate": 1.995054465787228e-05, + "loss": 2.1326, + "step": 354 + }, + { + "epoch": 0.060923288141410674, + "grad_norm": 3.274753224765989, + "learning_rate": 1.994999100302281e-05, + "loss": 1.9726, + "step": 355 + }, + { + "epoch": 0.061094903037583664, + "grad_norm": 3.452040592920918, + "learning_rate": 1.994943427407245e-05, + "loss": 2.0665, + "step": 356 + }, + { + "epoch": 0.06126651793375665, + "grad_norm": 3.325702290390705, + "learning_rate": 1.994887447119321e-05, + "loss": 1.8927, + "step": 357 + }, + { + "epoch": 0.06143813282992964, + "grad_norm": 2.8228953686847116, + "learning_rate": 1.9948311594558044e-05, + "loss": 1.9875, + "step": 358 + }, + { + "epoch": 0.06160974772610263, + "grad_norm": 3.1045240616279597, + "learning_rate": 1.994774564434086e-05, + "loss": 2.274, + "step": 359 + }, + { + "epoch": 0.06178136262227561, + "grad_norm": 3.616583977726727, + "learning_rate": 1.99471766207165e-05, + "loss": 1.8978, + "step": 360 + }, + { + "epoch": 0.0619529775184486, + "grad_norm": 5.010761526550373, + "learning_rate": 1.9946604523860775e-05, + "loss": 2.0328, + "step": 361 + }, + { + "epoch": 0.062124592414621586, + "grad_norm": 3.1813612302118255, + "learning_rate": 1.9946029353950438e-05, + "loss": 2.1191, + "step": 362 + }, + { + "epoch": 0.06229620731079458, + "grad_norm": 8.461598649395224, + "learning_rate": 1.9945451111163184e-05, + "loss": 2.1219, + "step": 363 + }, + { + "epoch": 0.06246782220696757, + "grad_norm": 3.6026486209249753, + "learning_rate": 1.9944869795677673e-05, + "loss": 1.9317, + "step": 364 + }, + { + "epoch": 0.06263943710314056, + "grad_norm": 3.1743447509788703, + "learning_rate": 1.99442854076735e-05, + "loss": 1.8406, + "step": 365 + }, + { + "epoch": 0.06281105199931354, + "grad_norm": 2.9018765647258724, + "learning_rate": 1.994369794733121e-05, + "loss": 1.9747, + "step": 366 + }, + { + "epoch": 0.06298266689548652, + "grad_norm": 4.509269773211525, + "learning_rate": 1.9943107414832314e-05, + "loss": 1.7696, + "step": 367 + }, + { + "epoch": 0.06315428179165952, + "grad_norm": 3.418384148627606, + "learning_rate": 1.994251381035925e-05, + "loss": 1.9637, + "step": 368 + }, + { + "epoch": 0.0633258966878325, + "grad_norm": 2.693871077608062, + "learning_rate": 1.994191713409542e-05, + "loss": 2.0829, + "step": 369 + }, + { + "epoch": 0.06349751158400549, + "grad_norm": 3.6544102213339675, + "learning_rate": 1.9941317386225165e-05, + "loss": 2.1403, + "step": 370 + }, + { + "epoch": 0.06366912648017847, + "grad_norm": 3.4147609805280825, + "learning_rate": 1.9940714566933787e-05, + "loss": 2.0245, + "step": 371 + }, + { + "epoch": 0.06384074137635147, + "grad_norm": 3.51893148223146, + "learning_rate": 1.994010867640753e-05, + "loss": 1.9533, + "step": 372 + }, + { + "epoch": 0.06401235627252445, + "grad_norm": 6.446889997539744, + "learning_rate": 1.993949971483358e-05, + "loss": 2.2087, + "step": 373 + }, + { + "epoch": 0.06418397116869744, + "grad_norm": 3.5840239062714696, + "learning_rate": 1.9938887682400084e-05, + "loss": 2.0374, + "step": 374 + }, + { + "epoch": 0.06435558606487043, + "grad_norm": 3.564481556922706, + "learning_rate": 1.993827257929613e-05, + "loss": 2.0596, + "step": 375 + }, + { + "epoch": 0.06452720096104342, + "grad_norm": 6.000711626236975, + "learning_rate": 1.9937654405711758e-05, + "loss": 1.9471, + "step": 376 + }, + { + "epoch": 0.0646988158572164, + "grad_norm": 3.593852534204912, + "learning_rate": 
1.993703316183796e-05, + "loss": 1.9825, + "step": 377 + }, + { + "epoch": 0.0648704307533894, + "grad_norm": 3.3036832169777335, + "learning_rate": 1.9936408847866668e-05, + "loss": 2.0102, + "step": 378 + }, + { + "epoch": 0.06504204564956238, + "grad_norm": 3.052793501014722, + "learning_rate": 1.9935781463990765e-05, + "loss": 2.0017, + "step": 379 + }, + { + "epoch": 0.06521366054573537, + "grad_norm": 3.6612148008631733, + "learning_rate": 1.993515101040409e-05, + "loss": 1.8878, + "step": 380 + }, + { + "epoch": 0.06538527544190836, + "grad_norm": 3.475842753389438, + "learning_rate": 1.9934517487301423e-05, + "loss": 1.9538, + "step": 381 + }, + { + "epoch": 0.06555689033808135, + "grad_norm": 4.998090766931655, + "learning_rate": 1.9933880894878492e-05, + "loss": 1.8712, + "step": 382 + }, + { + "epoch": 0.06572850523425433, + "grad_norm": 2.589300930701225, + "learning_rate": 1.993324123333198e-05, + "loss": 2.044, + "step": 383 + }, + { + "epoch": 0.06590012013042731, + "grad_norm": 3.780280718282726, + "learning_rate": 1.9932598502859507e-05, + "loss": 2.0189, + "step": 384 + }, + { + "epoch": 0.06607173502660031, + "grad_norm": 3.730079966278093, + "learning_rate": 1.9931952703659655e-05, + "loss": 1.7114, + "step": 385 + }, + { + "epoch": 0.0662433499227733, + "grad_norm": 3.5938600807071572, + "learning_rate": 1.9931303835931942e-05, + "loss": 1.9016, + "step": 386 + }, + { + "epoch": 0.06641496481894628, + "grad_norm": 2.898200117914242, + "learning_rate": 1.993065189987684e-05, + "loss": 1.9806, + "step": 387 + }, + { + "epoch": 0.06658657971511928, + "grad_norm": 3.945719611649247, + "learning_rate": 1.992999689569577e-05, + "loss": 1.9177, + "step": 388 + }, + { + "epoch": 0.06675819461129226, + "grad_norm": 2.747466871189157, + "learning_rate": 1.9929338823591097e-05, + "loss": 2.1244, + "step": 389 + }, + { + "epoch": 0.06692980950746524, + "grad_norm": 3.18948722466054, + "learning_rate": 1.9928677683766135e-05, + "loss": 2.2223, + "step": 390 + }, + { + "epoch": 0.06710142440363824, + "grad_norm": 3.181927887612582, + "learning_rate": 1.992801347642515e-05, + "loss": 1.8363, + "step": 391 + }, + { + "epoch": 0.06727303929981122, + "grad_norm": 3.495407532764056, + "learning_rate": 1.992734620177335e-05, + "loss": 1.9259, + "step": 392 + }, + { + "epoch": 0.06744465419598421, + "grad_norm": 3.155084394755952, + "learning_rate": 1.9926675860016897e-05, + "loss": 1.8842, + "step": 393 + }, + { + "epoch": 0.0676162690921572, + "grad_norm": 4.434655326993586, + "learning_rate": 1.9926002451362886e-05, + "loss": 2.1352, + "step": 394 + }, + { + "epoch": 0.06778788398833019, + "grad_norm": 3.173446973582143, + "learning_rate": 1.992532597601938e-05, + "loss": 2.0347, + "step": 395 + }, + { + "epoch": 0.06795949888450317, + "grad_norm": 4.424686902944709, + "learning_rate": 1.9924646434195375e-05, + "loss": 1.834, + "step": 396 + }, + { + "epoch": 0.06813111378067616, + "grad_norm": 3.019727201660959, + "learning_rate": 1.992396382610082e-05, + "loss": 1.9919, + "step": 397 + }, + { + "epoch": 0.06830272867684915, + "grad_norm": 3.7108070834984654, + "learning_rate": 1.9923278151946614e-05, + "loss": 1.8066, + "step": 398 + }, + { + "epoch": 0.06847434357302214, + "grad_norm": 5.942983928233282, + "learning_rate": 1.9922589411944592e-05, + "loss": 1.9729, + "step": 399 + }, + { + "epoch": 0.06864595846919512, + "grad_norm": 3.633377819528754, + "learning_rate": 1.992189760630755e-05, + "loss": 1.8059, + "step": 400 + }, + { + "epoch": 0.06881757336536812, + "grad_norm": 
4.220312113768063, + "learning_rate": 1.9921202735249224e-05, + "loss": 1.7966, + "step": 401 + }, + { + "epoch": 0.0689891882615411, + "grad_norm": 3.5661471053114893, + "learning_rate": 1.9920504798984293e-05, + "loss": 1.9462, + "step": 402 + }, + { + "epoch": 0.06916080315771408, + "grad_norm": 3.5535047546791594, + "learning_rate": 1.9919803797728393e-05, + "loss": 1.8868, + "step": 403 + }, + { + "epoch": 0.06933241805388708, + "grad_norm": 3.3674287157828013, + "learning_rate": 1.9919099731698103e-05, + "loss": 1.9673, + "step": 404 + }, + { + "epoch": 0.06950403295006007, + "grad_norm": 2.8131842269351166, + "learning_rate": 1.991839260111094e-05, + "loss": 2.0662, + "step": 405 + }, + { + "epoch": 0.06967564784623305, + "grad_norm": 3.782756029035063, + "learning_rate": 1.9917682406185388e-05, + "loss": 2.1042, + "step": 406 + }, + { + "epoch": 0.06984726274240605, + "grad_norm": 3.2660228549491497, + "learning_rate": 1.9916969147140856e-05, + "loss": 1.9055, + "step": 407 + }, + { + "epoch": 0.07001887763857903, + "grad_norm": 3.769049985211678, + "learning_rate": 1.991625282419771e-05, + "loss": 1.7759, + "step": 408 + }, + { + "epoch": 0.07019049253475201, + "grad_norm": 4.273349197912016, + "learning_rate": 1.9915533437577264e-05, + "loss": 2.1668, + "step": 409 + }, + { + "epoch": 0.07036210743092501, + "grad_norm": 4.4434702887682835, + "learning_rate": 1.9914810987501776e-05, + "loss": 1.9974, + "step": 410 + }, + { + "epoch": 0.070533722327098, + "grad_norm": 3.417576560816798, + "learning_rate": 1.991408547419445e-05, + "loss": 1.962, + "step": 411 + }, + { + "epoch": 0.07070533722327098, + "grad_norm": 3.700950392107592, + "learning_rate": 1.991335689787944e-05, + "loss": 1.9546, + "step": 412 + }, + { + "epoch": 0.07087695211944396, + "grad_norm": 3.26947208946921, + "learning_rate": 1.9912625258781835e-05, + "loss": 1.7231, + "step": 413 + }, + { + "epoch": 0.07104856701561696, + "grad_norm": 4.046723169623149, + "learning_rate": 1.9911890557127685e-05, + "loss": 1.9454, + "step": 414 + }, + { + "epoch": 0.07122018191178994, + "grad_norm": 4.412385919431148, + "learning_rate": 1.991115279314398e-05, + "loss": 1.6827, + "step": 415 + }, + { + "epoch": 0.07139179680796293, + "grad_norm": 3.05634464104461, + "learning_rate": 1.9910411967058656e-05, + "loss": 2.0428, + "step": 416 + }, + { + "epoch": 0.07156341170413592, + "grad_norm": 3.249064810850331, + "learning_rate": 1.9909668079100595e-05, + "loss": 1.8849, + "step": 417 + }, + { + "epoch": 0.07173502660030891, + "grad_norm": 3.187057708839728, + "learning_rate": 1.9908921129499624e-05, + "loss": 2.0543, + "step": 418 + }, + { + "epoch": 0.07190664149648189, + "grad_norm": 12.91007819072769, + "learning_rate": 1.9908171118486514e-05, + "loss": 1.8321, + "step": 419 + }, + { + "epoch": 0.07207825639265489, + "grad_norm": 2.7855151460674326, + "learning_rate": 1.990741804629299e-05, + "loss": 2.0461, + "step": 420 + }, + { + "epoch": 0.07224987128882787, + "grad_norm": 2.9537147805502277, + "learning_rate": 1.9906661913151716e-05, + "loss": 1.8317, + "step": 421 + }, + { + "epoch": 0.07242148618500086, + "grad_norm": 3.927997817390927, + "learning_rate": 1.99059027192963e-05, + "loss": 1.9384, + "step": 422 + }, + { + "epoch": 0.07259310108117385, + "grad_norm": 4.094598512833103, + "learning_rate": 1.9905140464961304e-05, + "loss": 1.9198, + "step": 423 + }, + { + "epoch": 0.07276471597734684, + "grad_norm": 4.239176127453716, + "learning_rate": 1.9904375150382223e-05, + "loss": 1.8482, + "step": 424 + }, + { + 
"epoch": 0.07293633087351982, + "grad_norm": 4.024275611068595, + "learning_rate": 1.9903606775795518e-05, + "loss": 2.0661, + "step": 425 + }, + { + "epoch": 0.0731079457696928, + "grad_norm": 3.1193533477311663, + "learning_rate": 1.9902835341438565e-05, + "loss": 1.8587, + "step": 426 + }, + { + "epoch": 0.0732795606658658, + "grad_norm": 2.512491975849034, + "learning_rate": 1.9902060847549716e-05, + "loss": 1.8338, + "step": 427 + }, + { + "epoch": 0.07345117556203878, + "grad_norm": 2.658662449947897, + "learning_rate": 1.990128329436825e-05, + "loss": 1.8456, + "step": 428 + }, + { + "epoch": 0.07362279045821177, + "grad_norm": 3.6995282944953716, + "learning_rate": 1.99005026821344e-05, + "loss": 2.0521, + "step": 429 + }, + { + "epoch": 0.07379440535438477, + "grad_norm": 2.6358572188183444, + "learning_rate": 1.989971901108933e-05, + "loss": 1.7836, + "step": 430 + }, + { + "epoch": 0.07396602025055775, + "grad_norm": 4.127850827221215, + "learning_rate": 1.989893228147517e-05, + "loss": 1.8566, + "step": 431 + }, + { + "epoch": 0.07413763514673073, + "grad_norm": 3.0195471907690847, + "learning_rate": 1.9898142493534975e-05, + "loss": 1.9363, + "step": 432 + }, + { + "epoch": 0.07430925004290373, + "grad_norm": 3.3071702203798874, + "learning_rate": 1.9897349647512762e-05, + "loss": 2.1171, + "step": 433 + }, + { + "epoch": 0.07448086493907671, + "grad_norm": 3.6398663295664493, + "learning_rate": 1.9896553743653484e-05, + "loss": 1.9054, + "step": 434 + }, + { + "epoch": 0.0746524798352497, + "grad_norm": 2.830364598806842, + "learning_rate": 1.9895754782203035e-05, + "loss": 1.8125, + "step": 435 + }, + { + "epoch": 0.0748240947314227, + "grad_norm": 3.0352347000519364, + "learning_rate": 1.989495276340826e-05, + "loss": 1.9944, + "step": 436 + }, + { + "epoch": 0.07499570962759568, + "grad_norm": 3.0099781708301045, + "learning_rate": 1.989414768751695e-05, + "loss": 1.9338, + "step": 437 + }, + { + "epoch": 0.07516732452376866, + "grad_norm": 3.315329810451368, + "learning_rate": 1.9893339554777834e-05, + "loss": 1.9966, + "step": 438 + }, + { + "epoch": 0.07533893941994164, + "grad_norm": 7.665918309754256, + "learning_rate": 1.989252836544059e-05, + "loss": 1.9866, + "step": 439 + }, + { + "epoch": 0.07551055431611464, + "grad_norm": 3.408241793814834, + "learning_rate": 1.989171411975584e-05, + "loss": 2.0964, + "step": 440 + }, + { + "epoch": 0.07568216921228763, + "grad_norm": 4.008997099121279, + "learning_rate": 1.9890896817975145e-05, + "loss": 1.9554, + "step": 441 + }, + { + "epoch": 0.07585378410846061, + "grad_norm": 2.965087523211052, + "learning_rate": 1.989007646035102e-05, + "loss": 1.6524, + "step": 442 + }, + { + "epoch": 0.0760253990046336, + "grad_norm": 2.945400897183803, + "learning_rate": 1.9889253047136913e-05, + "loss": 2.0752, + "step": 443 + }, + { + "epoch": 0.07619701390080659, + "grad_norm": 3.5975580699086382, + "learning_rate": 1.988842657858723e-05, + "loss": 2.0144, + "step": 444 + }, + { + "epoch": 0.07636862879697957, + "grad_norm": 3.741142530595054, + "learning_rate": 1.9887597054957304e-05, + "loss": 1.9509, + "step": 445 + }, + { + "epoch": 0.07654024369315257, + "grad_norm": 2.6574967818936566, + "learning_rate": 1.9886764476503425e-05, + "loss": 1.7284, + "step": 446 + }, + { + "epoch": 0.07671185858932555, + "grad_norm": 2.819419196217178, + "learning_rate": 1.9885928843482824e-05, + "loss": 1.9558, + "step": 447 + }, + { + "epoch": 0.07688347348549854, + "grad_norm": 5.046569462324691, + "learning_rate": 1.988509015615367e-05, + 
"loss": 1.8855, + "step": 448 + }, + { + "epoch": 0.07705508838167154, + "grad_norm": 2.408377661475034, + "learning_rate": 1.9884248414775083e-05, + "loss": 1.8529, + "step": 449 + }, + { + "epoch": 0.07722670327784452, + "grad_norm": 4.936975052755598, + "learning_rate": 1.9883403619607123e-05, + "loss": 2.1046, + "step": 450 + }, + { + "epoch": 0.0773983181740175, + "grad_norm": 3.1958831225716393, + "learning_rate": 1.9882555770910793e-05, + "loss": 1.7649, + "step": 451 + }, + { + "epoch": 0.07756993307019049, + "grad_norm": 2.6807885905387816, + "learning_rate": 1.9881704868948035e-05, + "loss": 1.8609, + "step": 452 + }, + { + "epoch": 0.07774154796636348, + "grad_norm": 10.419998758920823, + "learning_rate": 1.9880850913981748e-05, + "loss": 1.958, + "step": 453 + }, + { + "epoch": 0.07791316286253647, + "grad_norm": 3.8775785410147225, + "learning_rate": 1.9879993906275767e-05, + "loss": 2.065, + "step": 454 + }, + { + "epoch": 0.07808477775870945, + "grad_norm": 2.849534708153432, + "learning_rate": 1.9879133846094856e-05, + "loss": 1.8052, + "step": 455 + }, + { + "epoch": 0.07825639265488245, + "grad_norm": 3.5578531827828814, + "learning_rate": 1.987827073370475e-05, + "loss": 1.8499, + "step": 456 + }, + { + "epoch": 0.07842800755105543, + "grad_norm": 2.5982057469746147, + "learning_rate": 1.98774045693721e-05, + "loss": 1.7817, + "step": 457 + }, + { + "epoch": 0.07859962244722841, + "grad_norm": 3.5287356306423914, + "learning_rate": 1.987653535336452e-05, + "loss": 2.0148, + "step": 458 + }, + { + "epoch": 0.07877123734340141, + "grad_norm": 2.7055000404778546, + "learning_rate": 1.9875663085950556e-05, + "loss": 1.7614, + "step": 459 + }, + { + "epoch": 0.0789428522395744, + "grad_norm": 2.805128544342488, + "learning_rate": 1.9874787767399703e-05, + "loss": 2.061, + "step": 460 + }, + { + "epoch": 0.07911446713574738, + "grad_norm": 2.7474189241133495, + "learning_rate": 1.9873909397982385e-05, + "loss": 1.7367, + "step": 461 + }, + { + "epoch": 0.07928608203192038, + "grad_norm": 3.7147745663511347, + "learning_rate": 1.987302797796999e-05, + "loss": 1.9826, + "step": 462 + }, + { + "epoch": 0.07945769692809336, + "grad_norm": 4.23555155133059, + "learning_rate": 1.987214350763483e-05, + "loss": 2.182, + "step": 463 + }, + { + "epoch": 0.07962931182426634, + "grad_norm": 2.9681099902927888, + "learning_rate": 1.9871255987250172e-05, + "loss": 1.8332, + "step": 464 + }, + { + "epoch": 0.07980092672043933, + "grad_norm": 2.644610422448092, + "learning_rate": 1.9870365417090217e-05, + "loss": 1.9012, + "step": 465 + }, + { + "epoch": 0.07997254161661232, + "grad_norm": 3.085899852043181, + "learning_rate": 1.9869471797430112e-05, + "loss": 1.8466, + "step": 466 + }, + { + "epoch": 0.08014415651278531, + "grad_norm": 2.718756855467533, + "learning_rate": 1.986857512854594e-05, + "loss": 2.1873, + "step": 467 + }, + { + "epoch": 0.08031577140895829, + "grad_norm": 2.822960592290468, + "learning_rate": 1.9867675410714744e-05, + "loss": 2.2561, + "step": 468 + }, + { + "epoch": 0.08048738630513129, + "grad_norm": 2.7080760757881848, + "learning_rate": 1.9866772644214484e-05, + "loss": 1.8981, + "step": 469 + }, + { + "epoch": 0.08065900120130427, + "grad_norm": 2.8606334579442803, + "learning_rate": 1.986586682932408e-05, + "loss": 1.8657, + "step": 470 + }, + { + "epoch": 0.08083061609747726, + "grad_norm": 2.9320038272501243, + "learning_rate": 1.986495796632339e-05, + "loss": 2.1208, + "step": 471 + }, + { + "epoch": 0.08100223099365025, + "grad_norm": 3.3310414941403983, 
+ "learning_rate": 1.9864046055493205e-05, + "loss": 1.8079, + "step": 472 + }, + { + "epoch": 0.08117384588982324, + "grad_norm": 4.173030541213135, + "learning_rate": 1.9863131097115273e-05, + "loss": 2.1978, + "step": 473 + }, + { + "epoch": 0.08134546078599622, + "grad_norm": 3.2631957690217166, + "learning_rate": 1.9862213091472267e-05, + "loss": 1.6694, + "step": 474 + }, + { + "epoch": 0.08151707568216922, + "grad_norm": 3.6549279485111072, + "learning_rate": 1.9861292038847818e-05, + "loss": 2.1436, + "step": 475 + }, + { + "epoch": 0.0816886905783422, + "grad_norm": 3.4579723786448406, + "learning_rate": 1.9860367939526482e-05, + "loss": 2.0133, + "step": 476 + }, + { + "epoch": 0.08186030547451519, + "grad_norm": 3.502710560745024, + "learning_rate": 1.9859440793793766e-05, + "loss": 1.9742, + "step": 477 + }, + { + "epoch": 0.08203192037068817, + "grad_norm": 3.544396288978015, + "learning_rate": 1.9858510601936124e-05, + "loss": 2.1805, + "step": 478 + }, + { + "epoch": 0.08220353526686117, + "grad_norm": 3.443255565471956, + "learning_rate": 1.9857577364240933e-05, + "loss": 2.0108, + "step": 479 + }, + { + "epoch": 0.08237515016303415, + "grad_norm": 2.810740942597221, + "learning_rate": 1.9856641080996527e-05, + "loss": 2.1126, + "step": 480 + }, + { + "epoch": 0.08254676505920713, + "grad_norm": 2.6304720077998645, + "learning_rate": 1.9855701752492174e-05, + "loss": 1.9508, + "step": 481 + }, + { + "epoch": 0.08271837995538013, + "grad_norm": 3.4067239352352443, + "learning_rate": 1.985475937901809e-05, + "loss": 2.0391, + "step": 482 + }, + { + "epoch": 0.08288999485155311, + "grad_norm": 2.7479962805697555, + "learning_rate": 1.985381396086542e-05, + "loss": 1.7972, + "step": 483 + }, + { + "epoch": 0.0830616097477261, + "grad_norm": 3.4900530274096995, + "learning_rate": 1.9852865498326253e-05, + "loss": 1.961, + "step": 484 + }, + { + "epoch": 0.0832332246438991, + "grad_norm": 2.5491101161030776, + "learning_rate": 1.9851913991693626e-05, + "loss": 2.1167, + "step": 485 + }, + { + "epoch": 0.08340483954007208, + "grad_norm": 2.924649694838515, + "learning_rate": 1.9850959441261514e-05, + "loss": 1.8073, + "step": 486 + }, + { + "epoch": 0.08357645443624506, + "grad_norm": 3.536472957231003, + "learning_rate": 1.9850001847324825e-05, + "loss": 1.9382, + "step": 487 + }, + { + "epoch": 0.08374806933241806, + "grad_norm": 3.054182657170999, + "learning_rate": 1.984904121017942e-05, + "loss": 2.0425, + "step": 488 + }, + { + "epoch": 0.08391968422859104, + "grad_norm": 2.7927409849462945, + "learning_rate": 1.9848077530122083e-05, + "loss": 2.0228, + "step": 489 + }, + { + "epoch": 0.08409129912476403, + "grad_norm": 2.57527649206855, + "learning_rate": 1.9847110807450555e-05, + "loss": 1.8985, + "step": 490 + }, + { + "epoch": 0.08426291402093702, + "grad_norm": 3.4586774197398036, + "learning_rate": 1.9846141042463505e-05, + "loss": 1.7976, + "step": 491 + }, + { + "epoch": 0.08443452891711001, + "grad_norm": 2.7668380943368076, + "learning_rate": 1.9845168235460553e-05, + "loss": 1.9634, + "step": 492 + }, + { + "epoch": 0.08460614381328299, + "grad_norm": 3.4242371710848163, + "learning_rate": 1.984419238674225e-05, + "loss": 2.0589, + "step": 493 + }, + { + "epoch": 0.08477775870945597, + "grad_norm": 2.7113501894419514, + "learning_rate": 1.9843213496610086e-05, + "loss": 1.964, + "step": 494 + }, + { + "epoch": 0.08494937360562897, + "grad_norm": 3.058832849825258, + "learning_rate": 1.98422315653665e-05, + "loss": 1.7989, + "step": 495 + }, + { + "epoch": 
0.08512098850180196, + "grad_norm": 4.080728644514222, + "learning_rate": 1.984124659331486e-05, + "loss": 1.9278, + "step": 496 + }, + { + "epoch": 0.08529260339797494, + "grad_norm": 3.0226243545744182, + "learning_rate": 1.984025858075948e-05, + "loss": 1.8404, + "step": 497 + }, + { + "epoch": 0.08546421829414794, + "grad_norm": 3.370632563740033, + "learning_rate": 1.983926752800561e-05, + "loss": 1.9167, + "step": 498 + }, + { + "epoch": 0.08563583319032092, + "grad_norm": 3.978424419074891, + "learning_rate": 1.9838273435359447e-05, + "loss": 1.9387, + "step": 499 + }, + { + "epoch": 0.0858074480864939, + "grad_norm": 3.9857709854862575, + "learning_rate": 1.983727630312811e-05, + "loss": 1.9806, + "step": 500 + }, + { + "epoch": 0.0859790629826669, + "grad_norm": 3.078979700594615, + "learning_rate": 1.983627613161968e-05, + "loss": 2.0171, + "step": 501 + }, + { + "epoch": 0.08615067787883988, + "grad_norm": 2.5471736174880104, + "learning_rate": 1.9835272921143156e-05, + "loss": 2.1804, + "step": 502 + }, + { + "epoch": 0.08632229277501287, + "grad_norm": 3.4829829985310585, + "learning_rate": 1.983426667200849e-05, + "loss": 2.0033, + "step": 503 + }, + { + "epoch": 0.08649390767118587, + "grad_norm": 2.95167871963179, + "learning_rate": 1.983325738452657e-05, + "loss": 2.0968, + "step": 504 + }, + { + "epoch": 0.08666552256735885, + "grad_norm": 3.360596300617668, + "learning_rate": 1.983224505900921e-05, + "loss": 2.0263, + "step": 505 + }, + { + "epoch": 0.08683713746353183, + "grad_norm": 8.967978708069104, + "learning_rate": 1.9831229695769184e-05, + "loss": 2.1001, + "step": 506 + }, + { + "epoch": 0.08700875235970482, + "grad_norm": 2.5752979411092767, + "learning_rate": 1.983021129512019e-05, + "loss": 1.8587, + "step": 507 + }, + { + "epoch": 0.08718036725587781, + "grad_norm": 2.657936696316239, + "learning_rate": 1.982918985737687e-05, + "loss": 1.8626, + "step": 508 + }, + { + "epoch": 0.0873519821520508, + "grad_norm": 4.0533900902450855, + "learning_rate": 1.9828165382854798e-05, + "loss": 1.9509, + "step": 509 + }, + { + "epoch": 0.08752359704822378, + "grad_norm": 4.45646664322224, + "learning_rate": 1.9827137871870494e-05, + "loss": 1.9707, + "step": 510 + }, + { + "epoch": 0.08769521194439678, + "grad_norm": 2.8055586834923765, + "learning_rate": 1.9826107324741413e-05, + "loss": 1.6581, + "step": 511 + }, + { + "epoch": 0.08786682684056976, + "grad_norm": 3.013466058812041, + "learning_rate": 1.9825073741785946e-05, + "loss": 2.0867, + "step": 512 + }, + { + "epoch": 0.08803844173674275, + "grad_norm": 2.760088907057879, + "learning_rate": 1.9824037123323423e-05, + "loss": 2.1439, + "step": 513 + }, + { + "epoch": 0.08821005663291574, + "grad_norm": 3.117211945576528, + "learning_rate": 1.9822997469674116e-05, + "loss": 1.9068, + "step": 514 + }, + { + "epoch": 0.08838167152908873, + "grad_norm": 3.3500210513213684, + "learning_rate": 1.9821954781159224e-05, + "loss": 1.7608, + "step": 515 + }, + { + "epoch": 0.08855328642526171, + "grad_norm": 3.0002791868067815, + "learning_rate": 1.9820909058100904e-05, + "loss": 1.8047, + "step": 516 + }, + { + "epoch": 0.08872490132143471, + "grad_norm": 2.51459532589054, + "learning_rate": 1.9819860300822227e-05, + "loss": 1.9093, + "step": 517 + }, + { + "epoch": 0.08889651621760769, + "grad_norm": 8.390605370624675, + "learning_rate": 1.9818808509647215e-05, + "loss": 2.1025, + "step": 518 + }, + { + "epoch": 0.08906813111378067, + "grad_norm": 3.9247418992069014, + "learning_rate": 1.9817753684900826e-05, + "loss": 
1.8867, + "step": 519 + }, + { + "epoch": 0.08923974600995366, + "grad_norm": 3.067543628105918, + "learning_rate": 1.9816695826908948e-05, + "loss": 1.7059, + "step": 520 + }, + { + "epoch": 0.08941136090612666, + "grad_norm": 3.0720250388049624, + "learning_rate": 1.9815634935998417e-05, + "loss": 1.9277, + "step": 521 + }, + { + "epoch": 0.08958297580229964, + "grad_norm": 4.512210623445909, + "learning_rate": 1.9814571012497e-05, + "loss": 2.1153, + "step": 522 + }, + { + "epoch": 0.08975459069847262, + "grad_norm": 3.3849956847818654, + "learning_rate": 1.9813504056733403e-05, + "loss": 1.9046, + "step": 523 + }, + { + "epoch": 0.08992620559464562, + "grad_norm": 2.975847294834541, + "learning_rate": 1.981243406903726e-05, + "loss": 2.0535, + "step": 524 + }, + { + "epoch": 0.0900978204908186, + "grad_norm": 5.271145305845035, + "learning_rate": 1.981136104973916e-05, + "loss": 2.0219, + "step": 525 + }, + { + "epoch": 0.09026943538699159, + "grad_norm": 3.6965403948842974, + "learning_rate": 1.981028499917061e-05, + "loss": 1.8159, + "step": 526 + }, + { + "epoch": 0.09044105028316458, + "grad_norm": 3.370738627265268, + "learning_rate": 1.9809205917664064e-05, + "loss": 1.7585, + "step": 527 + }, + { + "epoch": 0.09061266517933757, + "grad_norm": 3.3626169915707336, + "learning_rate": 1.9808123805552908e-05, + "loss": 1.7938, + "step": 528 + }, + { + "epoch": 0.09078428007551055, + "grad_norm": 4.035214519495458, + "learning_rate": 1.980703866317147e-05, + "loss": 2.1773, + "step": 529 + }, + { + "epoch": 0.09095589497168355, + "grad_norm": 7.440776600318674, + "learning_rate": 1.9805950490855003e-05, + "loss": 1.8127, + "step": 530 + }, + { + "epoch": 0.09112750986785653, + "grad_norm": 3.4549605133639236, + "learning_rate": 1.980485928893971e-05, + "loss": 2.0026, + "step": 531 + }, + { + "epoch": 0.09129912476402952, + "grad_norm": 5.6791317707563325, + "learning_rate": 1.9803765057762723e-05, + "loss": 1.8348, + "step": 532 + }, + { + "epoch": 0.0914707396602025, + "grad_norm": 3.2830422913475052, + "learning_rate": 1.9802667797662113e-05, + "loss": 1.53, + "step": 533 + }, + { + "epoch": 0.0916423545563755, + "grad_norm": 3.356830892760497, + "learning_rate": 1.9801567508976874e-05, + "loss": 1.8351, + "step": 534 + }, + { + "epoch": 0.09181396945254848, + "grad_norm": 4.870122528034997, + "learning_rate": 1.9800464192046956e-05, + "loss": 2.1619, + "step": 535 + }, + { + "epoch": 0.09198558434872146, + "grad_norm": 2.9268404406739297, + "learning_rate": 1.979935784721323e-05, + "loss": 1.8741, + "step": 536 + }, + { + "epoch": 0.09215719924489446, + "grad_norm": 3.8006197984773853, + "learning_rate": 1.9798248474817506e-05, + "loss": 1.7688, + "step": 537 + }, + { + "epoch": 0.09232881414106744, + "grad_norm": 3.1193126068672266, + "learning_rate": 1.9797136075202532e-05, + "loss": 2.1704, + "step": 538 + }, + { + "epoch": 0.09250042903724043, + "grad_norm": 3.724141320543389, + "learning_rate": 1.979602064871199e-05, + "loss": 2.056, + "step": 539 + }, + { + "epoch": 0.09267204393341343, + "grad_norm": 4.097581038475727, + "learning_rate": 1.9794902195690494e-05, + "loss": 2.0488, + "step": 540 + }, + { + "epoch": 0.09284365882958641, + "grad_norm": 2.577689561051121, + "learning_rate": 1.97937807164836e-05, + "loss": 1.9698, + "step": 541 + }, + { + "epoch": 0.09301527372575939, + "grad_norm": 3.311870050584975, + "learning_rate": 1.979265621143779e-05, + "loss": 1.7667, + "step": 542 + }, + { + "epoch": 0.09318688862193239, + "grad_norm": 3.7420977288530444, + 
"learning_rate": 1.9791528680900494e-05, + "loss": 1.8669, + "step": 543 + }, + { + "epoch": 0.09335850351810537, + "grad_norm": 3.128448264247861, + "learning_rate": 1.9790398125220056e-05, + "loss": 2.1067, + "step": 544 + }, + { + "epoch": 0.09353011841427836, + "grad_norm": 2.8846794252262726, + "learning_rate": 1.9789264544745776e-05, + "loss": 1.749, + "step": 545 + }, + { + "epoch": 0.09370173331045134, + "grad_norm": 3.151280219152059, + "learning_rate": 1.978812793982788e-05, + "loss": 2.0316, + "step": 546 + }, + { + "epoch": 0.09387334820662434, + "grad_norm": 3.0588625058884094, + "learning_rate": 1.9786988310817523e-05, + "loss": 2.0002, + "step": 547 + }, + { + "epoch": 0.09404496310279732, + "grad_norm": 3.19267554194025, + "learning_rate": 1.97858456580668e-05, + "loss": 1.7847, + "step": 548 + }, + { + "epoch": 0.0942165779989703, + "grad_norm": 4.359375800283079, + "learning_rate": 1.9784699981928742e-05, + "loss": 2.0769, + "step": 549 + }, + { + "epoch": 0.0943881928951433, + "grad_norm": 3.7311402607739437, + "learning_rate": 1.9783551282757312e-05, + "loss": 1.9001, + "step": 550 + }, + { + "epoch": 0.09455980779131629, + "grad_norm": 4.065594334215484, + "learning_rate": 1.9782399560907402e-05, + "loss": 1.747, + "step": 551 + }, + { + "epoch": 0.09473142268748927, + "grad_norm": 3.8955969320944206, + "learning_rate": 1.9781244816734846e-05, + "loss": 1.974, + "step": 552 + }, + { + "epoch": 0.09490303758366227, + "grad_norm": 5.863358963140666, + "learning_rate": 1.9780087050596407e-05, + "loss": 1.9237, + "step": 553 + }, + { + "epoch": 0.09507465247983525, + "grad_norm": 5.016966928614574, + "learning_rate": 1.9778926262849786e-05, + "loss": 1.8072, + "step": 554 + }, + { + "epoch": 0.09524626737600823, + "grad_norm": 4.709542800475869, + "learning_rate": 1.9777762453853607e-05, + "loss": 1.7808, + "step": 555 + }, + { + "epoch": 0.09541788227218123, + "grad_norm": 3.6423757917494117, + "learning_rate": 1.9776595623967443e-05, + "loss": 1.7195, + "step": 556 + }, + { + "epoch": 0.09558949716835421, + "grad_norm": 3.6524386217677813, + "learning_rate": 1.9775425773551788e-05, + "loss": 1.6905, + "step": 557 + }, + { + "epoch": 0.0957611120645272, + "grad_norm": 4.827821550933328, + "learning_rate": 1.977425290296807e-05, + "loss": 1.9539, + "step": 558 + }, + { + "epoch": 0.09593272696070018, + "grad_norm": 5.935373963702225, + "learning_rate": 1.977307701257866e-05, + "loss": 1.8562, + "step": 559 + }, + { + "epoch": 0.09610434185687318, + "grad_norm": 3.373470791522794, + "learning_rate": 1.9771898102746852e-05, + "loss": 1.9662, + "step": 560 + }, + { + "epoch": 0.09627595675304616, + "grad_norm": 3.2249559209608067, + "learning_rate": 1.9770716173836877e-05, + "loss": 2.1233, + "step": 561 + }, + { + "epoch": 0.09644757164921915, + "grad_norm": 2.8806275169368702, + "learning_rate": 1.9769531226213897e-05, + "loss": 1.9166, + "step": 562 + }, + { + "epoch": 0.09661918654539214, + "grad_norm": 2.846031982418901, + "learning_rate": 1.9768343260244008e-05, + "loss": 2.1811, + "step": 563 + }, + { + "epoch": 0.09679080144156513, + "grad_norm": 2.6801343098220665, + "learning_rate": 1.9767152276294238e-05, + "loss": 1.9713, + "step": 564 + }, + { + "epoch": 0.09696241633773811, + "grad_norm": 3.3555235026297776, + "learning_rate": 1.976595827473255e-05, + "loss": 1.8181, + "step": 565 + }, + { + "epoch": 0.09713403123391111, + "grad_norm": 2.7266374781964697, + "learning_rate": 1.9764761255927838e-05, + "loss": 1.9583, + "step": 566 + }, + { + "epoch": 
0.09730564613008409, + "grad_norm": 2.921284455513616, + "learning_rate": 1.976356122024992e-05, + "loss": 1.9281, + "step": 567 + }, + { + "epoch": 0.09747726102625708, + "grad_norm": 6.026462412464931, + "learning_rate": 1.976235816806956e-05, + "loss": 1.9916, + "step": 568 + }, + { + "epoch": 0.09764887592243007, + "grad_norm": 3.908176239370151, + "learning_rate": 1.976115209975844e-05, + "loss": 1.9847, + "step": 569 + }, + { + "epoch": 0.09782049081860306, + "grad_norm": 3.741167402922778, + "learning_rate": 1.975994301568919e-05, + "loss": 2.0098, + "step": 570 + }, + { + "epoch": 0.09799210571477604, + "grad_norm": 3.248760159620241, + "learning_rate": 1.9758730916235356e-05, + "loss": 1.9698, + "step": 571 + }, + { + "epoch": 0.09816372061094902, + "grad_norm": 2.840623470202775, + "learning_rate": 1.9757515801771426e-05, + "loss": 1.9504, + "step": 572 + }, + { + "epoch": 0.09833533550712202, + "grad_norm": 4.6024284522510825, + "learning_rate": 1.9756297672672812e-05, + "loss": 1.9659, + "step": 573 + }, + { + "epoch": 0.098506950403295, + "grad_norm": 2.8757555299447914, + "learning_rate": 1.9755076529315867e-05, + "loss": 1.9931, + "step": 574 + }, + { + "epoch": 0.09867856529946799, + "grad_norm": 2.8663417701819576, + "learning_rate": 1.9753852372077863e-05, + "loss": 2.0503, + "step": 575 + }, + { + "epoch": 0.09885018019564099, + "grad_norm": 3.2065741772887955, + "learning_rate": 1.9752625201337017e-05, + "loss": 2.1571, + "step": 576 + }, + { + "epoch": 0.09902179509181397, + "grad_norm": 3.8679304110141564, + "learning_rate": 1.9751395017472462e-05, + "loss": 1.7899, + "step": 577 + }, + { + "epoch": 0.09919340998798695, + "grad_norm": 3.746094886403712, + "learning_rate": 1.9750161820864274e-05, + "loss": 2.0576, + "step": 578 + }, + { + "epoch": 0.09936502488415995, + "grad_norm": 3.0513299621498415, + "learning_rate": 1.974892561189346e-05, + "loss": 1.8414, + "step": 579 + }, + { + "epoch": 0.09953663978033293, + "grad_norm": 2.5881152892209793, + "learning_rate": 1.9747686390941944e-05, + "loss": 1.5498, + "step": 580 + }, + { + "epoch": 0.09970825467650592, + "grad_norm": 3.594256534330549, + "learning_rate": 1.974644415839259e-05, + "loss": 2.0439, + "step": 581 + }, + { + "epoch": 0.09987986957267891, + "grad_norm": 2.690148745647811, + "learning_rate": 1.97451989146292e-05, + "loss": 1.976, + "step": 582 + }, + { + "epoch": 0.1000514844688519, + "grad_norm": 3.0016291317241035, + "learning_rate": 1.974395066003649e-05, + "loss": 1.8832, + "step": 583 + }, + { + "epoch": 0.10022309936502488, + "grad_norm": 3.548342018027038, + "learning_rate": 1.974269939500012e-05, + "loss": 2.0379, + "step": 584 + }, + { + "epoch": 0.10039471426119788, + "grad_norm": 3.6550894878737146, + "learning_rate": 1.974144511990668e-05, + "loss": 2.0104, + "step": 585 + }, + { + "epoch": 0.10056632915737086, + "grad_norm": 3.245162718313138, + "learning_rate": 1.9740187835143673e-05, + "loss": 1.9786, + "step": 586 + }, + { + "epoch": 0.10073794405354385, + "grad_norm": 4.968097520781465, + "learning_rate": 1.973892754109955e-05, + "loss": 2.0406, + "step": 587 + }, + { + "epoch": 0.10090955894971683, + "grad_norm": 3.551799572281438, + "learning_rate": 1.9737664238163683e-05, + "loss": 1.9049, + "step": 588 + }, + { + "epoch": 0.10108117384588983, + "grad_norm": 7.569469865852626, + "learning_rate": 1.973639792672638e-05, + "loss": 2.0975, + "step": 589 + }, + { + "epoch": 0.10125278874206281, + "grad_norm": 5.167328577189202, + "learning_rate": 1.9735128607178868e-05, + "loss": 
1.8706, + "step": 590 + }, + { + "epoch": 0.1014244036382358, + "grad_norm": 2.6708530788301115, + "learning_rate": 1.9733856279913314e-05, + "loss": 1.9269, + "step": 591 + }, + { + "epoch": 0.10159601853440879, + "grad_norm": 3.50025775398842, + "learning_rate": 1.9732580945322813e-05, + "loss": 1.8695, + "step": 592 + }, + { + "epoch": 0.10176763343058177, + "grad_norm": 3.17379544886861, + "learning_rate": 1.973130260380138e-05, + "loss": 1.7495, + "step": 593 + }, + { + "epoch": 0.10193924832675476, + "grad_norm": 4.612389180089776, + "learning_rate": 1.9730021255743967e-05, + "loss": 2.1471, + "step": 594 + }, + { + "epoch": 0.10211086322292776, + "grad_norm": 4.730339771370154, + "learning_rate": 1.9728736901546454e-05, + "loss": 2.026, + "step": 595 + }, + { + "epoch": 0.10228247811910074, + "grad_norm": 3.186485286061052, + "learning_rate": 1.972744954160565e-05, + "loss": 1.8984, + "step": 596 + }, + { + "epoch": 0.10245409301527372, + "grad_norm": 3.096466402518034, + "learning_rate": 1.9726159176319292e-05, + "loss": 1.7394, + "step": 597 + }, + { + "epoch": 0.10262570791144672, + "grad_norm": 3.2725057351977216, + "learning_rate": 1.972486580608604e-05, + "loss": 2.057, + "step": 598 + }, + { + "epoch": 0.1027973228076197, + "grad_norm": 3.3517565050028657, + "learning_rate": 1.972356943130549e-05, + "loss": 1.9072, + "step": 599 + }, + { + "epoch": 0.10296893770379269, + "grad_norm": 2.740538428486397, + "learning_rate": 1.9722270052378167e-05, + "loss": 1.5391, + "step": 600 + }, + { + "epoch": 0.10314055259996567, + "grad_norm": 2.947851109288251, + "learning_rate": 1.972096766970552e-05, + "loss": 2.2493, + "step": 601 + }, + { + "epoch": 0.10331216749613867, + "grad_norm": 3.4665930207183964, + "learning_rate": 1.971966228368992e-05, + "loss": 1.7292, + "step": 602 + }, + { + "epoch": 0.10348378239231165, + "grad_norm": 2.7646787832771547, + "learning_rate": 1.9718353894734677e-05, + "loss": 1.9337, + "step": 603 + }, + { + "epoch": 0.10365539728848464, + "grad_norm": 2.816464405924286, + "learning_rate": 1.9717042503244032e-05, + "loss": 1.8134, + "step": 604 + }, + { + "epoch": 0.10382701218465763, + "grad_norm": 2.793797413732475, + "learning_rate": 1.9715728109623134e-05, + "loss": 2.0687, + "step": 605 + }, + { + "epoch": 0.10399862708083062, + "grad_norm": 2.813962776958442, + "learning_rate": 1.971441071427808e-05, + "loss": 1.6167, + "step": 606 + }, + { + "epoch": 0.1041702419770036, + "grad_norm": 4.155914786093373, + "learning_rate": 1.9713090317615877e-05, + "loss": 2.1836, + "step": 607 + }, + { + "epoch": 0.1043418568731766, + "grad_norm": 3.6848162922317202, + "learning_rate": 1.971176692004448e-05, + "loss": 2.1002, + "step": 608 + }, + { + "epoch": 0.10451347176934958, + "grad_norm": 4.041843806508891, + "learning_rate": 1.971044052197275e-05, + "loss": 1.8046, + "step": 609 + }, + { + "epoch": 0.10468508666552256, + "grad_norm": 3.641295749603698, + "learning_rate": 1.970911112381049e-05, + "loss": 1.7766, + "step": 610 + }, + { + "epoch": 0.10485670156169556, + "grad_norm": 3.172220566704014, + "learning_rate": 1.9707778725968427e-05, + "loss": 1.7329, + "step": 611 + }, + { + "epoch": 0.10502831645786855, + "grad_norm": 4.395258415141472, + "learning_rate": 1.97064433288582e-05, + "loss": 1.9403, + "step": 612 + }, + { + "epoch": 0.10519993135404153, + "grad_norm": 3.439688326160458, + "learning_rate": 1.9705104932892398e-05, + "loss": 1.8274, + "step": 613 + }, + { + "epoch": 0.10537154625021451, + "grad_norm": 3.5392487781649966, + "learning_rate": 
1.9703763538484525e-05, + "loss": 1.9361, + "step": 614 + }, + { + "epoch": 0.10554316114638751, + "grad_norm": 5.462091347217853, + "learning_rate": 1.9702419146049004e-05, + "loss": 2.1089, + "step": 615 + }, + { + "epoch": 0.1057147760425605, + "grad_norm": 3.408580501453271, + "learning_rate": 1.97010717560012e-05, + "loss": 1.9725, + "step": 616 + }, + { + "epoch": 0.10588639093873348, + "grad_norm": 3.013125385132649, + "learning_rate": 1.969972136875739e-05, + "loss": 1.9127, + "step": 617 + }, + { + "epoch": 0.10605800583490647, + "grad_norm": 4.047263351078423, + "learning_rate": 1.9698367984734786e-05, + "loss": 1.7079, + "step": 618 + }, + { + "epoch": 0.10622962073107946, + "grad_norm": 3.579643650818717, + "learning_rate": 1.9697011604351524e-05, + "loss": 1.9259, + "step": 619 + }, + { + "epoch": 0.10640123562725244, + "grad_norm": 3.216916581264715, + "learning_rate": 1.9695652228026664e-05, + "loss": 1.7663, + "step": 620 + }, + { + "epoch": 0.10657285052342544, + "grad_norm": 2.4013460522156334, + "learning_rate": 1.969428985618019e-05, + "loss": 1.5992, + "step": 621 + }, + { + "epoch": 0.10674446541959842, + "grad_norm": 3.536308684264547, + "learning_rate": 1.9692924489233015e-05, + "loss": 1.8546, + "step": 622 + }, + { + "epoch": 0.1069160803157714, + "grad_norm": 4.486968367615817, + "learning_rate": 1.969155612760698e-05, + "loss": 2.0518, + "step": 623 + }, + { + "epoch": 0.1070876952119444, + "grad_norm": 3.1146699884436604, + "learning_rate": 1.9690184771724843e-05, + "loss": 1.9719, + "step": 624 + }, + { + "epoch": 0.10725931010811739, + "grad_norm": 2.723565022988268, + "learning_rate": 1.968881042201029e-05, + "loss": 1.6757, + "step": 625 + }, + { + "epoch": 0.10743092500429037, + "grad_norm": 2.5503330278692276, + "learning_rate": 1.968743307888794e-05, + "loss": 1.8799, + "step": 626 + }, + { + "epoch": 0.10760253990046335, + "grad_norm": 2.5208950324474064, + "learning_rate": 1.9686052742783324e-05, + "loss": 1.9736, + "step": 627 + }, + { + "epoch": 0.10777415479663635, + "grad_norm": 2.7016050673369527, + "learning_rate": 1.9684669414122905e-05, + "loss": 1.7507, + "step": 628 + }, + { + "epoch": 0.10794576969280933, + "grad_norm": 4.3722822684553435, + "learning_rate": 1.968328309333407e-05, + "loss": 1.8855, + "step": 629 + }, + { + "epoch": 0.10811738458898232, + "grad_norm": 4.463669412847087, + "learning_rate": 1.9681893780845135e-05, + "loss": 1.6386, + "step": 630 + }, + { + "epoch": 0.10828899948515532, + "grad_norm": 5.27048101943173, + "learning_rate": 1.9680501477085326e-05, + "loss": 1.9182, + "step": 631 + }, + { + "epoch": 0.1084606143813283, + "grad_norm": 2.9816302262616774, + "learning_rate": 1.967910618248481e-05, + "loss": 1.8867, + "step": 632 + }, + { + "epoch": 0.10863222927750128, + "grad_norm": 4.1278007050382595, + "learning_rate": 1.9677707897474666e-05, + "loss": 2.2158, + "step": 633 + }, + { + "epoch": 0.10880384417367428, + "grad_norm": 2.7763390061635174, + "learning_rate": 1.9676306622486905e-05, + "loss": 1.8851, + "step": 634 + }, + { + "epoch": 0.10897545906984726, + "grad_norm": 2.6991119336027625, + "learning_rate": 1.9674902357954452e-05, + "loss": 1.873, + "step": 635 + }, + { + "epoch": 0.10914707396602025, + "grad_norm": 3.247815678741392, + "learning_rate": 1.9673495104311173e-05, + "loss": 2.0542, + "step": 636 + }, + { + "epoch": 0.10931868886219324, + "grad_norm": 4.928723626057146, + "learning_rate": 1.967208486199183e-05, + "loss": 2.044, + "step": 637 + }, + { + "epoch": 0.10949030375836623, + "grad_norm": 
8.847684867762078, + "learning_rate": 1.967067163143214e-05, + "loss": 1.8528, + "step": 638 + }, + { + "epoch": 0.10966191865453921, + "grad_norm": 4.138800403458856, + "learning_rate": 1.966925541306872e-05, + "loss": 1.9944, + "step": 639 + }, + { + "epoch": 0.1098335335507122, + "grad_norm": 3.2290060979119546, + "learning_rate": 1.966783620733912e-05, + "loss": 1.7702, + "step": 640 + }, + { + "epoch": 0.11000514844688519, + "grad_norm": 3.5287648021841154, + "learning_rate": 1.966641401468181e-05, + "loss": 1.7717, + "step": 641 + }, + { + "epoch": 0.11017676334305818, + "grad_norm": 4.85279740952659, + "learning_rate": 1.9664988835536182e-05, + "loss": 1.9606, + "step": 642 + }, + { + "epoch": 0.11034837823923116, + "grad_norm": 3.252648610700815, + "learning_rate": 1.966356067034256e-05, + "loss": 2.1478, + "step": 643 + }, + { + "epoch": 0.11051999313540416, + "grad_norm": 2.4114083412323613, + "learning_rate": 1.966212951954217e-05, + "loss": 1.6045, + "step": 644 + }, + { + "epoch": 0.11069160803157714, + "grad_norm": 3.81809633012379, + "learning_rate": 1.9660695383577186e-05, + "loss": 1.8116, + "step": 645 + }, + { + "epoch": 0.11086322292775012, + "grad_norm": 4.1287708371918495, + "learning_rate": 1.9659258262890683e-05, + "loss": 1.9988, + "step": 646 + }, + { + "epoch": 0.11103483782392312, + "grad_norm": 6.137133294763594, + "learning_rate": 1.9657818157926677e-05, + "loss": 2.1113, + "step": 647 + }, + { + "epoch": 0.1112064527200961, + "grad_norm": 2.6768078380956473, + "learning_rate": 1.965637506913008e-05, + "loss": 1.833, + "step": 648 + }, + { + "epoch": 0.11137806761626909, + "grad_norm": 2.9178277443453053, + "learning_rate": 1.9654928996946758e-05, + "loss": 1.8341, + "step": 649 + }, + { + "epoch": 0.11154968251244209, + "grad_norm": 4.2718234230700745, + "learning_rate": 1.9653479941823473e-05, + "loss": 2.0536, + "step": 650 + }, + { + "epoch": 0.11172129740861507, + "grad_norm": 3.0820747168150713, + "learning_rate": 1.9652027904207923e-05, + "loss": 1.9344, + "step": 651 + }, + { + "epoch": 0.11189291230478805, + "grad_norm": 2.892465972319917, + "learning_rate": 1.9650572884548715e-05, + "loss": 1.9195, + "step": 652 + }, + { + "epoch": 0.11206452720096104, + "grad_norm": 2.179473271607477, + "learning_rate": 1.9649114883295393e-05, + "loss": 1.6974, + "step": 653 + }, + { + "epoch": 0.11223614209713403, + "grad_norm": 2.8238338911288143, + "learning_rate": 1.9647653900898408e-05, + "loss": 1.8772, + "step": 654 + }, + { + "epoch": 0.11240775699330702, + "grad_norm": 2.9846740287773157, + "learning_rate": 1.9646189937809145e-05, + "loss": 1.8713, + "step": 655 + }, + { + "epoch": 0.11257937188948, + "grad_norm": 3.1163205886163605, + "learning_rate": 1.9644722994479898e-05, + "loss": 1.9222, + "step": 656 + }, + { + "epoch": 0.112750986785653, + "grad_norm": 3.6345679253974135, + "learning_rate": 1.9643253071363885e-05, + "loss": 2.0683, + "step": 657 + }, + { + "epoch": 0.11292260168182598, + "grad_norm": 3.6614704530939512, + "learning_rate": 1.9641780168915247e-05, + "loss": 2.0771, + "step": 658 + }, + { + "epoch": 0.11309421657799897, + "grad_norm": 3.057366967433169, + "learning_rate": 1.964030428758905e-05, + "loss": 1.9282, + "step": 659 + }, + { + "epoch": 0.11326583147417196, + "grad_norm": 5.2361901605509145, + "learning_rate": 1.963882542784127e-05, + "loss": 2.0172, + "step": 660 + }, + { + "epoch": 0.11343744637034495, + "grad_norm": 2.615700006965504, + "learning_rate": 1.963734359012881e-05, + "loss": 1.9016, + "step": 661 + }, + { + 
"epoch": 0.11360906126651793, + "grad_norm": 2.909416564121831, + "learning_rate": 1.9635858774909492e-05, + "loss": 1.6456, + "step": 662 + }, + { + "epoch": 0.11378067616269093, + "grad_norm": 3.9147334343482876, + "learning_rate": 1.9634370982642056e-05, + "loss": 2.0586, + "step": 663 + }, + { + "epoch": 0.11395229105886391, + "grad_norm": 3.2735393451851533, + "learning_rate": 1.9632880213786164e-05, + "loss": 2.086, + "step": 664 + }, + { + "epoch": 0.1141239059550369, + "grad_norm": 3.9843737550808327, + "learning_rate": 1.9631386468802397e-05, + "loss": 1.7187, + "step": 665 + }, + { + "epoch": 0.11429552085120989, + "grad_norm": 2.8896273668287393, + "learning_rate": 1.9629889748152252e-05, + "loss": 1.8384, + "step": 666 + }, + { + "epoch": 0.11446713574738288, + "grad_norm": 3.5645648642231187, + "learning_rate": 1.9628390052298155e-05, + "loss": 1.8703, + "step": 667 + }, + { + "epoch": 0.11463875064355586, + "grad_norm": 2.5079097656523603, + "learning_rate": 1.962688738170344e-05, + "loss": 1.9289, + "step": 668 + }, + { + "epoch": 0.11481036553972884, + "grad_norm": 2.8016352796236306, + "learning_rate": 1.9625381736832363e-05, + "loss": 1.986, + "step": 669 + }, + { + "epoch": 0.11498198043590184, + "grad_norm": 2.8181450705992424, + "learning_rate": 1.9623873118150105e-05, + "loss": 1.8151, + "step": 670 + }, + { + "epoch": 0.11515359533207482, + "grad_norm": 2.557212183770673, + "learning_rate": 1.962236152612276e-05, + "loss": 1.8604, + "step": 671 + }, + { + "epoch": 0.11532521022824781, + "grad_norm": 3.112788750888735, + "learning_rate": 1.9620846961217342e-05, + "loss": 1.6763, + "step": 672 + }, + { + "epoch": 0.1154968251244208, + "grad_norm": 3.583075138051105, + "learning_rate": 1.9619329423901782e-05, + "loss": 1.842, + "step": 673 + }, + { + "epoch": 0.11566844002059379, + "grad_norm": 3.0603181131811112, + "learning_rate": 1.9617808914644932e-05, + "loss": 2.0202, + "step": 674 + }, + { + "epoch": 0.11584005491676677, + "grad_norm": 2.7594121002432157, + "learning_rate": 1.9616285433916564e-05, + "loss": 1.9104, + "step": 675 + }, + { + "epoch": 0.11601166981293977, + "grad_norm": 2.0308915383609016, + "learning_rate": 1.961475898218736e-05, + "loss": 1.5745, + "step": 676 + }, + { + "epoch": 0.11618328470911275, + "grad_norm": 2.6653189468467957, + "learning_rate": 1.961322955992893e-05, + "loss": 1.904, + "step": 677 + }, + { + "epoch": 0.11635489960528574, + "grad_norm": 3.722748499584908, + "learning_rate": 1.9611697167613796e-05, + "loss": 1.7403, + "step": 678 + }, + { + "epoch": 0.11652651450145873, + "grad_norm": 3.4872868367061134, + "learning_rate": 1.96101618057154e-05, + "loss": 1.7692, + "step": 679 + }, + { + "epoch": 0.11669812939763172, + "grad_norm": 3.041586950031621, + "learning_rate": 1.960862347470809e-05, + "loss": 1.7355, + "step": 680 + }, + { + "epoch": 0.1168697442938047, + "grad_norm": 3.13573484165979, + "learning_rate": 1.9607082175067153e-05, + "loss": 1.9715, + "step": 681 + }, + { + "epoch": 0.11704135918997768, + "grad_norm": 2.702380065137708, + "learning_rate": 1.9605537907268776e-05, + "loss": 1.7196, + "step": 682 + }, + { + "epoch": 0.11721297408615068, + "grad_norm": 3.3994377792098733, + "learning_rate": 1.960399067179007e-05, + "loss": 1.8419, + "step": 683 + }, + { + "epoch": 0.11738458898232366, + "grad_norm": 3.5898581174795403, + "learning_rate": 1.9602440469109062e-05, + "loss": 1.8036, + "step": 684 + }, + { + "epoch": 0.11755620387849665, + "grad_norm": 2.549191167890007, + "learning_rate": 
1.9600887299704694e-05, + "loss": 1.9944, + "step": 685 + }, + { + "epoch": 0.11772781877466965, + "grad_norm": 2.8067638997963087, + "learning_rate": 1.9599331164056823e-05, + "loss": 2.1468, + "step": 686 + }, + { + "epoch": 0.11789943367084263, + "grad_norm": 3.2503872066757644, + "learning_rate": 1.959777206264623e-05, + "loss": 1.7988, + "step": 687 + }, + { + "epoch": 0.11807104856701561, + "grad_norm": 2.5655492270826046, + "learning_rate": 1.9596209995954606e-05, + "loss": 1.9896, + "step": 688 + }, + { + "epoch": 0.11824266346318861, + "grad_norm": 3.4038373020167003, + "learning_rate": 1.9594644964464552e-05, + "loss": 1.8029, + "step": 689 + }, + { + "epoch": 0.1184142783593616, + "grad_norm": 2.2470274647188173, + "learning_rate": 1.9593076968659606e-05, + "loss": 1.8847, + "step": 690 + }, + { + "epoch": 0.11858589325553458, + "grad_norm": 3.3166196832228665, + "learning_rate": 1.95915060090242e-05, + "loss": 1.8632, + "step": 691 + }, + { + "epoch": 0.11875750815170757, + "grad_norm": 3.1911420202071628, + "learning_rate": 1.958993208604369e-05, + "loss": 2.1536, + "step": 692 + }, + { + "epoch": 0.11892912304788056, + "grad_norm": 3.610909895768703, + "learning_rate": 1.9588355200204356e-05, + "loss": 1.9012, + "step": 693 + }, + { + "epoch": 0.11910073794405354, + "grad_norm": 3.8160243655922845, + "learning_rate": 1.958677535199337e-05, + "loss": 1.8011, + "step": 694 + }, + { + "epoch": 0.11927235284022653, + "grad_norm": 2.980075594564947, + "learning_rate": 1.9585192541898847e-05, + "loss": 1.7993, + "step": 695 + }, + { + "epoch": 0.11944396773639952, + "grad_norm": 3.67289866574711, + "learning_rate": 1.9583606770409798e-05, + "loss": 1.8665, + "step": 696 + }, + { + "epoch": 0.1196155826325725, + "grad_norm": 3.260134076805343, + "learning_rate": 1.9582018038016156e-05, + "loss": 2.0675, + "step": 697 + }, + { + "epoch": 0.11978719752874549, + "grad_norm": 2.6784053949660467, + "learning_rate": 1.9580426345208773e-05, + "loss": 1.5103, + "step": 698 + }, + { + "epoch": 0.11995881242491849, + "grad_norm": 3.074552583094222, + "learning_rate": 1.95788316924794e-05, + "loss": 1.7144, + "step": 699 + }, + { + "epoch": 0.12013042732109147, + "grad_norm": 5.172743445453561, + "learning_rate": 1.9577234080320725e-05, + "loss": 1.8502, + "step": 700 + }, + { + "epoch": 0.12030204221726445, + "grad_norm": 3.056563009474087, + "learning_rate": 1.957563350922633e-05, + "loss": 1.8261, + "step": 701 + }, + { + "epoch": 0.12047365711343745, + "grad_norm": 3.1842263189341007, + "learning_rate": 1.957402997969073e-05, + "loss": 1.9883, + "step": 702 + }, + { + "epoch": 0.12064527200961044, + "grad_norm": 5.0630167880209065, + "learning_rate": 1.957242349220933e-05, + "loss": 1.6892, + "step": 703 + }, + { + "epoch": 0.12081688690578342, + "grad_norm": 3.0565241361852205, + "learning_rate": 1.9570814047278473e-05, + "loss": 1.7964, + "step": 704 + }, + { + "epoch": 0.12098850180195642, + "grad_norm": 2.785422063330768, + "learning_rate": 1.95692016453954e-05, + "loss": 1.7392, + "step": 705 + }, + { + "epoch": 0.1211601166981294, + "grad_norm": 2.849393011403411, + "learning_rate": 1.9567586287058274e-05, + "loss": 1.8318, + "step": 706 + }, + { + "epoch": 0.12133173159430238, + "grad_norm": 3.6018994122316577, + "learning_rate": 1.9565967972766164e-05, + "loss": 1.9907, + "step": 707 + }, + { + "epoch": 0.12150334649047537, + "grad_norm": 2.891249821928477, + "learning_rate": 1.956434670301906e-05, + "loss": 1.9956, + "step": 708 + }, + { + "epoch": 0.12167496138664836, + 
"grad_norm": 3.58828544806441, + "learning_rate": 1.956272247831786e-05, + "loss": 2.0249, + "step": 709 + }, + { + "epoch": 0.12184657628282135, + "grad_norm": 2.8488348227855935, + "learning_rate": 1.956109529916438e-05, + "loss": 1.9498, + "step": 710 + }, + { + "epoch": 0.12201819117899433, + "grad_norm": 2.8837550100220803, + "learning_rate": 1.9559465166061344e-05, + "loss": 1.6279, + "step": 711 + }, + { + "epoch": 0.12218980607516733, + "grad_norm": 3.320573283921507, + "learning_rate": 1.9557832079512387e-05, + "loss": 1.8533, + "step": 712 + }, + { + "epoch": 0.12236142097134031, + "grad_norm": 2.4239358301693277, + "learning_rate": 1.9556196040022063e-05, + "loss": 1.8177, + "step": 713 + }, + { + "epoch": 0.1225330358675133, + "grad_norm": 3.5073093120306496, + "learning_rate": 1.955455704809583e-05, + "loss": 1.8524, + "step": 714 + }, + { + "epoch": 0.1227046507636863, + "grad_norm": 3.072178814401796, + "learning_rate": 1.9552915104240067e-05, + "loss": 1.9913, + "step": 715 + }, + { + "epoch": 0.12287626565985928, + "grad_norm": 2.623549589919317, + "learning_rate": 1.955127020896206e-05, + "loss": 2.1435, + "step": 716 + }, + { + "epoch": 0.12304788055603226, + "grad_norm": 3.132919700835491, + "learning_rate": 1.954962236277001e-05, + "loss": 1.695, + "step": 717 + }, + { + "epoch": 0.12321949545220526, + "grad_norm": 2.913405218348578, + "learning_rate": 1.9547971566173025e-05, + "loss": 1.8305, + "step": 718 + }, + { + "epoch": 0.12339111034837824, + "grad_norm": 3.6656251904198123, + "learning_rate": 1.9546317819681128e-05, + "loss": 2.0613, + "step": 719 + }, + { + "epoch": 0.12356272524455122, + "grad_norm": 3.6722745723633623, + "learning_rate": 1.9544661123805245e-05, + "loss": 1.748, + "step": 720 + }, + { + "epoch": 0.12373434014072421, + "grad_norm": 2.828121884091384, + "learning_rate": 1.954300147905724e-05, + "loss": 1.9915, + "step": 721 + }, + { + "epoch": 0.1239059550368972, + "grad_norm": 2.8714512600803612, + "learning_rate": 1.9541338885949845e-05, + "loss": 1.848, + "step": 722 + }, + { + "epoch": 0.12407756993307019, + "grad_norm": 3.8996471448338035, + "learning_rate": 1.9539673344996744e-05, + "loss": 1.9279, + "step": 723 + }, + { + "epoch": 0.12424918482924317, + "grad_norm": 2.9536823996782346, + "learning_rate": 1.953800485671251e-05, + "loss": 1.7804, + "step": 724 + }, + { + "epoch": 0.12442079972541617, + "grad_norm": 4.7489942812156976, + "learning_rate": 1.9536333421612626e-05, + "loss": 1.7883, + "step": 725 + }, + { + "epoch": 0.12459241462158915, + "grad_norm": 3.2145611731803405, + "learning_rate": 1.9534659040213498e-05, + "loss": 1.8362, + "step": 726 + }, + { + "epoch": 0.12476402951776214, + "grad_norm": 4.454136603463675, + "learning_rate": 1.9532981713032432e-05, + "loss": 1.9277, + "step": 727 + }, + { + "epoch": 0.12493564441393513, + "grad_norm": 3.61009760856778, + "learning_rate": 1.953130144058764e-05, + "loss": 2.0101, + "step": 728 + }, + { + "epoch": 0.1251072593101081, + "grad_norm": 3.7323789053868346, + "learning_rate": 1.9529618223398258e-05, + "loss": 2.0167, + "step": 729 + }, + { + "epoch": 0.12527887420628112, + "grad_norm": 3.461211435335458, + "learning_rate": 1.9527932061984327e-05, + "loss": 1.9127, + "step": 730 + }, + { + "epoch": 0.1254504891024541, + "grad_norm": 2.976985533695183, + "learning_rate": 1.952624295686679e-05, + "loss": 1.694, + "step": 731 + }, + { + "epoch": 0.12562210399862708, + "grad_norm": 3.3575090676614496, + "learning_rate": 1.9524550908567505e-05, + "loss": 1.9081, + "step": 732 + }, 
+ { + "epoch": 0.12579371889480007, + "grad_norm": 3.6069574862657485, + "learning_rate": 1.9522855917609243e-05, + "loss": 1.8363, + "step": 733 + }, + { + "epoch": 0.12596533379097305, + "grad_norm": 3.1836154152273406, + "learning_rate": 1.9521157984515672e-05, + "loss": 2.0397, + "step": 734 + }, + { + "epoch": 0.12613694868714603, + "grad_norm": 3.5354867259088816, + "learning_rate": 1.951945710981139e-05, + "loss": 1.9146, + "step": 735 + }, + { + "epoch": 0.12630856358331904, + "grad_norm": 2.812869580394354, + "learning_rate": 1.9517753294021876e-05, + "loss": 1.7738, + "step": 736 + }, + { + "epoch": 0.12648017847949203, + "grad_norm": 2.7574905789818307, + "learning_rate": 1.9516046537673543e-05, + "loss": 1.9723, + "step": 737 + }, + { + "epoch": 0.126651793375665, + "grad_norm": 2.56161869953953, + "learning_rate": 1.95143368412937e-05, + "loss": 1.8765, + "step": 738 + }, + { + "epoch": 0.126823408271838, + "grad_norm": 3.3025814410973964, + "learning_rate": 1.9512624205410568e-05, + "loss": 1.9226, + "step": 739 + }, + { + "epoch": 0.12699502316801098, + "grad_norm": 2.65364915839108, + "learning_rate": 1.951090863055327e-05, + "loss": 1.8904, + "step": 740 + }, + { + "epoch": 0.12716663806418396, + "grad_norm": 4.403950648327619, + "learning_rate": 1.9509190117251842e-05, + "loss": 1.8477, + "step": 741 + }, + { + "epoch": 0.12733825296035695, + "grad_norm": 2.7297214077424994, + "learning_rate": 1.950746866603723e-05, + "loss": 1.6244, + "step": 742 + }, + { + "epoch": 0.12750986785652996, + "grad_norm": 2.8623623789273953, + "learning_rate": 1.950574427744129e-05, + "loss": 1.8283, + "step": 743 + }, + { + "epoch": 0.12768148275270294, + "grad_norm": 2.776423003806021, + "learning_rate": 1.9504016951996774e-05, + "loss": 2.0191, + "step": 744 + }, + { + "epoch": 0.12785309764887592, + "grad_norm": 2.6959180765798445, + "learning_rate": 1.950228669023735e-05, + "loss": 1.6939, + "step": 745 + }, + { + "epoch": 0.1280247125450489, + "grad_norm": 2.8895795018010526, + "learning_rate": 1.9500553492697587e-05, + "loss": 1.7744, + "step": 746 + }, + { + "epoch": 0.1281963274412219, + "grad_norm": 2.959800013563544, + "learning_rate": 1.9498817359912973e-05, + "loss": 1.9661, + "step": 747 + }, + { + "epoch": 0.12836794233739487, + "grad_norm": 2.5285430397908923, + "learning_rate": 1.949707829241989e-05, + "loss": 1.7182, + "step": 748 + }, + { + "epoch": 0.12853955723356789, + "grad_norm": 4.091924561463437, + "learning_rate": 1.949533629075563e-05, + "loss": 2.043, + "step": 749 + }, + { + "epoch": 0.12871117212974087, + "grad_norm": 5.1680584893973585, + "learning_rate": 1.9493591355458404e-05, + "loss": 1.8322, + "step": 750 + }, + { + "epoch": 0.12888278702591385, + "grad_norm": 3.7773885086932535, + "learning_rate": 1.9491843487067305e-05, + "loss": 1.7617, + "step": 751 + }, + { + "epoch": 0.12905440192208684, + "grad_norm": 4.43437457571488, + "learning_rate": 1.9490092686122356e-05, + "loss": 1.8913, + "step": 752 + }, + { + "epoch": 0.12922601681825982, + "grad_norm": 4.634918982978317, + "learning_rate": 1.948833895316447e-05, + "loss": 2.0737, + "step": 753 + }, + { + "epoch": 0.1293976317144328, + "grad_norm": 3.5909650599406215, + "learning_rate": 1.9486582288735475e-05, + "loss": 2.0325, + "step": 754 + }, + { + "epoch": 0.1295692466106058, + "grad_norm": 4.173974377007036, + "learning_rate": 1.94848226933781e-05, + "loss": 1.6162, + "step": 755 + }, + { + "epoch": 0.1297408615067788, + "grad_norm": 3.5284936584114917, + "learning_rate": 1.9483060167635983e-05, + 
"loss": 2.0698, + "step": 756 + }, + { + "epoch": 0.12991247640295178, + "grad_norm": 3.4981107122899817, + "learning_rate": 1.9481294712053663e-05, + "loss": 1.7509, + "step": 757 + }, + { + "epoch": 0.13008409129912477, + "grad_norm": 2.5605155857801964, + "learning_rate": 1.947952632717659e-05, + "loss": 1.6244, + "step": 758 + }, + { + "epoch": 0.13025570619529775, + "grad_norm": 3.9763571740288612, + "learning_rate": 1.9477755013551106e-05, + "loss": 2.1433, + "step": 759 + }, + { + "epoch": 0.13042732109147073, + "grad_norm": 2.759892536779822, + "learning_rate": 1.947598077172448e-05, + "loss": 1.7429, + "step": 760 + }, + { + "epoch": 0.13059893598764372, + "grad_norm": 5.046535409841607, + "learning_rate": 1.9474203602244866e-05, + "loss": 2.0881, + "step": 761 + }, + { + "epoch": 0.13077055088381673, + "grad_norm": 4.706604367700716, + "learning_rate": 1.9472423505661332e-05, + "loss": 1.8053, + "step": 762 + }, + { + "epoch": 0.1309421657799897, + "grad_norm": 2.6280885019018974, + "learning_rate": 1.947064048252385e-05, + "loss": 2.0394, + "step": 763 + }, + { + "epoch": 0.1311137806761627, + "grad_norm": 3.014891797747624, + "learning_rate": 1.946885453338329e-05, + "loss": 1.7792, + "step": 764 + }, + { + "epoch": 0.13128539557233568, + "grad_norm": 2.3571690425386596, + "learning_rate": 1.946706565879143e-05, + "loss": 1.7023, + "step": 765 + }, + { + "epoch": 0.13145701046850866, + "grad_norm": 3.361034360801788, + "learning_rate": 1.9465273859300957e-05, + "loss": 1.7349, + "step": 766 + }, + { + "epoch": 0.13162862536468165, + "grad_norm": 2.5269476555856345, + "learning_rate": 1.946347913546545e-05, + "loss": 1.8084, + "step": 767 + }, + { + "epoch": 0.13180024026085463, + "grad_norm": 3.926594466583261, + "learning_rate": 1.9461681487839402e-05, + "loss": 2.0435, + "step": 768 + }, + { + "epoch": 0.13197185515702764, + "grad_norm": 2.8687039485104453, + "learning_rate": 1.945988091697821e-05, + "loss": 1.7041, + "step": 769 + }, + { + "epoch": 0.13214347005320062, + "grad_norm": 2.536793958815223, + "learning_rate": 1.9458077423438157e-05, + "loss": 1.7624, + "step": 770 + }, + { + "epoch": 0.1323150849493736, + "grad_norm": 2.3145146220422963, + "learning_rate": 1.9456271007776456e-05, + "loss": 1.96, + "step": 771 + }, + { + "epoch": 0.1324866998455466, + "grad_norm": 3.4557279192210286, + "learning_rate": 1.9454461670551198e-05, + "loss": 1.8301, + "step": 772 + }, + { + "epoch": 0.13265831474171957, + "grad_norm": 3.052242910129911, + "learning_rate": 1.945264941232139e-05, + "loss": 1.5877, + "step": 773 + }, + { + "epoch": 0.13282992963789256, + "grad_norm": 3.2937598733249565, + "learning_rate": 1.945083423364694e-05, + "loss": 1.8908, + "step": 774 + }, + { + "epoch": 0.13300154453406557, + "grad_norm": 8.45529425515086, + "learning_rate": 1.9449016135088657e-05, + "loss": 1.9223, + "step": 775 + }, + { + "epoch": 0.13317315943023855, + "grad_norm": 3.867949124556896, + "learning_rate": 1.944719511720825e-05, + "loss": 1.8183, + "step": 776 + }, + { + "epoch": 0.13334477432641154, + "grad_norm": 3.502460333702417, + "learning_rate": 1.9445371180568335e-05, + "loss": 2.1246, + "step": 777 + }, + { + "epoch": 0.13351638922258452, + "grad_norm": 2.8627687844724217, + "learning_rate": 1.944354432573242e-05, + "loss": 2.085, + "step": 778 + }, + { + "epoch": 0.1336880041187575, + "grad_norm": 2.916270177542365, + "learning_rate": 1.944171455326493e-05, + "loss": 1.9664, + "step": 779 + }, + { + "epoch": 0.1338596190149305, + "grad_norm": 5.241825710417449, + 
"learning_rate": 1.9439881863731177e-05, + "loss": 1.7361, + "step": 780 + }, + { + "epoch": 0.13403123391110347, + "grad_norm": 4.2172481694906745, + "learning_rate": 1.943804625769738e-05, + "loss": 1.8512, + "step": 781 + }, + { + "epoch": 0.13420284880727648, + "grad_norm": 2.602852498480944, + "learning_rate": 1.9436207735730658e-05, + "loss": 1.9726, + "step": 782 + }, + { + "epoch": 0.13437446370344946, + "grad_norm": 3.1822616104029664, + "learning_rate": 1.9434366298399037e-05, + "loss": 1.9234, + "step": 783 + }, + { + "epoch": 0.13454607859962245, + "grad_norm": 2.915442004940933, + "learning_rate": 1.9432521946271434e-05, + "loss": 1.894, + "step": 784 + }, + { + "epoch": 0.13471769349579543, + "grad_norm": 3.0864641358886624, + "learning_rate": 1.9430674679917672e-05, + "loss": 1.9447, + "step": 785 + }, + { + "epoch": 0.13488930839196842, + "grad_norm": 4.769603918101526, + "learning_rate": 1.9428824499908473e-05, + "loss": 1.8205, + "step": 786 + }, + { + "epoch": 0.1350609232881414, + "grad_norm": 3.1381352998719145, + "learning_rate": 1.9426971406815464e-05, + "loss": 1.8191, + "step": 787 + }, + { + "epoch": 0.1352325381843144, + "grad_norm": 3.5527016762460235, + "learning_rate": 1.942511540121116e-05, + "loss": 1.8117, + "step": 788 + }, + { + "epoch": 0.1354041530804874, + "grad_norm": 3.335609160798985, + "learning_rate": 1.942325648366899e-05, + "loss": 1.7949, + "step": 789 + }, + { + "epoch": 0.13557576797666038, + "grad_norm": 3.5589128617081522, + "learning_rate": 1.9421394654763275e-05, + "loss": 1.7756, + "step": 790 + }, + { + "epoch": 0.13574738287283336, + "grad_norm": 3.3931035007393, + "learning_rate": 1.9419529915069234e-05, + "loss": 1.9588, + "step": 791 + }, + { + "epoch": 0.13591899776900634, + "grad_norm": 2.732770910885144, + "learning_rate": 1.9417662265162988e-05, + "loss": 1.8161, + "step": 792 + }, + { + "epoch": 0.13609061266517933, + "grad_norm": 2.8714399790740592, + "learning_rate": 1.941579170562156e-05, + "loss": 2.0133, + "step": 793 + }, + { + "epoch": 0.1362622275613523, + "grad_norm": 2.8201340231038565, + "learning_rate": 1.9413918237022865e-05, + "loss": 1.9192, + "step": 794 + }, + { + "epoch": 0.13643384245752532, + "grad_norm": 2.8829937524306026, + "learning_rate": 1.9412041859945723e-05, + "loss": 1.8748, + "step": 795 + }, + { + "epoch": 0.1366054573536983, + "grad_norm": 2.6121298207375196, + "learning_rate": 1.941016257496985e-05, + "loss": 1.7875, + "step": 796 + }, + { + "epoch": 0.1367770722498713, + "grad_norm": 3.7901313395030893, + "learning_rate": 1.9408280382675862e-05, + "loss": 2.0532, + "step": 797 + }, + { + "epoch": 0.13694868714604427, + "grad_norm": 2.845924585151732, + "learning_rate": 1.9406395283645273e-05, + "loss": 1.7259, + "step": 798 + }, + { + "epoch": 0.13712030204221726, + "grad_norm": 2.5725454432234596, + "learning_rate": 1.9404507278460492e-05, + "loss": 1.9039, + "step": 799 + }, + { + "epoch": 0.13729191693839024, + "grad_norm": 2.8917593842067366, + "learning_rate": 1.940261636770483e-05, + "loss": 2.061, + "step": 800 + }, + { + "epoch": 0.13746353183456325, + "grad_norm": 2.1796790945538778, + "learning_rate": 1.9400722551962487e-05, + "loss": 1.697, + "step": 801 + }, + { + "epoch": 0.13763514673073624, + "grad_norm": 2.763119759861247, + "learning_rate": 1.939882583181857e-05, + "loss": 1.5279, + "step": 802 + }, + { + "epoch": 0.13780676162690922, + "grad_norm": 3.682365440487759, + "learning_rate": 1.9396926207859085e-05, + "loss": 1.814, + "step": 803 + }, + { + "epoch": 
0.1379783765230822, + "grad_norm": 3.162536467592949, + "learning_rate": 1.939502368067093e-05, + "loss": 1.9678, + "step": 804 + }, + { + "epoch": 0.13814999141925519, + "grad_norm": 3.143446298871448, + "learning_rate": 1.9393118250841897e-05, + "loss": 2.0622, + "step": 805 + }, + { + "epoch": 0.13832160631542817, + "grad_norm": 2.3456851919733004, + "learning_rate": 1.9391209918960677e-05, + "loss": 1.7169, + "step": 806 + }, + { + "epoch": 0.13849322121160118, + "grad_norm": 2.2080044222294903, + "learning_rate": 1.9389298685616866e-05, + "loss": 1.7335, + "step": 807 + }, + { + "epoch": 0.13866483610777416, + "grad_norm": 2.3748221133733436, + "learning_rate": 1.9387384551400942e-05, + "loss": 1.8508, + "step": 808 + }, + { + "epoch": 0.13883645100394715, + "grad_norm": 3.4599546197719966, + "learning_rate": 1.9385467516904288e-05, + "loss": 1.7573, + "step": 809 + }, + { + "epoch": 0.13900806590012013, + "grad_norm": 2.823313781110258, + "learning_rate": 1.9383547582719186e-05, + "loss": 1.9186, + "step": 810 + }, + { + "epoch": 0.13917968079629311, + "grad_norm": 2.7898428340211727, + "learning_rate": 1.9381624749438807e-05, + "loss": 1.6144, + "step": 811 + }, + { + "epoch": 0.1393512956924661, + "grad_norm": 3.014886138789283, + "learning_rate": 1.937969901765722e-05, + "loss": 1.947, + "step": 812 + }, + { + "epoch": 0.13952291058863908, + "grad_norm": 2.365393743197819, + "learning_rate": 1.9377770387969388e-05, + "loss": 2.0048, + "step": 813 + }, + { + "epoch": 0.1396945254848121, + "grad_norm": 2.862184098927013, + "learning_rate": 1.937583886097117e-05, + "loss": 1.6929, + "step": 814 + }, + { + "epoch": 0.13986614038098508, + "grad_norm": 5.177811782665515, + "learning_rate": 1.937390443725933e-05, + "loss": 2.0268, + "step": 815 + }, + { + "epoch": 0.14003775527715806, + "grad_norm": 2.6597735151341095, + "learning_rate": 1.9371967117431513e-05, + "loss": 1.9714, + "step": 816 + }, + { + "epoch": 0.14020937017333104, + "grad_norm": 2.537487351793122, + "learning_rate": 1.937002690208626e-05, + "loss": 1.8663, + "step": 817 + }, + { + "epoch": 0.14038098506950403, + "grad_norm": 2.5155501706236736, + "learning_rate": 1.9368083791823013e-05, + "loss": 1.9868, + "step": 818 + }, + { + "epoch": 0.140552599965677, + "grad_norm": 2.70501004307245, + "learning_rate": 1.9366137787242107e-05, + "loss": 1.8901, + "step": 819 + }, + { + "epoch": 0.14072421486185002, + "grad_norm": 2.7554807612409076, + "learning_rate": 1.936418888894477e-05, + "loss": 1.7269, + "step": 820 + }, + { + "epoch": 0.140895829758023, + "grad_norm": 2.8951157418624254, + "learning_rate": 1.9362237097533123e-05, + "loss": 1.8491, + "step": 821 + }, + { + "epoch": 0.141067444654196, + "grad_norm": 2.685476353499669, + "learning_rate": 1.9360282413610185e-05, + "loss": 1.8163, + "step": 822 + }, + { + "epoch": 0.14123905955036897, + "grad_norm": 2.914179530331329, + "learning_rate": 1.9358324837779864e-05, + "loss": 1.7363, + "step": 823 + }, + { + "epoch": 0.14141067444654196, + "grad_norm": 2.605303386454893, + "learning_rate": 1.935636437064696e-05, + "loss": 1.7258, + "step": 824 + }, + { + "epoch": 0.14158228934271494, + "grad_norm": 2.8321319406795085, + "learning_rate": 1.9354401012817172e-05, + "loss": 2.0022, + "step": 825 + }, + { + "epoch": 0.14175390423888792, + "grad_norm": 3.0643757031727996, + "learning_rate": 1.935243476489709e-05, + "loss": 2.0587, + "step": 826 + }, + { + "epoch": 0.14192551913506093, + "grad_norm": 3.2031184109178086, + "learning_rate": 1.9350465627494196e-05, + "loss": 
1.651, + "step": 827 + }, + { + "epoch": 0.14209713403123392, + "grad_norm": 4.370719302377024, + "learning_rate": 1.9348493601216864e-05, + "loss": 1.8206, + "step": 828 + }, + { + "epoch": 0.1422687489274069, + "grad_norm": 4.842158211369917, + "learning_rate": 1.9346518686674365e-05, + "loss": 2.2634, + "step": 829 + }, + { + "epoch": 0.14244036382357989, + "grad_norm": 3.76928527886839, + "learning_rate": 1.9344540884476856e-05, + "loss": 2.0082, + "step": 830 + }, + { + "epoch": 0.14261197871975287, + "grad_norm": 2.762609253767019, + "learning_rate": 1.9342560195235386e-05, + "loss": 1.8976, + "step": 831 + }, + { + "epoch": 0.14278359361592585, + "grad_norm": 2.410302542310939, + "learning_rate": 1.9340576619561907e-05, + "loss": 1.9664, + "step": 832 + }, + { + "epoch": 0.14295520851209886, + "grad_norm": 3.7311121338365396, + "learning_rate": 1.933859015806925e-05, + "loss": 1.6968, + "step": 833 + }, + { + "epoch": 0.14312682340827185, + "grad_norm": 5.519498021808017, + "learning_rate": 1.9336600811371143e-05, + "loss": 1.8068, + "step": 834 + }, + { + "epoch": 0.14329843830444483, + "grad_norm": 3.9452926697799198, + "learning_rate": 1.9334608580082204e-05, + "loss": 1.9151, + "step": 835 + }, + { + "epoch": 0.14347005320061781, + "grad_norm": 2.47590519165114, + "learning_rate": 1.9332613464817947e-05, + "loss": 2.1557, + "step": 836 + }, + { + "epoch": 0.1436416680967908, + "grad_norm": 2.2773478151251596, + "learning_rate": 1.933061546619477e-05, + "loss": 1.8475, + "step": 837 + }, + { + "epoch": 0.14381328299296378, + "grad_norm": 3.0465913365439588, + "learning_rate": 1.9328614584829963e-05, + "loss": 1.5076, + "step": 838 + }, + { + "epoch": 0.14398489788913676, + "grad_norm": 3.1285160686984628, + "learning_rate": 1.9326610821341713e-05, + "loss": 1.9832, + "step": 839 + }, + { + "epoch": 0.14415651278530978, + "grad_norm": 2.7302090464369035, + "learning_rate": 1.932460417634909e-05, + "loss": 1.893, + "step": 840 + }, + { + "epoch": 0.14432812768148276, + "grad_norm": 2.9357521601780316, + "learning_rate": 1.932259465047206e-05, + "loss": 1.9015, + "step": 841 + }, + { + "epoch": 0.14449974257765574, + "grad_norm": 2.932565203055729, + "learning_rate": 1.9320582244331475e-05, + "loss": 1.9993, + "step": 842 + }, + { + "epoch": 0.14467135747382873, + "grad_norm": 2.5392560938244033, + "learning_rate": 1.9318566958549075e-05, + "loss": 1.8044, + "step": 843 + }, + { + "epoch": 0.1448429723700017, + "grad_norm": 2.931105428412681, + "learning_rate": 1.9316548793747495e-05, + "loss": 1.6959, + "step": 844 + }, + { + "epoch": 0.1450145872661747, + "grad_norm": 3.076745313072229, + "learning_rate": 1.9314527750550263e-05, + "loss": 2.0188, + "step": 845 + }, + { + "epoch": 0.1451862021623477, + "grad_norm": 3.1249281361631684, + "learning_rate": 1.931250382958178e-05, + "loss": 1.863, + "step": 846 + }, + { + "epoch": 0.1453578170585207, + "grad_norm": 3.425673091348935, + "learning_rate": 1.9310477031467357e-05, + "loss": 1.9239, + "step": 847 + }, + { + "epoch": 0.14552943195469367, + "grad_norm": 3.1376131163028655, + "learning_rate": 1.930844735683318e-05, + "loss": 2.0033, + "step": 848 + }, + { + "epoch": 0.14570104685086666, + "grad_norm": 2.865218109427285, + "learning_rate": 1.9306414806306323e-05, + "loss": 1.6858, + "step": 849 + }, + { + "epoch": 0.14587266174703964, + "grad_norm": 2.836261917489325, + "learning_rate": 1.930437938051476e-05, + "loss": 2.0244, + "step": 850 + }, + { + "epoch": 0.14604427664321262, + "grad_norm": 2.4022066479421054, + 
"learning_rate": 1.930234108008734e-05, + "loss": 1.7304, + "step": 851 + }, + { + "epoch": 0.1462158915393856, + "grad_norm": 2.9197060396009182, + "learning_rate": 1.9300299905653813e-05, + "loss": 1.7783, + "step": 852 + }, + { + "epoch": 0.14638750643555862, + "grad_norm": 3.5324989987112168, + "learning_rate": 1.9298255857844803e-05, + "loss": 2.1322, + "step": 853 + }, + { + "epoch": 0.1465591213317316, + "grad_norm": 2.6958341222475983, + "learning_rate": 1.9296208937291835e-05, + "loss": 1.8372, + "step": 854 + }, + { + "epoch": 0.14673073622790458, + "grad_norm": 3.279463392162738, + "learning_rate": 1.929415914462731e-05, + "loss": 1.9398, + "step": 855 + }, + { + "epoch": 0.14690235112407757, + "grad_norm": 2.9746504195005925, + "learning_rate": 1.9292106480484533e-05, + "loss": 1.9512, + "step": 856 + }, + { + "epoch": 0.14707396602025055, + "grad_norm": 2.599058511769964, + "learning_rate": 1.9290050945497668e-05, + "loss": 1.7489, + "step": 857 + }, + { + "epoch": 0.14724558091642354, + "grad_norm": 2.4030634214006206, + "learning_rate": 1.9287992540301802e-05, + "loss": 1.7582, + "step": 858 + }, + { + "epoch": 0.14741719581259655, + "grad_norm": 3.827757116809412, + "learning_rate": 1.9285931265532875e-05, + "loss": 1.7235, + "step": 859 + }, + { + "epoch": 0.14758881070876953, + "grad_norm": 2.7678919341185644, + "learning_rate": 1.9283867121827736e-05, + "loss": 1.7356, + "step": 860 + }, + { + "epoch": 0.1477604256049425, + "grad_norm": 3.2867412465832855, + "learning_rate": 1.928180010982411e-05, + "loss": 1.9071, + "step": 861 + }, + { + "epoch": 0.1479320405011155, + "grad_norm": 2.656270649072383, + "learning_rate": 1.927973023016061e-05, + "loss": 1.911, + "step": 862 + }, + { + "epoch": 0.14810365539728848, + "grad_norm": 2.434197098758412, + "learning_rate": 1.9277657483476736e-05, + "loss": 1.6403, + "step": 863 + }, + { + "epoch": 0.14827527029346146, + "grad_norm": 2.8058482059718233, + "learning_rate": 1.9275581870412883e-05, + "loss": 1.863, + "step": 864 + }, + { + "epoch": 0.14844688518963445, + "grad_norm": 3.230606706076792, + "learning_rate": 1.9273503391610307e-05, + "loss": 1.8928, + "step": 865 + }, + { + "epoch": 0.14861850008580746, + "grad_norm": 3.0741959612497345, + "learning_rate": 1.927142204771118e-05, + "loss": 1.9563, + "step": 866 + }, + { + "epoch": 0.14879011498198044, + "grad_norm": 2.4980609904025406, + "learning_rate": 1.926933783935853e-05, + "loss": 1.9163, + "step": 867 + }, + { + "epoch": 0.14896172987815343, + "grad_norm": 3.357732561087617, + "learning_rate": 1.926725076719629e-05, + "loss": 1.7384, + "step": 868 + }, + { + "epoch": 0.1491333447743264, + "grad_norm": 2.911414826424691, + "learning_rate": 1.9265160831869276e-05, + "loss": 1.9063, + "step": 869 + }, + { + "epoch": 0.1493049596704994, + "grad_norm": 2.2814824068799466, + "learning_rate": 1.926306803402318e-05, + "loss": 2.0599, + "step": 870 + }, + { + "epoch": 0.14947657456667238, + "grad_norm": 2.0531841217194304, + "learning_rate": 1.9260972374304582e-05, + "loss": 1.6243, + "step": 871 + }, + { + "epoch": 0.1496481894628454, + "grad_norm": 2.4565323616356896, + "learning_rate": 1.9258873853360946e-05, + "loss": 1.6371, + "step": 872 + }, + { + "epoch": 0.14981980435901837, + "grad_norm": 2.5500894867398007, + "learning_rate": 1.9256772471840627e-05, + "loss": 1.983, + "step": 873 + }, + { + "epoch": 0.14999141925519135, + "grad_norm": 3.018479297539399, + "learning_rate": 1.925466823039285e-05, + "loss": 1.8799, + "step": 874 + }, + { + "epoch": 
0.15016303415136434, + "grad_norm": 3.3599963072443138, + "learning_rate": 1.9252561129667736e-05, + "loss": 1.9047, + "step": 875 + }, + { + "epoch": 0.15033464904753732, + "grad_norm": 2.876939590060346, + "learning_rate": 1.925045117031628e-05, + "loss": 1.8091, + "step": 876 + }, + { + "epoch": 0.1505062639437103, + "grad_norm": 4.587035200278965, + "learning_rate": 1.924833835299037e-05, + "loss": 1.8766, + "step": 877 + }, + { + "epoch": 0.1506778788398833, + "grad_norm": 3.2603079675805917, + "learning_rate": 1.924622267834277e-05, + "loss": 1.818, + "step": 878 + }, + { + "epoch": 0.1508494937360563, + "grad_norm": 2.914222052871642, + "learning_rate": 1.9244104147027127e-05, + "loss": 1.9931, + "step": 879 + }, + { + "epoch": 0.15102110863222928, + "grad_norm": 2.869009191272744, + "learning_rate": 1.9241982759697976e-05, + "loss": 1.8678, + "step": 880 + }, + { + "epoch": 0.15119272352840227, + "grad_norm": 3.760143196914323, + "learning_rate": 1.9239858517010724e-05, + "loss": 1.8291, + "step": 881 + }, + { + "epoch": 0.15136433842457525, + "grad_norm": 2.870117971628503, + "learning_rate": 1.9237731419621672e-05, + "loss": 1.8694, + "step": 882 + }, + { + "epoch": 0.15153595332074823, + "grad_norm": 5.117646025360141, + "learning_rate": 1.9235601468187997e-05, + "loss": 1.7764, + "step": 883 + }, + { + "epoch": 0.15170756821692122, + "grad_norm": 2.034057626859885, + "learning_rate": 1.9233468663367755e-05, + "loss": 1.8281, + "step": 884 + }, + { + "epoch": 0.15187918311309423, + "grad_norm": 1.9387544199806614, + "learning_rate": 1.9231333005819893e-05, + "loss": 1.5317, + "step": 885 + }, + { + "epoch": 0.1520507980092672, + "grad_norm": 2.3356128278812727, + "learning_rate": 1.9229194496204226e-05, + "loss": 1.9282, + "step": 886 + }, + { + "epoch": 0.1522224129054402, + "grad_norm": 2.7405137688083383, + "learning_rate": 1.9227053135181467e-05, + "loss": 1.8164, + "step": 887 + }, + { + "epoch": 0.15239402780161318, + "grad_norm": 3.214610331040803, + "learning_rate": 1.922490892341319e-05, + "loss": 1.7349, + "step": 888 + }, + { + "epoch": 0.15256564269778616, + "grad_norm": 3.465707142240606, + "learning_rate": 1.9222761861561868e-05, + "loss": 1.7254, + "step": 889 + }, + { + "epoch": 0.15273725759395915, + "grad_norm": 3.4556275043365927, + "learning_rate": 1.9220611950290845e-05, + "loss": 1.8544, + "step": 890 + }, + { + "epoch": 0.15290887249013213, + "grad_norm": 3.4041490208093257, + "learning_rate": 1.9218459190264346e-05, + "loss": 1.829, + "step": 891 + }, + { + "epoch": 0.15308048738630514, + "grad_norm": 2.5276474455165263, + "learning_rate": 1.9216303582147475e-05, + "loss": 1.7018, + "step": 892 + }, + { + "epoch": 0.15325210228247813, + "grad_norm": 2.4042077778658797, + "learning_rate": 1.9214145126606224e-05, + "loss": 1.7764, + "step": 893 + }, + { + "epoch": 0.1534237171786511, + "grad_norm": 2.7129688878164537, + "learning_rate": 1.9211983824307455e-05, + "loss": 1.5837, + "step": 894 + }, + { + "epoch": 0.1535953320748241, + "grad_norm": 3.2627235950578886, + "learning_rate": 1.920981967591891e-05, + "loss": 2.0954, + "step": 895 + }, + { + "epoch": 0.15376694697099708, + "grad_norm": 2.7252865505248867, + "learning_rate": 1.9207652682109223e-05, + "loss": 1.7785, + "step": 896 + }, + { + "epoch": 0.15393856186717006, + "grad_norm": 2.8096998910455153, + "learning_rate": 1.920548284354789e-05, + "loss": 1.7101, + "step": 897 + }, + { + "epoch": 0.15411017676334307, + "grad_norm": 2.537356504744846, + "learning_rate": 1.9203310160905296e-05, + 
"loss": 2.1799, + "step": 898 + }, + { + "epoch": 0.15428179165951605, + "grad_norm": 2.9497464860444316, + "learning_rate": 1.9201134634852706e-05, + "loss": 1.7241, + "step": 899 + }, + { + "epoch": 0.15445340655568904, + "grad_norm": 5.570811784311937, + "learning_rate": 1.9198956266062258e-05, + "loss": 1.8488, + "step": 900 + }, + { + "epoch": 0.15462502145186202, + "grad_norm": 2.9716800160992327, + "learning_rate": 1.9196775055206965e-05, + "loss": 2.1307, + "step": 901 + }, + { + "epoch": 0.154796636348035, + "grad_norm": 2.620842956655345, + "learning_rate": 1.9194591002960732e-05, + "loss": 1.9544, + "step": 902 + }, + { + "epoch": 0.154968251244208, + "grad_norm": 3.3286638067892254, + "learning_rate": 1.9192404109998325e-05, + "loss": 1.8323, + "step": 903 + }, + { + "epoch": 0.15513986614038097, + "grad_norm": 2.5659862954745294, + "learning_rate": 1.91902143769954e-05, + "loss": 1.9745, + "step": 904 + }, + { + "epoch": 0.15531148103655398, + "grad_norm": 2.661306312749646, + "learning_rate": 1.9188021804628486e-05, + "loss": 1.6669, + "step": 905 + }, + { + "epoch": 0.15548309593272697, + "grad_norm": 3.308562108860248, + "learning_rate": 1.9185826393574994e-05, + "loss": 1.9903, + "step": 906 + }, + { + "epoch": 0.15565471082889995, + "grad_norm": 2.9098122788690106, + "learning_rate": 1.9183628144513196e-05, + "loss": 1.6804, + "step": 907 + }, + { + "epoch": 0.15582632572507293, + "grad_norm": 3.728510596350371, + "learning_rate": 1.9181427058122265e-05, + "loss": 1.7867, + "step": 908 + }, + { + "epoch": 0.15599794062124592, + "grad_norm": 3.7122472196052425, + "learning_rate": 1.9179223135082226e-05, + "loss": 1.9655, + "step": 909 + }, + { + "epoch": 0.1561695555174189, + "grad_norm": 2.958838137311516, + "learning_rate": 1.9177016376074006e-05, + "loss": 1.7651, + "step": 910 + }, + { + "epoch": 0.1563411704135919, + "grad_norm": 3.026374820962408, + "learning_rate": 1.9174806781779384e-05, + "loss": 1.8354, + "step": 911 + }, + { + "epoch": 0.1565127853097649, + "grad_norm": 4.492953610295223, + "learning_rate": 1.9172594352881028e-05, + "loss": 1.9504, + "step": 912 + }, + { + "epoch": 0.15668440020593788, + "grad_norm": 2.900144753968202, + "learning_rate": 1.917037909006248e-05, + "loss": 2.048, + "step": 913 + }, + { + "epoch": 0.15685601510211086, + "grad_norm": 2.6953935878760378, + "learning_rate": 1.9168160994008157e-05, + "loss": 1.8892, + "step": 914 + }, + { + "epoch": 0.15702762999828385, + "grad_norm": 4.945101883397236, + "learning_rate": 1.916594006540335e-05, + "loss": 1.8452, + "step": 915 + }, + { + "epoch": 0.15719924489445683, + "grad_norm": 2.8266649868888125, + "learning_rate": 1.9163716304934227e-05, + "loss": 1.759, + "step": 916 + }, + { + "epoch": 0.1573708597906298, + "grad_norm": 2.2645644715819957, + "learning_rate": 1.916148971328783e-05, + "loss": 1.8898, + "step": 917 + }, + { + "epoch": 0.15754247468680282, + "grad_norm": 2.55348016259605, + "learning_rate": 1.9159260291152077e-05, + "loss": 1.8977, + "step": 918 + }, + { + "epoch": 0.1577140895829758, + "grad_norm": 2.4832091991606204, + "learning_rate": 1.915702803921576e-05, + "loss": 1.8246, + "step": 919 + }, + { + "epoch": 0.1578857044791488, + "grad_norm": 2.4629536181266203, + "learning_rate": 1.9154792958168542e-05, + "loss": 1.9943, + "step": 920 + }, + { + "epoch": 0.15805731937532178, + "grad_norm": 3.0165092747286177, + "learning_rate": 1.9152555048700966e-05, + "loss": 1.9152, + "step": 921 + }, + { + "epoch": 0.15822893427149476, + "grad_norm": 2.391542522749986, + 
"learning_rate": 1.9150314311504444e-05, + "loss": 1.9288, + "step": 922 + }, + { + "epoch": 0.15840054916766774, + "grad_norm": 4.100846860532991, + "learning_rate": 1.9148070747271262e-05, + "loss": 1.7039, + "step": 923 + }, + { + "epoch": 0.15857216406384075, + "grad_norm": 3.2340100229029347, + "learning_rate": 1.914582435669458e-05, + "loss": 2.0645, + "step": 924 + }, + { + "epoch": 0.15874377896001374, + "grad_norm": 3.4940397651555535, + "learning_rate": 1.914357514046844e-05, + "loss": 1.8363, + "step": 925 + }, + { + "epoch": 0.15891539385618672, + "grad_norm": 2.4999340289881244, + "learning_rate": 1.9141323099287737e-05, + "loss": 1.8756, + "step": 926 + }, + { + "epoch": 0.1590870087523597, + "grad_norm": 2.9114026610321053, + "learning_rate": 1.913906823384826e-05, + "loss": 1.8852, + "step": 927 + }, + { + "epoch": 0.1592586236485327, + "grad_norm": 3.114045024461276, + "learning_rate": 1.913681054484666e-05, + "loss": 1.8964, + "step": 928 + }, + { + "epoch": 0.15943023854470567, + "grad_norm": 3.0381278250794588, + "learning_rate": 1.9134550032980456e-05, + "loss": 1.8356, + "step": 929 + }, + { + "epoch": 0.15960185344087865, + "grad_norm": 3.5818913304011235, + "learning_rate": 1.913228669894805e-05, + "loss": 1.7601, + "step": 930 + }, + { + "epoch": 0.15977346833705167, + "grad_norm": 3.1344752832343414, + "learning_rate": 1.9130020543448705e-05, + "loss": 1.9302, + "step": 931 + }, + { + "epoch": 0.15994508323322465, + "grad_norm": 3.0816071140514105, + "learning_rate": 1.9127751567182568e-05, + "loss": 1.8898, + "step": 932 + }, + { + "epoch": 0.16011669812939763, + "grad_norm": 3.017305791318649, + "learning_rate": 1.9125479770850646e-05, + "loss": 1.7578, + "step": 933 + }, + { + "epoch": 0.16028831302557062, + "grad_norm": 2.906786195900609, + "learning_rate": 1.9123205155154827e-05, + "loss": 2.0959, + "step": 934 + }, + { + "epoch": 0.1604599279217436, + "grad_norm": 4.058030755809474, + "learning_rate": 1.912092772079786e-05, + "loss": 1.9833, + "step": 935 + }, + { + "epoch": 0.16063154281791658, + "grad_norm": 2.722762915083046, + "learning_rate": 1.9118647468483375e-05, + "loss": 1.7963, + "step": 936 + }, + { + "epoch": 0.1608031577140896, + "grad_norm": 3.4478066449412546, + "learning_rate": 1.9116364398915866e-05, + "loss": 1.7697, + "step": 937 + }, + { + "epoch": 0.16097477261026258, + "grad_norm": 3.8843306634289583, + "learning_rate": 1.9114078512800692e-05, + "loss": 1.751, + "step": 938 + }, + { + "epoch": 0.16114638750643556, + "grad_norm": 2.619121463614514, + "learning_rate": 1.9111789810844104e-05, + "loss": 1.7204, + "step": 939 + }, + { + "epoch": 0.16131800240260855, + "grad_norm": 3.1020226140291887, + "learning_rate": 1.9109498293753196e-05, + "loss": 1.7465, + "step": 940 + }, + { + "epoch": 0.16148961729878153, + "grad_norm": 2.816955507887721, + "learning_rate": 1.9107203962235947e-05, + "loss": 1.6794, + "step": 941 + }, + { + "epoch": 0.1616612321949545, + "grad_norm": 3.2824724213697447, + "learning_rate": 1.91049068170012e-05, + "loss": 1.7686, + "step": 942 + }, + { + "epoch": 0.1618328470911275, + "grad_norm": 2.297484792157275, + "learning_rate": 1.910260685875868e-05, + "loss": 1.6778, + "step": 943 + }, + { + "epoch": 0.1620044619873005, + "grad_norm": 2.663356348667494, + "learning_rate": 1.9100304088218964e-05, + "loss": 1.6639, + "step": 944 + }, + { + "epoch": 0.1621760768834735, + "grad_norm": 3.2511637087509366, + "learning_rate": 1.9097998506093503e-05, + "loss": 1.7046, + "step": 945 + }, + { + "epoch": 
0.16234769177964647, + "grad_norm": 2.7278840599373817, + "learning_rate": 1.909569011309462e-05, + "loss": 1.8123, + "step": 946 + }, + { + "epoch": 0.16251930667581946, + "grad_norm": 3.414306504376067, + "learning_rate": 1.9093378909935503e-05, + "loss": 1.8098, + "step": 947 + }, + { + "epoch": 0.16269092157199244, + "grad_norm": 3.2035688150224404, + "learning_rate": 1.9091064897330213e-05, + "loss": 1.8737, + "step": 948 + }, + { + "epoch": 0.16286253646816543, + "grad_norm": 2.836132541693522, + "learning_rate": 1.908874807599368e-05, + "loss": 2.0398, + "step": 949 + }, + { + "epoch": 0.16303415136433844, + "grad_norm": 3.1036563643922706, + "learning_rate": 1.908642844664169e-05, + "loss": 1.8861, + "step": 950 + }, + { + "epoch": 0.16320576626051142, + "grad_norm": 2.983245282448459, + "learning_rate": 1.9084106009990905e-05, + "loss": 1.7373, + "step": 951 + }, + { + "epoch": 0.1633773811566844, + "grad_norm": 6.702190132656186, + "learning_rate": 1.9081780766758862e-05, + "loss": 1.7819, + "step": 952 + }, + { + "epoch": 0.1635489960528574, + "grad_norm": 5.130541009613373, + "learning_rate": 1.9079452717663946e-05, + "loss": 2.0151, + "step": 953 + }, + { + "epoch": 0.16372061094903037, + "grad_norm": 3.410438731271111, + "learning_rate": 1.9077121863425428e-05, + "loss": 1.7795, + "step": 954 + }, + { + "epoch": 0.16389222584520335, + "grad_norm": 2.976219591231255, + "learning_rate": 1.9074788204763438e-05, + "loss": 1.917, + "step": 955 + }, + { + "epoch": 0.16406384074137634, + "grad_norm": 3.2954198967420467, + "learning_rate": 1.907245174239896e-05, + "loss": 1.9899, + "step": 956 + }, + { + "epoch": 0.16423545563754935, + "grad_norm": 3.4173110130146602, + "learning_rate": 1.9070112477053874e-05, + "loss": 1.9594, + "step": 957 + }, + { + "epoch": 0.16440707053372233, + "grad_norm": 2.714965944147371, + "learning_rate": 1.906777040945089e-05, + "loss": 1.6845, + "step": 958 + }, + { + "epoch": 0.16457868542989532, + "grad_norm": 3.653926983431517, + "learning_rate": 1.9065425540313613e-05, + "loss": 1.9838, + "step": 959 + }, + { + "epoch": 0.1647503003260683, + "grad_norm": 3.5435716850442054, + "learning_rate": 1.9063077870366504e-05, + "loss": 2.0632, + "step": 960 + }, + { + "epoch": 0.16492191522224128, + "grad_norm": 2.7207032706526957, + "learning_rate": 1.9060727400334875e-05, + "loss": 1.8097, + "step": 961 + }, + { + "epoch": 0.16509353011841427, + "grad_norm": 2.892369982518829, + "learning_rate": 1.905837413094493e-05, + "loss": 1.9241, + "step": 962 + }, + { + "epoch": 0.16526514501458728, + "grad_norm": 3.6079034976895206, + "learning_rate": 1.9056018062923717e-05, + "loss": 1.7801, + "step": 963 + }, + { + "epoch": 0.16543675991076026, + "grad_norm": 3.1529193450761888, + "learning_rate": 1.9053659196999157e-05, + "loss": 2.0072, + "step": 964 + }, + { + "epoch": 0.16560837480693325, + "grad_norm": 2.6680618292760654, + "learning_rate": 1.9051297533900032e-05, + "loss": 1.58, + "step": 965 + }, + { + "epoch": 0.16577998970310623, + "grad_norm": 2.7497719371609786, + "learning_rate": 1.9048933074355988e-05, + "loss": 1.8281, + "step": 966 + }, + { + "epoch": 0.1659516045992792, + "grad_norm": 2.6238643035791043, + "learning_rate": 1.9046565819097546e-05, + "loss": 1.5967, + "step": 967 + }, + { + "epoch": 0.1661232194954522, + "grad_norm": 3.805257049881217, + "learning_rate": 1.9044195768856068e-05, + "loss": 2.084, + "step": 968 + }, + { + "epoch": 0.16629483439162518, + "grad_norm": 3.6411979141620403, + "learning_rate": 1.9041822924363808e-05, + 
"loss": 2.1312, + "step": 969 + }, + { + "epoch": 0.1664664492877982, + "grad_norm": 3.6989042208560092, + "learning_rate": 1.9039447286353855e-05, + "loss": 1.9246, + "step": 970 + }, + { + "epoch": 0.16663806418397117, + "grad_norm": 2.952442496832039, + "learning_rate": 1.9037068855560184e-05, + "loss": 1.9249, + "step": 971 + }, + { + "epoch": 0.16680967908014416, + "grad_norm": 2.947003125909465, + "learning_rate": 1.903468763271762e-05, + "loss": 1.8068, + "step": 972 + }, + { + "epoch": 0.16698129397631714, + "grad_norm": 2.9669996228262083, + "learning_rate": 1.9032303618561855e-05, + "loss": 1.6313, + "step": 973 + }, + { + "epoch": 0.16715290887249012, + "grad_norm": 3.851865592926603, + "learning_rate": 1.9029916813829438e-05, + "loss": 1.9488, + "step": 974 + }, + { + "epoch": 0.1673245237686631, + "grad_norm": 4.1352847565813065, + "learning_rate": 1.902752721925779e-05, + "loss": 1.6762, + "step": 975 + }, + { + "epoch": 0.16749613866483612, + "grad_norm": 4.25371793127814, + "learning_rate": 1.9025134835585187e-05, + "loss": 2.0314, + "step": 976 + }, + { + "epoch": 0.1676677535610091, + "grad_norm": 2.9739894442464103, + "learning_rate": 1.902273966355077e-05, + "loss": 1.9101, + "step": 977 + }, + { + "epoch": 0.1678393684571821, + "grad_norm": 2.63201072002799, + "learning_rate": 1.9020341703894537e-05, + "loss": 1.4468, + "step": 978 + }, + { + "epoch": 0.16801098335335507, + "grad_norm": 2.739177288565023, + "learning_rate": 1.901794095735735e-05, + "loss": 1.7929, + "step": 979 + }, + { + "epoch": 0.16818259824952805, + "grad_norm": 3.045465062594323, + "learning_rate": 1.9015537424680933e-05, + "loss": 1.7605, + "step": 980 + }, + { + "epoch": 0.16835421314570104, + "grad_norm": 2.4326117471284188, + "learning_rate": 1.901313110660787e-05, + "loss": 1.7179, + "step": 981 + }, + { + "epoch": 0.16852582804187405, + "grad_norm": 2.4453337083070497, + "learning_rate": 1.9010722003881604e-05, + "loss": 1.6435, + "step": 982 + }, + { + "epoch": 0.16869744293804703, + "grad_norm": 4.61559448035971, + "learning_rate": 1.900831011724644e-05, + "loss": 2.024, + "step": 983 + }, + { + "epoch": 0.16886905783422002, + "grad_norm": 2.8038211548060734, + "learning_rate": 1.9005895447447543e-05, + "loss": 1.7816, + "step": 984 + }, + { + "epoch": 0.169040672730393, + "grad_norm": 2.3615213486971633, + "learning_rate": 1.9003477995230942e-05, + "loss": 1.8099, + "step": 985 + }, + { + "epoch": 0.16921228762656598, + "grad_norm": 2.440052416415365, + "learning_rate": 1.9001057761343512e-05, + "loss": 1.9574, + "step": 986 + }, + { + "epoch": 0.16938390252273897, + "grad_norm": 2.460941416495098, + "learning_rate": 1.8998634746533004e-05, + "loss": 1.8803, + "step": 987 + }, + { + "epoch": 0.16955551741891195, + "grad_norm": 4.451366701398478, + "learning_rate": 1.8996208951548022e-05, + "loss": 1.7882, + "step": 988 + }, + { + "epoch": 0.16972713231508496, + "grad_norm": 2.6603009507375437, + "learning_rate": 1.899378037713802e-05, + "loss": 1.7881, + "step": 989 + }, + { + "epoch": 0.16989874721125794, + "grad_norm": 2.7031088505383796, + "learning_rate": 1.8991349024053326e-05, + "loss": 1.693, + "step": 990 + }, + { + "epoch": 0.17007036210743093, + "grad_norm": 2.503672741774846, + "learning_rate": 1.898891489304511e-05, + "loss": 1.6861, + "step": 991 + }, + { + "epoch": 0.1702419770036039, + "grad_norm": 2.3955969538278223, + "learning_rate": 1.8986477984865424e-05, + "loss": 1.7797, + "step": 992 + }, + { + "epoch": 0.1704135918997769, + "grad_norm": 2.5312842171580057, + 
"learning_rate": 1.8984038300267153e-05, + "loss": 1.8429, + "step": 993 + }, + { + "epoch": 0.17058520679594988, + "grad_norm": 2.7008350326751676, + "learning_rate": 1.898159584000405e-05, + "loss": 1.7478, + "step": 994 + }, + { + "epoch": 0.1707568216921229, + "grad_norm": 3.2241673640984114, + "learning_rate": 1.8979150604830726e-05, + "loss": 1.9911, + "step": 995 + }, + { + "epoch": 0.17092843658829587, + "grad_norm": 3.08809992159637, + "learning_rate": 1.897670259550265e-05, + "loss": 1.9767, + "step": 996 + }, + { + "epoch": 0.17110005148446886, + "grad_norm": 2.2915437722533296, + "learning_rate": 1.8974251812776155e-05, + "loss": 1.8417, + "step": 997 + }, + { + "epoch": 0.17127166638064184, + "grad_norm": 3.2335440033475193, + "learning_rate": 1.8971798257408408e-05, + "loss": 1.9661, + "step": 998 + }, + { + "epoch": 0.17144328127681482, + "grad_norm": 2.5960898381278934, + "learning_rate": 1.8969341930157458e-05, + "loss": 1.8926, + "step": 999 + }, + { + "epoch": 0.1716148961729878, + "grad_norm": 2.595363848437771, + "learning_rate": 1.8966882831782197e-05, + "loss": 1.9053, + "step": 1000 + }, + { + "epoch": 0.1717865110691608, + "grad_norm": 3.1288593573128125, + "learning_rate": 1.8964420963042375e-05, + "loss": 1.8298, + "step": 1001 + }, + { + "epoch": 0.1719581259653338, + "grad_norm": 3.066352066925028, + "learning_rate": 1.8961956324698603e-05, + "loss": 1.7616, + "step": 1002 + }, + { + "epoch": 0.17212974086150679, + "grad_norm": 2.205000671917753, + "learning_rate": 1.895948891751234e-05, + "loss": 1.6981, + "step": 1003 + }, + { + "epoch": 0.17230135575767977, + "grad_norm": 2.3401231677521013, + "learning_rate": 1.8957018742245906e-05, + "loss": 1.8939, + "step": 1004 + }, + { + "epoch": 0.17247297065385275, + "grad_norm": 2.6038708153000956, + "learning_rate": 1.895454579966247e-05, + "loss": 1.9144, + "step": 1005 + }, + { + "epoch": 0.17264458555002574, + "grad_norm": 5.204708163513282, + "learning_rate": 1.8952070090526066e-05, + "loss": 1.7359, + "step": 1006 + }, + { + "epoch": 0.17281620044619872, + "grad_norm": 2.7438381612121736, + "learning_rate": 1.894959161560157e-05, + "loss": 1.9809, + "step": 1007 + }, + { + "epoch": 0.17298781534237173, + "grad_norm": 2.8151981464137448, + "learning_rate": 1.8947110375654732e-05, + "loss": 1.6311, + "step": 1008 + }, + { + "epoch": 0.17315943023854471, + "grad_norm": 2.653602968512543, + "learning_rate": 1.894462637145213e-05, + "loss": 1.9819, + "step": 1009 + }, + { + "epoch": 0.1733310451347177, + "grad_norm": 2.7489311926086932, + "learning_rate": 1.8942139603761222e-05, + "loss": 2.0842, + "step": 1010 + }, + { + "epoch": 0.17350266003089068, + "grad_norm": 2.4668126904731174, + "learning_rate": 1.8939650073350296e-05, + "loss": 1.5547, + "step": 1011 + }, + { + "epoch": 0.17367427492706367, + "grad_norm": 3.123375410077176, + "learning_rate": 1.8937157780988512e-05, + "loss": 1.9815, + "step": 1012 + }, + { + "epoch": 0.17384588982323665, + "grad_norm": 2.8327118437264525, + "learning_rate": 1.8934662727445877e-05, + "loss": 1.7706, + "step": 1013 + }, + { + "epoch": 0.17401750471940963, + "grad_norm": 2.3726618036391045, + "learning_rate": 1.893216491349325e-05, + "loss": 2.0324, + "step": 1014 + }, + { + "epoch": 0.17418911961558264, + "grad_norm": 2.650378326824613, + "learning_rate": 1.8929664339902342e-05, + "loss": 1.8853, + "step": 1015 + }, + { + "epoch": 0.17436073451175563, + "grad_norm": 2.497401438566898, + "learning_rate": 1.8927161007445715e-05, + "loss": 1.8044, + "step": 1016 + }, + { + 
"epoch": 0.1745323494079286, + "grad_norm": 3.1593763637345225, + "learning_rate": 1.8924654916896793e-05, + "loss": 2.0135, + "step": 1017 + }, + { + "epoch": 0.1747039643041016, + "grad_norm": 3.1305083044842155, + "learning_rate": 1.8922146069029843e-05, + "loss": 1.918, + "step": 1018 + }, + { + "epoch": 0.17487557920027458, + "grad_norm": 2.739394354944574, + "learning_rate": 1.8919634464619985e-05, + "loss": 1.7404, + "step": 1019 + }, + { + "epoch": 0.17504719409644756, + "grad_norm": 3.3322367314107, + "learning_rate": 1.8917120104443195e-05, + "loss": 1.7972, + "step": 1020 + }, + { + "epoch": 0.17521880899262057, + "grad_norm": 2.9088292950504195, + "learning_rate": 1.8914602989276294e-05, + "loss": 2.0117, + "step": 1021 + }, + { + "epoch": 0.17539042388879356, + "grad_norm": 3.219667799774691, + "learning_rate": 1.8912083119896956e-05, + "loss": 1.9661, + "step": 1022 + }, + { + "epoch": 0.17556203878496654, + "grad_norm": 3.495996324018158, + "learning_rate": 1.8909560497083718e-05, + "loss": 1.8407, + "step": 1023 + }, + { + "epoch": 0.17573365368113952, + "grad_norm": 9.414813858534298, + "learning_rate": 1.8907035121615946e-05, + "loss": 2.1415, + "step": 1024 + }, + { + "epoch": 0.1759052685773125, + "grad_norm": 3.4640684834247084, + "learning_rate": 1.8904506994273872e-05, + "loss": 1.7853, + "step": 1025 + }, + { + "epoch": 0.1760768834734855, + "grad_norm": 3.4016429899923306, + "learning_rate": 1.8901976115838576e-05, + "loss": 1.9636, + "step": 1026 + }, + { + "epoch": 0.17624849836965847, + "grad_norm": 3.079043867385513, + "learning_rate": 1.889944248709198e-05, + "loss": 1.6901, + "step": 1027 + }, + { + "epoch": 0.17642011326583149, + "grad_norm": 3.796691967016937, + "learning_rate": 1.889690610881687e-05, + "loss": 1.9453, + "step": 1028 + }, + { + "epoch": 0.17659172816200447, + "grad_norm": 2.952102254205979, + "learning_rate": 1.8894366981796865e-05, + "loss": 1.8344, + "step": 1029 + }, + { + "epoch": 0.17676334305817745, + "grad_norm": 3.029431693632875, + "learning_rate": 1.8891825106816444e-05, + "loss": 1.743, + "step": 1030 + }, + { + "epoch": 0.17693495795435044, + "grad_norm": 3.4461112507548926, + "learning_rate": 1.888928048466094e-05, + "loss": 1.6788, + "step": 1031 + }, + { + "epoch": 0.17710657285052342, + "grad_norm": 3.2071682047553898, + "learning_rate": 1.8886733116116515e-05, + "loss": 1.619, + "step": 1032 + }, + { + "epoch": 0.1772781877466964, + "grad_norm": 3.206436516046294, + "learning_rate": 1.8884183001970197e-05, + "loss": 2.2403, + "step": 1033 + }, + { + "epoch": 0.17744980264286941, + "grad_norm": 3.6350091791623265, + "learning_rate": 1.8881630143009856e-05, + "loss": 1.892, + "step": 1034 + }, + { + "epoch": 0.1776214175390424, + "grad_norm": 2.8812949312018206, + "learning_rate": 1.8879074540024215e-05, + "loss": 1.8083, + "step": 1035 + }, + { + "epoch": 0.17779303243521538, + "grad_norm": 2.589130333556869, + "learning_rate": 1.8876516193802838e-05, + "loss": 1.6781, + "step": 1036 + }, + { + "epoch": 0.17796464733138836, + "grad_norm": 3.804334864048245, + "learning_rate": 1.8873955105136134e-05, + "loss": 1.7368, + "step": 1037 + }, + { + "epoch": 0.17813626222756135, + "grad_norm": 3.2877881347476134, + "learning_rate": 1.8871391274815373e-05, + "loss": 1.8528, + "step": 1038 + }, + { + "epoch": 0.17830787712373433, + "grad_norm": 3.7272616273923775, + "learning_rate": 1.8868824703632658e-05, + "loss": 1.7402, + "step": 1039 + }, + { + "epoch": 0.17847949201990732, + "grad_norm": 2.585434900267579, + "learning_rate": 
1.8866255392380945e-05, + "loss": 1.894, + "step": 1040 + }, + { + "epoch": 0.17865110691608033, + "grad_norm": 2.3910608720654256, + "learning_rate": 1.886368334185404e-05, + "loss": 1.8734, + "step": 1041 + }, + { + "epoch": 0.1788227218122533, + "grad_norm": 4.566912535500701, + "learning_rate": 1.8861108552846587e-05, + "loss": 2.2143, + "step": 1042 + }, + { + "epoch": 0.1789943367084263, + "grad_norm": 2.7544536727928857, + "learning_rate": 1.8858531026154077e-05, + "loss": 2.0066, + "step": 1043 + }, + { + "epoch": 0.17916595160459928, + "grad_norm": 4.827569095465524, + "learning_rate": 1.885595076257286e-05, + "loss": 1.7604, + "step": 1044 + }, + { + "epoch": 0.17933756650077226, + "grad_norm": 3.556294587132145, + "learning_rate": 1.8853367762900117e-05, + "loss": 1.7561, + "step": 1045 + }, + { + "epoch": 0.17950918139694524, + "grad_norm": 13.180361400090435, + "learning_rate": 1.8850782027933876e-05, + "loss": 1.986, + "step": 1046 + }, + { + "epoch": 0.17968079629311826, + "grad_norm": 2.679281925560701, + "learning_rate": 1.8848193558473014e-05, + "loss": 1.828, + "step": 1047 + }, + { + "epoch": 0.17985241118929124, + "grad_norm": 2.4857864808750425, + "learning_rate": 1.8845602355317257e-05, + "loss": 1.7873, + "step": 1048 + }, + { + "epoch": 0.18002402608546422, + "grad_norm": 2.573732073133298, + "learning_rate": 1.8843008419267164e-05, + "loss": 1.8803, + "step": 1049 + }, + { + "epoch": 0.1801956409816372, + "grad_norm": 2.6001052561724993, + "learning_rate": 1.8840411751124152e-05, + "loss": 2.0575, + "step": 1050 + }, + { + "epoch": 0.1803672558778102, + "grad_norm": 3.947639652408305, + "learning_rate": 1.883781235169047e-05, + "loss": 1.8958, + "step": 1051 + }, + { + "epoch": 0.18053887077398317, + "grad_norm": 2.348621344786185, + "learning_rate": 1.8835210221769214e-05, + "loss": 1.5452, + "step": 1052 + }, + { + "epoch": 0.18071048567015616, + "grad_norm": 3.324274276550934, + "learning_rate": 1.8832605362164326e-05, + "loss": 1.6412, + "step": 1053 + }, + { + "epoch": 0.18088210056632917, + "grad_norm": 2.6759678165627534, + "learning_rate": 1.8829997773680598e-05, + "loss": 1.7532, + "step": 1054 + }, + { + "epoch": 0.18105371546250215, + "grad_norm": 2.6371836469673315, + "learning_rate": 1.882738745712365e-05, + "loss": 1.9407, + "step": 1055 + }, + { + "epoch": 0.18122533035867514, + "grad_norm": 2.674592344303099, + "learning_rate": 1.8824774413299954e-05, + "loss": 1.9499, + "step": 1056 + }, + { + "epoch": 0.18139694525484812, + "grad_norm": 2.912062735091322, + "learning_rate": 1.882215864301683e-05, + "loss": 1.7272, + "step": 1057 + }, + { + "epoch": 0.1815685601510211, + "grad_norm": 2.8826820866102403, + "learning_rate": 1.8819540147082424e-05, + "loss": 1.8309, + "step": 1058 + }, + { + "epoch": 0.18174017504719409, + "grad_norm": 4.750127936117877, + "learning_rate": 1.881691892630574e-05, + "loss": 2.1138, + "step": 1059 + }, + { + "epoch": 0.1819117899433671, + "grad_norm": 3.036601337031838, + "learning_rate": 1.8814294981496616e-05, + "loss": 1.8791, + "step": 1060 + }, + { + "epoch": 0.18208340483954008, + "grad_norm": 2.965717773074859, + "learning_rate": 1.8811668313465736e-05, + "loss": 2.0173, + "step": 1061 + }, + { + "epoch": 0.18225501973571306, + "grad_norm": 3.2967787137875204, + "learning_rate": 1.8809038923024613e-05, + "loss": 1.7907, + "step": 1062 + }, + { + "epoch": 0.18242663463188605, + "grad_norm": 2.77407663563314, + "learning_rate": 1.8806406810985624e-05, + "loss": 1.6843, + "step": 1063 + }, + { + "epoch": 
0.18259824952805903, + "grad_norm": 3.816663346127986, + "learning_rate": 1.8803771978161964e-05, + "loss": 1.7359, + "step": 1064 + }, + { + "epoch": 0.18276986442423201, + "grad_norm": 2.5044013306095363, + "learning_rate": 1.8801134425367676e-05, + "loss": 1.7789, + "step": 1065 + }, + { + "epoch": 0.182941479320405, + "grad_norm": 2.930811755950049, + "learning_rate": 1.8798494153417657e-05, + "loss": 1.955, + "step": 1066 + }, + { + "epoch": 0.183113094216578, + "grad_norm": 3.0432956618503297, + "learning_rate": 1.8795851163127626e-05, + "loss": 1.7762, + "step": 1067 + }, + { + "epoch": 0.183284709112751, + "grad_norm": 3.6274248005457004, + "learning_rate": 1.8793205455314147e-05, + "loss": 1.8944, + "step": 1068 + }, + { + "epoch": 0.18345632400892398, + "grad_norm": 4.374457586779356, + "learning_rate": 1.8790557030794627e-05, + "loss": 1.8761, + "step": 1069 + }, + { + "epoch": 0.18362793890509696, + "grad_norm": 2.829705551125168, + "learning_rate": 1.878790589038731e-05, + "loss": 1.7654, + "step": 1070 + }, + { + "epoch": 0.18379955380126994, + "grad_norm": 2.5614000331429962, + "learning_rate": 1.878525203491128e-05, + "loss": 1.8806, + "step": 1071 + }, + { + "epoch": 0.18397116869744293, + "grad_norm": 2.6451778633835787, + "learning_rate": 1.878259546518646e-05, + "loss": 1.9004, + "step": 1072 + }, + { + "epoch": 0.18414278359361594, + "grad_norm": 3.1207731834877106, + "learning_rate": 1.8779936182033606e-05, + "loss": 1.5279, + "step": 1073 + }, + { + "epoch": 0.18431439848978892, + "grad_norm": 2.869087511211109, + "learning_rate": 1.8777274186274325e-05, + "loss": 1.7406, + "step": 1074 + }, + { + "epoch": 0.1844860133859619, + "grad_norm": 2.5419712105170182, + "learning_rate": 1.8774609478731048e-05, + "loss": 1.7459, + "step": 1075 + }, + { + "epoch": 0.1846576282821349, + "grad_norm": 2.434385372131046, + "learning_rate": 1.8771942060227052e-05, + "loss": 1.8075, + "step": 1076 + }, + { + "epoch": 0.18482924317830787, + "grad_norm": 2.7126029989324043, + "learning_rate": 1.876927193158645e-05, + "loss": 1.8576, + "step": 1077 + }, + { + "epoch": 0.18500085807448086, + "grad_norm": 3.1373612003072893, + "learning_rate": 1.876659909363419e-05, + "loss": 1.9765, + "step": 1078 + }, + { + "epoch": 0.18517247297065384, + "grad_norm": 2.127902015337168, + "learning_rate": 1.8763923547196065e-05, + "loss": 1.7472, + "step": 1079 + }, + { + "epoch": 0.18534408786682685, + "grad_norm": 3.987208635479881, + "learning_rate": 1.8761245293098695e-05, + "loss": 1.7846, + "step": 1080 + }, + { + "epoch": 0.18551570276299983, + "grad_norm": 2.589976903982755, + "learning_rate": 1.875856433216954e-05, + "loss": 1.7642, + "step": 1081 + }, + { + "epoch": 0.18568731765917282, + "grad_norm": 3.288323301571295, + "learning_rate": 1.8755880665236895e-05, + "loss": 1.9984, + "step": 1082 + }, + { + "epoch": 0.1858589325553458, + "grad_norm": 2.488300473967599, + "learning_rate": 1.8753194293129892e-05, + "loss": 1.706, + "step": 1083 + }, + { + "epoch": 0.18603054745151879, + "grad_norm": 4.019839208045935, + "learning_rate": 1.8750505216678505e-05, + "loss": 1.8421, + "step": 1084 + }, + { + "epoch": 0.18620216234769177, + "grad_norm": 3.2029714550456485, + "learning_rate": 1.8747813436713534e-05, + "loss": 1.7128, + "step": 1085 + }, + { + "epoch": 0.18637377724386478, + "grad_norm": 3.2149925112779614, + "learning_rate": 1.874511895406662e-05, + "loss": 1.9479, + "step": 1086 + }, + { + "epoch": 0.18654539214003776, + "grad_norm": 2.39394180672311, + "learning_rate": 
1.8742421769570235e-05, + "loss": 1.4238, + "step": 1087 + }, + { + "epoch": 0.18671700703621075, + "grad_norm": 2.8354364046359386, + "learning_rate": 1.8739721884057693e-05, + "loss": 2.0304, + "step": 1088 + }, + { + "epoch": 0.18688862193238373, + "grad_norm": 2.365789454238505, + "learning_rate": 1.873701929836313e-05, + "loss": 1.8165, + "step": 1089 + }, + { + "epoch": 0.18706023682855671, + "grad_norm": 2.4695820556846306, + "learning_rate": 1.873431401332153e-05, + "loss": 1.6697, + "step": 1090 + }, + { + "epoch": 0.1872318517247297, + "grad_norm": 2.3797983670935396, + "learning_rate": 1.8731606029768704e-05, + "loss": 1.536, + "step": 1091 + }, + { + "epoch": 0.18740346662090268, + "grad_norm": 2.2859708842423925, + "learning_rate": 1.8728895348541294e-05, + "loss": 1.9074, + "step": 1092 + }, + { + "epoch": 0.1875750815170757, + "grad_norm": 2.796711393950907, + "learning_rate": 1.872618197047678e-05, + "loss": 2.0874, + "step": 1093 + }, + { + "epoch": 0.18774669641324868, + "grad_norm": 2.692776743704972, + "learning_rate": 1.8723465896413476e-05, + "loss": 1.9903, + "step": 1094 + }, + { + "epoch": 0.18791831130942166, + "grad_norm": 2.9551043800793715, + "learning_rate": 1.872074712719053e-05, + "loss": 1.9889, + "step": 1095 + }, + { + "epoch": 0.18808992620559464, + "grad_norm": 3.269566715140991, + "learning_rate": 1.871802566364791e-05, + "loss": 1.7872, + "step": 1096 + }, + { + "epoch": 0.18826154110176763, + "grad_norm": 2.2442935183112196, + "learning_rate": 1.8715301506626435e-05, + "loss": 1.7215, + "step": 1097 + }, + { + "epoch": 0.1884331559979406, + "grad_norm": 3.826784809565196, + "learning_rate": 1.8712574656967743e-05, + "loss": 1.8555, + "step": 1098 + }, + { + "epoch": 0.18860477089411362, + "grad_norm": 2.4230380154716307, + "learning_rate": 1.8709845115514306e-05, + "loss": 1.6155, + "step": 1099 + }, + { + "epoch": 0.1887763857902866, + "grad_norm": 3.7052796319842707, + "learning_rate": 1.8707112883109438e-05, + "loss": 1.6623, + "step": 1100 + }, + { + "epoch": 0.1889480006864596, + "grad_norm": 2.308489916424168, + "learning_rate": 1.870437796059727e-05, + "loss": 1.6847, + "step": 1101 + }, + { + "epoch": 0.18911961558263257, + "grad_norm": 3.6337660185440246, + "learning_rate": 1.870164034882277e-05, + "loss": 2.1511, + "step": 1102 + }, + { + "epoch": 0.18929123047880556, + "grad_norm": 3.886332399848284, + "learning_rate": 1.8698900048631744e-05, + "loss": 1.627, + "step": 1103 + }, + { + "epoch": 0.18946284537497854, + "grad_norm": 2.758130505540072, + "learning_rate": 1.8696157060870815e-05, + "loss": 1.6426, + "step": 1104 + }, + { + "epoch": 0.18963446027115152, + "grad_norm": 2.978496918541371, + "learning_rate": 1.8693411386387445e-05, + "loss": 1.7926, + "step": 1105 + }, + { + "epoch": 0.18980607516732453, + "grad_norm": 2.6291954597409717, + "learning_rate": 1.869066302602992e-05, + "loss": 1.7945, + "step": 1106 + }, + { + "epoch": 0.18997769006349752, + "grad_norm": 2.6400358893561733, + "learning_rate": 1.8687911980647375e-05, + "loss": 2.0097, + "step": 1107 + }, + { + "epoch": 0.1901493049596705, + "grad_norm": 2.5078371492775817, + "learning_rate": 1.868515825108974e-05, + "loss": 1.8463, + "step": 1108 + }, + { + "epoch": 0.19032091985584348, + "grad_norm": 3.2672353956382456, + "learning_rate": 1.8682401838207808e-05, + "loss": 1.988, + "step": 1109 + }, + { + "epoch": 0.19049253475201647, + "grad_norm": 2.684958790285497, + "learning_rate": 1.8679642742853178e-05, + "loss": 1.7406, + "step": 1110 + }, + { + "epoch": 
0.19066414964818945, + "grad_norm": 3.1484450070703427, + "learning_rate": 1.8676880965878293e-05, + "loss": 1.8318, + "step": 1111 + }, + { + "epoch": 0.19083576454436246, + "grad_norm": 2.6696280789216167, + "learning_rate": 1.8674116508136415e-05, + "loss": 2.0488, + "step": 1112 + }, + { + "epoch": 0.19100737944053545, + "grad_norm": 2.631949173273026, + "learning_rate": 1.8671349370481636e-05, + "loss": 1.9126, + "step": 1113 + }, + { + "epoch": 0.19117899433670843, + "grad_norm": 2.887587241068506, + "learning_rate": 1.8668579553768882e-05, + "loss": 1.9419, + "step": 1114 + }, + { + "epoch": 0.1913506092328814, + "grad_norm": 3.1809261401885265, + "learning_rate": 1.86658070588539e-05, + "loss": 1.9567, + "step": 1115 + }, + { + "epoch": 0.1915222241290544, + "grad_norm": 3.348456752055784, + "learning_rate": 1.8663031886593268e-05, + "loss": 1.8364, + "step": 1116 + }, + { + "epoch": 0.19169383902522738, + "grad_norm": 2.5074685817563416, + "learning_rate": 1.866025403784439e-05, + "loss": 1.7228, + "step": 1117 + }, + { + "epoch": 0.19186545392140036, + "grad_norm": 2.4983148708824325, + "learning_rate": 1.865747351346549e-05, + "loss": 1.7511, + "step": 1118 + }, + { + "epoch": 0.19203706881757338, + "grad_norm": 2.4450778760072547, + "learning_rate": 1.865469031431563e-05, + "loss": 1.7601, + "step": 1119 + }, + { + "epoch": 0.19220868371374636, + "grad_norm": 2.802171577959105, + "learning_rate": 1.8651904441254696e-05, + "loss": 2.0065, + "step": 1120 + }, + { + "epoch": 0.19238029860991934, + "grad_norm": 2.5400839595023994, + "learning_rate": 1.8649115895143394e-05, + "loss": 1.7311, + "step": 1121 + }, + { + "epoch": 0.19255191350609233, + "grad_norm": 3.1573210941620284, + "learning_rate": 1.8646324676843263e-05, + "loss": 1.7388, + "step": 1122 + }, + { + "epoch": 0.1927235284022653, + "grad_norm": 2.196387772358243, + "learning_rate": 1.864353078721666e-05, + "loss": 1.7639, + "step": 1123 + }, + { + "epoch": 0.1928951432984383, + "grad_norm": 3.3422846022968375, + "learning_rate": 1.8640734227126778e-05, + "loss": 1.8086, + "step": 1124 + }, + { + "epoch": 0.1930667581946113, + "grad_norm": 2.468942421946988, + "learning_rate": 1.8637934997437623e-05, + "loss": 1.6612, + "step": 1125 + }, + { + "epoch": 0.1932383730907843, + "grad_norm": 2.0826885772486308, + "learning_rate": 1.8635133099014032e-05, + "loss": 1.8644, + "step": 1126 + }, + { + "epoch": 0.19340998798695727, + "grad_norm": 2.4326412367426373, + "learning_rate": 1.8632328532721665e-05, + "loss": 1.931, + "step": 1127 + }, + { + "epoch": 0.19358160288313025, + "grad_norm": 2.6605196466013163, + "learning_rate": 1.862952129942701e-05, + "loss": 1.6765, + "step": 1128 + }, + { + "epoch": 0.19375321777930324, + "grad_norm": 2.979635303821038, + "learning_rate": 1.862671139999738e-05, + "loss": 1.5364, + "step": 1129 + }, + { + "epoch": 0.19392483267547622, + "grad_norm": 5.439138700619377, + "learning_rate": 1.86238988353009e-05, + "loss": 1.7336, + "step": 1130 + }, + { + "epoch": 0.1940964475716492, + "grad_norm": 2.4756700631713473, + "learning_rate": 1.862108360620653e-05, + "loss": 1.971, + "step": 1131 + }, + { + "epoch": 0.19426806246782222, + "grad_norm": 2.7693302346575805, + "learning_rate": 1.8618265713584047e-05, + "loss": 1.9736, + "step": 1132 + }, + { + "epoch": 0.1944396773639952, + "grad_norm": 3.188145891790364, + "learning_rate": 1.861544515830406e-05, + "loss": 1.8882, + "step": 1133 + }, + { + "epoch": 0.19461129226016818, + "grad_norm": 3.2927482288339642, + "learning_rate": 
1.8612621941237986e-05, + "loss": 1.9117, + "step": 1134 + }, + { + "epoch": 0.19478290715634117, + "grad_norm": 2.6877567394223614, + "learning_rate": 1.8609796063258076e-05, + "loss": 1.6008, + "step": 1135 + }, + { + "epoch": 0.19495452205251415, + "grad_norm": 2.3054717979221953, + "learning_rate": 1.86069675252374e-05, + "loss": 1.6678, + "step": 1136 + }, + { + "epoch": 0.19512613694868713, + "grad_norm": 3.268207156742372, + "learning_rate": 1.860413632804985e-05, + "loss": 1.8276, + "step": 1137 + }, + { + "epoch": 0.19529775184486015, + "grad_norm": 3.3650128262023067, + "learning_rate": 1.8601302472570137e-05, + "loss": 1.9564, + "step": 1138 + }, + { + "epoch": 0.19546936674103313, + "grad_norm": 3.13641456636503, + "learning_rate": 1.8598465959673793e-05, + "loss": 1.6997, + "step": 1139 + }, + { + "epoch": 0.1956409816372061, + "grad_norm": 2.4888234966190854, + "learning_rate": 1.859562679023718e-05, + "loss": 1.7335, + "step": 1140 + }, + { + "epoch": 0.1958125965333791, + "grad_norm": 2.326502908304316, + "learning_rate": 1.859278496513747e-05, + "loss": 1.9794, + "step": 1141 + }, + { + "epoch": 0.19598421142955208, + "grad_norm": 3.791019241688972, + "learning_rate": 1.858994048525266e-05, + "loss": 1.9333, + "step": 1142 + }, + { + "epoch": 0.19615582632572506, + "grad_norm": 3.445224678397263, + "learning_rate": 1.8587093351461564e-05, + "loss": 1.8693, + "step": 1143 + }, + { + "epoch": 0.19632744122189805, + "grad_norm": 2.188082419713291, + "learning_rate": 1.858424356464382e-05, + "loss": 1.6474, + "step": 1144 + }, + { + "epoch": 0.19649905611807106, + "grad_norm": 2.935323731316312, + "learning_rate": 1.858139112567989e-05, + "loss": 1.6986, + "step": 1145 + }, + { + "epoch": 0.19667067101424404, + "grad_norm": 5.780751942109886, + "learning_rate": 1.8578536035451043e-05, + "loss": 1.4593, + "step": 1146 + }, + { + "epoch": 0.19684228591041703, + "grad_norm": 2.884530891955222, + "learning_rate": 1.857567829483937e-05, + "loss": 1.9083, + "step": 1147 + }, + { + "epoch": 0.19701390080659, + "grad_norm": 3.1345637189603353, + "learning_rate": 1.85728179047278e-05, + "loss": 1.6807, + "step": 1148 + }, + { + "epoch": 0.197185515702763, + "grad_norm": 3.1244632897824394, + "learning_rate": 1.8569954866000052e-05, + "loss": 1.7671, + "step": 1149 + }, + { + "epoch": 0.19735713059893598, + "grad_norm": 2.4182516923417046, + "learning_rate": 1.856708917954068e-05, + "loss": 1.4882, + "step": 1150 + }, + { + "epoch": 0.197528745495109, + "grad_norm": 2.8595853140129854, + "learning_rate": 1.856422084623505e-05, + "loss": 1.9436, + "step": 1151 + }, + { + "epoch": 0.19770036039128197, + "grad_norm": 2.4916865800570274, + "learning_rate": 1.8561349866969357e-05, + "loss": 1.5929, + "step": 1152 + }, + { + "epoch": 0.19787197528745495, + "grad_norm": 2.7711134221112976, + "learning_rate": 1.8558476242630595e-05, + "loss": 1.6756, + "step": 1153 + }, + { + "epoch": 0.19804359018362794, + "grad_norm": 3.822574826162146, + "learning_rate": 1.855559997410659e-05, + "loss": 1.9019, + "step": 1154 + }, + { + "epoch": 0.19821520507980092, + "grad_norm": 2.1176687607265112, + "learning_rate": 1.855272106228598e-05, + "loss": 1.5265, + "step": 1155 + }, + { + "epoch": 0.1983868199759739, + "grad_norm": 2.9471459296488343, + "learning_rate": 1.8549839508058215e-05, + "loss": 1.6747, + "step": 1156 + }, + { + "epoch": 0.19855843487214692, + "grad_norm": 3.3245797370321943, + "learning_rate": 1.8546955312313574e-05, + "loss": 1.7418, + "step": 1157 + }, + { + "epoch": 
0.1987300497683199, + "grad_norm": 3.7590341939006287, + "learning_rate": 1.8544068475943134e-05, + "loss": 2.1677, + "step": 1158 + }, + { + "epoch": 0.19890166466449288, + "grad_norm": 2.684332707940541, + "learning_rate": 1.854117899983881e-05, + "loss": 1.8058, + "step": 1159 + }, + { + "epoch": 0.19907327956066587, + "grad_norm": 2.1433615192323, + "learning_rate": 1.8538286884893308e-05, + "loss": 1.3951, + "step": 1160 + }, + { + "epoch": 0.19924489445683885, + "grad_norm": 2.493613543357034, + "learning_rate": 1.853539213200017e-05, + "loss": 1.5709, + "step": 1161 + }, + { + "epoch": 0.19941650935301183, + "grad_norm": 2.6739102443974967, + "learning_rate": 1.8532494742053747e-05, + "loss": 2.0268, + "step": 1162 + }, + { + "epoch": 0.19958812424918482, + "grad_norm": 2.5447858904373395, + "learning_rate": 1.8529594715949194e-05, + "loss": 2.0656, + "step": 1163 + }, + { + "epoch": 0.19975973914535783, + "grad_norm": 2.7842776677002115, + "learning_rate": 1.8526692054582493e-05, + "loss": 1.7685, + "step": 1164 + }, + { + "epoch": 0.1999313540415308, + "grad_norm": 2.5921572243031625, + "learning_rate": 1.8523786758850436e-05, + "loss": 1.9377, + "step": 1165 + }, + { + "epoch": 0.2001029689377038, + "grad_norm": 2.7408247723424535, + "learning_rate": 1.852087882965063e-05, + "loss": 1.8249, + "step": 1166 + }, + { + "epoch": 0.20027458383387678, + "grad_norm": 3.2137183902875375, + "learning_rate": 1.8517968267881495e-05, + "loss": 1.9897, + "step": 1167 + }, + { + "epoch": 0.20044619873004976, + "grad_norm": 5.345600159412697, + "learning_rate": 1.851505507444226e-05, + "loss": 1.9508, + "step": 1168 + }, + { + "epoch": 0.20061781362622275, + "grad_norm": 3.193065256899719, + "learning_rate": 1.851213925023298e-05, + "loss": 2.0566, + "step": 1169 + }, + { + "epoch": 0.20078942852239576, + "grad_norm": 3.506086311061711, + "learning_rate": 1.8509220796154504e-05, + "loss": 1.9816, + "step": 1170 + }, + { + "epoch": 0.20096104341856874, + "grad_norm": 2.8444637855886468, + "learning_rate": 1.850629971310851e-05, + "loss": 1.9854, + "step": 1171 + }, + { + "epoch": 0.20113265831474172, + "grad_norm": 2.443856784353825, + "learning_rate": 1.8503376001997474e-05, + "loss": 1.7351, + "step": 1172 + }, + { + "epoch": 0.2013042732109147, + "grad_norm": 3.746737368687077, + "learning_rate": 1.85004496637247e-05, + "loss": 1.7701, + "step": 1173 + }, + { + "epoch": 0.2014758881070877, + "grad_norm": 2.711946987588241, + "learning_rate": 1.8497520699194295e-05, + "loss": 1.8346, + "step": 1174 + }, + { + "epoch": 0.20164750300326068, + "grad_norm": 2.284625633085561, + "learning_rate": 1.8494589109311168e-05, + "loss": 1.5929, + "step": 1175 + }, + { + "epoch": 0.20181911789943366, + "grad_norm": 2.3956770077097667, + "learning_rate": 1.849165489498106e-05, + "loss": 1.8978, + "step": 1176 + }, + { + "epoch": 0.20199073279560667, + "grad_norm": 2.8082931937038174, + "learning_rate": 1.8488718057110503e-05, + "loss": 1.7675, + "step": 1177 + }, + { + "epoch": 0.20216234769177965, + "grad_norm": 2.8546909357049204, + "learning_rate": 1.8485778596606855e-05, + "loss": 2.0379, + "step": 1178 + }, + { + "epoch": 0.20233396258795264, + "grad_norm": 4.375546502142223, + "learning_rate": 1.8482836514378278e-05, + "loss": 1.9957, + "step": 1179 + }, + { + "epoch": 0.20250557748412562, + "grad_norm": 2.737635853754392, + "learning_rate": 1.8479891811333735e-05, + "loss": 1.6637, + "step": 1180 + }, + { + "epoch": 0.2026771923802986, + "grad_norm": 2.54730197947167, + "learning_rate": 
1.8476944488383012e-05, + "loss": 1.7397, + "step": 1181 + }, + { + "epoch": 0.2028488072764716, + "grad_norm": 2.988740542354885, + "learning_rate": 1.8473994546436702e-05, + "loss": 1.672, + "step": 1182 + }, + { + "epoch": 0.2030204221726446, + "grad_norm": 2.931000226684086, + "learning_rate": 1.84710419864062e-05, + "loss": 1.7761, + "step": 1183 + }, + { + "epoch": 0.20319203706881758, + "grad_norm": 2.4484389732536855, + "learning_rate": 1.846808680920372e-05, + "loss": 1.8635, + "step": 1184 + }, + { + "epoch": 0.20336365196499057, + "grad_norm": 2.346915203819909, + "learning_rate": 1.8465129015742273e-05, + "loss": 1.6783, + "step": 1185 + }, + { + "epoch": 0.20353526686116355, + "grad_norm": 2.483798335639931, + "learning_rate": 1.846216860693569e-05, + "loss": 1.8088, + "step": 1186 + }, + { + "epoch": 0.20370688175733653, + "grad_norm": 2.16574537927803, + "learning_rate": 1.8459205583698598e-05, + "loss": 1.665, + "step": 1187 + }, + { + "epoch": 0.20387849665350952, + "grad_norm": 2.2468116921548233, + "learning_rate": 1.8456239946946448e-05, + "loss": 1.5618, + "step": 1188 + }, + { + "epoch": 0.2040501115496825, + "grad_norm": 2.3846735595775996, + "learning_rate": 1.845327169759548e-05, + "loss": 1.7469, + "step": 1189 + }, + { + "epoch": 0.2042217264458555, + "grad_norm": 2.700436306282447, + "learning_rate": 1.8450300836562755e-05, + "loss": 1.9616, + "step": 1190 + }, + { + "epoch": 0.2043933413420285, + "grad_norm": 2.538884538732009, + "learning_rate": 1.8447327364766135e-05, + "loss": 1.79, + "step": 1191 + }, + { + "epoch": 0.20456495623820148, + "grad_norm": 2.2957387150236483, + "learning_rate": 1.8444351283124288e-05, + "loss": 1.7263, + "step": 1192 + }, + { + "epoch": 0.20473657113437446, + "grad_norm": 2.584075714012207, + "learning_rate": 1.8441372592556687e-05, + "loss": 1.85, + "step": 1193 + }, + { + "epoch": 0.20490818603054745, + "grad_norm": 2.5752319438934306, + "learning_rate": 1.8438391293983616e-05, + "loss": 1.6855, + "step": 1194 + }, + { + "epoch": 0.20507980092672043, + "grad_norm": 3.1925834615752176, + "learning_rate": 1.8435407388326167e-05, + "loss": 2.0197, + "step": 1195 + }, + { + "epoch": 0.20525141582289344, + "grad_norm": 2.8515077395439414, + "learning_rate": 1.8432420876506226e-05, + "loss": 1.8082, + "step": 1196 + }, + { + "epoch": 0.20542303071906642, + "grad_norm": 2.7004022660370786, + "learning_rate": 1.84294317594465e-05, + "loss": 1.7037, + "step": 1197 + }, + { + "epoch": 0.2055946456152394, + "grad_norm": 3.4536004497020576, + "learning_rate": 1.842644003807048e-05, + "loss": 1.5347, + "step": 1198 + }, + { + "epoch": 0.2057662605114124, + "grad_norm": 2.887357245320869, + "learning_rate": 1.8423445713302485e-05, + "loss": 1.7538, + "step": 1199 + }, + { + "epoch": 0.20593787540758537, + "grad_norm": 2.6758799097048067, + "learning_rate": 1.842044878606762e-05, + "loss": 1.8117, + "step": 1200 + }, + { + "epoch": 0.20610949030375836, + "grad_norm": 2.962525042169495, + "learning_rate": 1.8417449257291802e-05, + "loss": 1.9437, + "step": 1201 + }, + { + "epoch": 0.20628110519993134, + "grad_norm": 3.9939244474141806, + "learning_rate": 1.8414447127901758e-05, + "loss": 1.805, + "step": 1202 + }, + { + "epoch": 0.20645272009610435, + "grad_norm": 3.063423974601498, + "learning_rate": 1.8411442398825002e-05, + "loss": 1.8419, + "step": 1203 + }, + { + "epoch": 0.20662433499227734, + "grad_norm": 2.8047779634491796, + "learning_rate": 1.8408435070989866e-05, + "loss": 1.7394, + "step": 1204 + }, + { + "epoch": 
0.20679594988845032, + "grad_norm": 5.474388927535528, + "learning_rate": 1.8405425145325473e-05, + "loss": 1.6637, + "step": 1205 + }, + { + "epoch": 0.2069675647846233, + "grad_norm": 2.552992855264127, + "learning_rate": 1.8402412622761762e-05, + "loss": 1.8176, + "step": 1206 + }, + { + "epoch": 0.2071391796807963, + "grad_norm": 2.71903146785116, + "learning_rate": 1.8399397504229464e-05, + "loss": 1.7233, + "step": 1207 + }, + { + "epoch": 0.20731079457696927, + "grad_norm": 3.527684060923723, + "learning_rate": 1.8396379790660118e-05, + "loss": 1.7322, + "step": 1208 + }, + { + "epoch": 0.20748240947314228, + "grad_norm": 2.49097674953974, + "learning_rate": 1.839335948298606e-05, + "loss": 1.8015, + "step": 1209 + }, + { + "epoch": 0.20765402436931527, + "grad_norm": 2.3350030981195875, + "learning_rate": 1.839033658214043e-05, + "loss": 1.7555, + "step": 1210 + }, + { + "epoch": 0.20782563926548825, + "grad_norm": 2.8352093798802587, + "learning_rate": 1.838731108905717e-05, + "loss": 1.9105, + "step": 1211 + }, + { + "epoch": 0.20799725416166123, + "grad_norm": 2.9389040097425534, + "learning_rate": 1.8384283004671014e-05, + "loss": 1.7127, + "step": 1212 + }, + { + "epoch": 0.20816886905783422, + "grad_norm": 2.5682400122707616, + "learning_rate": 1.8381252329917515e-05, + "loss": 1.7001, + "step": 1213 + }, + { + "epoch": 0.2083404839540072, + "grad_norm": 2.7530152665991356, + "learning_rate": 1.837821906573301e-05, + "loss": 1.8195, + "step": 1214 + }, + { + "epoch": 0.20851209885018018, + "grad_norm": 2.336997282502789, + "learning_rate": 1.8375183213054644e-05, + "loss": 1.8306, + "step": 1215 + }, + { + "epoch": 0.2086837137463532, + "grad_norm": 2.411554579117792, + "learning_rate": 1.837214477282036e-05, + "loss": 1.9118, + "step": 1216 + }, + { + "epoch": 0.20885532864252618, + "grad_norm": 2.5506688469463734, + "learning_rate": 1.8369103745968894e-05, + "loss": 1.8777, + "step": 1217 + }, + { + "epoch": 0.20902694353869916, + "grad_norm": 2.138704374144811, + "learning_rate": 1.8366060133439793e-05, + "loss": 1.5769, + "step": 1218 + }, + { + "epoch": 0.20919855843487214, + "grad_norm": 2.5674488506950803, + "learning_rate": 1.8363013936173393e-05, + "loss": 1.8477, + "step": 1219 + }, + { + "epoch": 0.20937017333104513, + "grad_norm": 2.295409175087095, + "learning_rate": 1.8359965155110836e-05, + "loss": 1.9262, + "step": 1220 + }, + { + "epoch": 0.2095417882272181, + "grad_norm": 2.763266998801072, + "learning_rate": 1.835691379119405e-05, + "loss": 1.9518, + "step": 1221 + }, + { + "epoch": 0.20971340312339112, + "grad_norm": 3.432923194789523, + "learning_rate": 1.835385984536578e-05, + "loss": 1.7289, + "step": 1222 + }, + { + "epoch": 0.2098850180195641, + "grad_norm": 2.547176472872316, + "learning_rate": 1.8350803318569554e-05, + "loss": 1.6852, + "step": 1223 + }, + { + "epoch": 0.2100566329157371, + "grad_norm": 4.619370165881908, + "learning_rate": 1.8347744211749703e-05, + "loss": 1.8217, + "step": 1224 + }, + { + "epoch": 0.21022824781191007, + "grad_norm": 2.4645850233578805, + "learning_rate": 1.834468252585135e-05, + "loss": 2.0458, + "step": 1225 + }, + { + "epoch": 0.21039986270808306, + "grad_norm": 3.447385594195895, + "learning_rate": 1.8341618261820425e-05, + "loss": 1.5174, + "step": 1226 + }, + { + "epoch": 0.21057147760425604, + "grad_norm": 2.9922073043169095, + "learning_rate": 1.833855142060364e-05, + "loss": 1.8422, + "step": 1227 + }, + { + "epoch": 0.21074309250042902, + "grad_norm": 3.1440995019189537, + "learning_rate": 
1.8335482003148518e-05, + "loss": 1.6751, + "step": 1228 + }, + { + "epoch": 0.21091470739660204, + "grad_norm": 2.650246113047022, + "learning_rate": 1.8332410010403365e-05, + "loss": 1.5431, + "step": 1229 + }, + { + "epoch": 0.21108632229277502, + "grad_norm": 2.74448628429144, + "learning_rate": 1.83293354433173e-05, + "loss": 1.7961, + "step": 1230 + }, + { + "epoch": 0.211257937188948, + "grad_norm": 5.173773725496545, + "learning_rate": 1.8326258302840214e-05, + "loss": 2.0131, + "step": 1231 + }, + { + "epoch": 0.211429552085121, + "grad_norm": 2.7630293480019543, + "learning_rate": 1.832317858992281e-05, + "loss": 1.8199, + "step": 1232 + }, + { + "epoch": 0.21160116698129397, + "grad_norm": 2.7474099777145344, + "learning_rate": 1.8320096305516585e-05, + "loss": 1.7734, + "step": 1233 + }, + { + "epoch": 0.21177278187746695, + "grad_norm": 2.907300185016793, + "learning_rate": 1.831701145057382e-05, + "loss": 1.854, + "step": 1234 + }, + { + "epoch": 0.21194439677363996, + "grad_norm": 3.2163083969198385, + "learning_rate": 1.83139240260476e-05, + "loss": 1.9678, + "step": 1235 + }, + { + "epoch": 0.21211601166981295, + "grad_norm": 2.7971666941663, + "learning_rate": 1.8310834032891804e-05, + "loss": 1.7643, + "step": 1236 + }, + { + "epoch": 0.21228762656598593, + "grad_norm": 3.1159513156006846, + "learning_rate": 1.8307741472061097e-05, + "loss": 1.7535, + "step": 1237 + }, + { + "epoch": 0.21245924146215892, + "grad_norm": 2.9782603015278504, + "learning_rate": 1.8304646344510943e-05, + "loss": 1.7584, + "step": 1238 + }, + { + "epoch": 0.2126308563583319, + "grad_norm": 3.1394862566183743, + "learning_rate": 1.8301548651197597e-05, + "loss": 1.7506, + "step": 1239 + }, + { + "epoch": 0.21280247125450488, + "grad_norm": 2.5306688690412233, + "learning_rate": 1.8298448393078106e-05, + "loss": 1.7415, + "step": 1240 + }, + { + "epoch": 0.21297408615067787, + "grad_norm": 2.223856789977762, + "learning_rate": 1.8295345571110312e-05, + "loss": 1.525, + "step": 1241 + }, + { + "epoch": 0.21314570104685088, + "grad_norm": 2.6192094911500874, + "learning_rate": 1.829224018625285e-05, + "loss": 1.7954, + "step": 1242 + }, + { + "epoch": 0.21331731594302386, + "grad_norm": 2.356543372228777, + "learning_rate": 1.828913223946514e-05, + "loss": 1.5326, + "step": 1243 + }, + { + "epoch": 0.21348893083919684, + "grad_norm": 3.0237846537149617, + "learning_rate": 1.8286021731707404e-05, + "loss": 1.6203, + "step": 1244 + }, + { + "epoch": 0.21366054573536983, + "grad_norm": 2.7389379975079104, + "learning_rate": 1.8282908663940646e-05, + "loss": 1.6309, + "step": 1245 + }, + { + "epoch": 0.2138321606315428, + "grad_norm": 2.4741614224392046, + "learning_rate": 1.8279793037126662e-05, + "loss": 1.5894, + "step": 1246 + }, + { + "epoch": 0.2140037755277158, + "grad_norm": 2.8590232662588333, + "learning_rate": 1.8276674852228044e-05, + "loss": 1.8544, + "step": 1247 + }, + { + "epoch": 0.2141753904238888, + "grad_norm": 2.3960262226820928, + "learning_rate": 1.827355411020817e-05, + "loss": 1.7588, + "step": 1248 + }, + { + "epoch": 0.2143470053200618, + "grad_norm": 3.3339359266518667, + "learning_rate": 1.827043081203121e-05, + "loss": 1.8232, + "step": 1249 + }, + { + "epoch": 0.21451862021623477, + "grad_norm": 2.8951852696813556, + "learning_rate": 1.8267304958662126e-05, + "loss": 1.7857, + "step": 1250 + }, + { + "epoch": 0.21469023511240776, + "grad_norm": 3.582896282984653, + "learning_rate": 1.826417655106666e-05, + "loss": 1.8604, + "step": 1251 + }, + { + "epoch": 
0.21486185000858074, + "grad_norm": 2.693191603662857, + "learning_rate": 1.826104559021135e-05, + "loss": 1.7705, + "step": 1252 + }, + { + "epoch": 0.21503346490475372, + "grad_norm": 2.5019122964623204, + "learning_rate": 1.825791207706353e-05, + "loss": 1.6929, + "step": 1253 + }, + { + "epoch": 0.2152050798009267, + "grad_norm": 2.3388420194713175, + "learning_rate": 1.8254776012591312e-05, + "loss": 1.7999, + "step": 1254 + }, + { + "epoch": 0.21537669469709972, + "grad_norm": 2.7547199322143108, + "learning_rate": 1.8251637397763597e-05, + "loss": 1.8926, + "step": 1255 + }, + { + "epoch": 0.2155483095932727, + "grad_norm": 2.348766823611328, + "learning_rate": 1.8248496233550076e-05, + "loss": 1.8291, + "step": 1256 + }, + { + "epoch": 0.21571992448944569, + "grad_norm": 1.9923310630157454, + "learning_rate": 1.824535252092123e-05, + "loss": 1.7714, + "step": 1257 + }, + { + "epoch": 0.21589153938561867, + "grad_norm": 2.2958424357813274, + "learning_rate": 1.8242206260848322e-05, + "loss": 1.7504, + "step": 1258 + }, + { + "epoch": 0.21606315428179165, + "grad_norm": 2.9922646790827128, + "learning_rate": 1.8239057454303406e-05, + "loss": 1.9608, + "step": 1259 + }, + { + "epoch": 0.21623476917796464, + "grad_norm": 2.586994668504713, + "learning_rate": 1.8235906102259326e-05, + "loss": 1.7195, + "step": 1260 + }, + { + "epoch": 0.21640638407413765, + "grad_norm": 2.640498253324088, + "learning_rate": 1.82327522056897e-05, + "loss": 1.8595, + "step": 1261 + }, + { + "epoch": 0.21657799897031063, + "grad_norm": 3.4541835233235725, + "learning_rate": 1.8229595765568953e-05, + "loss": 1.6397, + "step": 1262 + }, + { + "epoch": 0.21674961386648361, + "grad_norm": 2.5020780651740298, + "learning_rate": 1.8226436782872272e-05, + "loss": 1.9658, + "step": 1263 + }, + { + "epoch": 0.2169212287626566, + "grad_norm": 2.481859236058716, + "learning_rate": 1.822327525857565e-05, + "loss": 2.0094, + "step": 1264 + }, + { + "epoch": 0.21709284365882958, + "grad_norm": 3.332307394423074, + "learning_rate": 1.8220111193655847e-05, + "loss": 1.7374, + "step": 1265 + }, + { + "epoch": 0.21726445855500257, + "grad_norm": 2.971761817217155, + "learning_rate": 1.8216944589090424e-05, + "loss": 1.8184, + "step": 1266 + }, + { + "epoch": 0.21743607345117555, + "grad_norm": 3.3408218773785503, + "learning_rate": 1.8213775445857716e-05, + "loss": 2.0943, + "step": 1267 + }, + { + "epoch": 0.21760768834734856, + "grad_norm": 2.4465288144681248, + "learning_rate": 1.821060376493685e-05, + "loss": 1.8451, + "step": 1268 + }, + { + "epoch": 0.21777930324352154, + "grad_norm": 3.260912485465419, + "learning_rate": 1.820742954730773e-05, + "loss": 1.8552, + "step": 1269 + }, + { + "epoch": 0.21795091813969453, + "grad_norm": 2.4056093886019827, + "learning_rate": 1.8204252793951046e-05, + "loss": 1.8165, + "step": 1270 + }, + { + "epoch": 0.2181225330358675, + "grad_norm": 3.3773877831868293, + "learning_rate": 1.8201073505848273e-05, + "loss": 1.9007, + "step": 1271 + }, + { + "epoch": 0.2182941479320405, + "grad_norm": 3.116863891826737, + "learning_rate": 1.8197891683981673e-05, + "loss": 1.916, + "step": 1272 + }, + { + "epoch": 0.21846576282821348, + "grad_norm": 4.077474826951421, + "learning_rate": 1.8194707329334277e-05, + "loss": 1.7717, + "step": 1273 + }, + { + "epoch": 0.2186373777243865, + "grad_norm": 3.3668874942453626, + "learning_rate": 1.819152044288992e-05, + "loss": 1.8545, + "step": 1274 + }, + { + "epoch": 0.21880899262055947, + "grad_norm": 2.3761737828260365, + "learning_rate": 
1.8188331025633197e-05, + "loss": 1.9399, + "step": 1275 + }, + { + "epoch": 0.21898060751673246, + "grad_norm": 3.9393008530823654, + "learning_rate": 1.81851390785495e-05, + "loss": 1.4977, + "step": 1276 + }, + { + "epoch": 0.21915222241290544, + "grad_norm": 3.50209640033252, + "learning_rate": 1.818194460262499e-05, + "loss": 1.8757, + "step": 1277 + }, + { + "epoch": 0.21932383730907842, + "grad_norm": 2.403092229844327, + "learning_rate": 1.8178747598846627e-05, + "loss": 1.9168, + "step": 1278 + }, + { + "epoch": 0.2194954522052514, + "grad_norm": 5.331340745084284, + "learning_rate": 1.8175548068202138e-05, + "loss": 1.8661, + "step": 1279 + }, + { + "epoch": 0.2196670671014244, + "grad_norm": 3.3034480373973945, + "learning_rate": 1.817234601168003e-05, + "loss": 1.9273, + "step": 1280 + }, + { + "epoch": 0.2198386819975974, + "grad_norm": 2.4959495149983595, + "learning_rate": 1.81691414302696e-05, + "loss": 1.5519, + "step": 1281 + }, + { + "epoch": 0.22001029689377039, + "grad_norm": 3.6827836948667434, + "learning_rate": 1.816593432496092e-05, + "loss": 1.5516, + "step": 1282 + }, + { + "epoch": 0.22018191178994337, + "grad_norm": 2.973148329891034, + "learning_rate": 1.8162724696744837e-05, + "loss": 1.8896, + "step": 1283 + }, + { + "epoch": 0.22035352668611635, + "grad_norm": 3.194126737333955, + "learning_rate": 1.8159512546612984e-05, + "loss": 1.6747, + "step": 1284 + }, + { + "epoch": 0.22052514158228934, + "grad_norm": 2.6032348659036004, + "learning_rate": 1.8156297875557777e-05, + "loss": 1.6555, + "step": 1285 + }, + { + "epoch": 0.22069675647846232, + "grad_norm": 2.6929426894436177, + "learning_rate": 1.81530806845724e-05, + "loss": 1.8846, + "step": 1286 + }, + { + "epoch": 0.22086837137463533, + "grad_norm": 2.6280832287436175, + "learning_rate": 1.814986097465082e-05, + "loss": 1.6971, + "step": 1287 + }, + { + "epoch": 0.22103998627080831, + "grad_norm": 2.4539562110003357, + "learning_rate": 1.814663874678778e-05, + "loss": 1.8418, + "step": 1288 + }, + { + "epoch": 0.2212116011669813, + "grad_norm": 2.5155084452298118, + "learning_rate": 1.8143414001978813e-05, + "loss": 1.6249, + "step": 1289 + }, + { + "epoch": 0.22138321606315428, + "grad_norm": 2.119527510131496, + "learning_rate": 1.8140186741220214e-05, + "loss": 1.821, + "step": 1290 + }, + { + "epoch": 0.22155483095932726, + "grad_norm": 2.9593482572765653, + "learning_rate": 1.8136956965509064e-05, + "loss": 1.7918, + "step": 1291 + }, + { + "epoch": 0.22172644585550025, + "grad_norm": 2.6152796447587825, + "learning_rate": 1.813372467584322e-05, + "loss": 1.7522, + "step": 1292 + }, + { + "epoch": 0.22189806075167323, + "grad_norm": 2.895574453223862, + "learning_rate": 1.8130489873221307e-05, + "loss": 1.7293, + "step": 1293 + }, + { + "epoch": 0.22206967564784624, + "grad_norm": 2.4988734751677395, + "learning_rate": 1.8127252558642742e-05, + "loss": 1.9581, + "step": 1294 + }, + { + "epoch": 0.22224129054401923, + "grad_norm": 3.3725290672729584, + "learning_rate": 1.812401273310771e-05, + "loss": 1.6294, + "step": 1295 + }, + { + "epoch": 0.2224129054401922, + "grad_norm": 7.791377195449263, + "learning_rate": 1.8120770397617166e-05, + "loss": 1.8664, + "step": 1296 + }, + { + "epoch": 0.2225845203363652, + "grad_norm": 3.275834011748801, + "learning_rate": 1.8117525553172853e-05, + "loss": 1.8551, + "step": 1297 + }, + { + "epoch": 0.22275613523253818, + "grad_norm": 4.0812507031430645, + "learning_rate": 1.8114278200777278e-05, + "loss": 1.838, + "step": 1298 + }, + { + "epoch": 
0.22292775012871116, + "grad_norm": 2.7604762269869307, + "learning_rate": 1.8111028341433726e-05, + "loss": 1.6942, + "step": 1299 + }, + { + "epoch": 0.22309936502488417, + "grad_norm": 2.796400334012174, + "learning_rate": 1.810777597614626e-05, + "loss": 1.8064, + "step": 1300 + }, + { + "epoch": 0.22327097992105716, + "grad_norm": 2.772557371729177, + "learning_rate": 1.8104521105919715e-05, + "loss": 1.6805, + "step": 1301 + }, + { + "epoch": 0.22344259481723014, + "grad_norm": 4.088203707979652, + "learning_rate": 1.8101263731759703e-05, + "loss": 1.6589, + "step": 1302 + }, + { + "epoch": 0.22361420971340312, + "grad_norm": 2.8045707689302946, + "learning_rate": 1.8098003854672602e-05, + "loss": 1.7349, + "step": 1303 + }, + { + "epoch": 0.2237858246095761, + "grad_norm": 2.492347450003987, + "learning_rate": 1.809474147566557e-05, + "loss": 1.8442, + "step": 1304 + }, + { + "epoch": 0.2239574395057491, + "grad_norm": 2.353291139640294, + "learning_rate": 1.8091476595746528e-05, + "loss": 1.6864, + "step": 1305 + }, + { + "epoch": 0.22412905440192207, + "grad_norm": 2.7782602611246605, + "learning_rate": 1.8088209215924187e-05, + "loss": 1.9521, + "step": 1306 + }, + { + "epoch": 0.22430066929809508, + "grad_norm": 3.019808912758091, + "learning_rate": 1.808493933720802e-05, + "loss": 1.8772, + "step": 1307 + }, + { + "epoch": 0.22447228419426807, + "grad_norm": 2.6094264648910146, + "learning_rate": 1.8081666960608272e-05, + "loss": 1.7142, + "step": 1308 + }, + { + "epoch": 0.22464389909044105, + "grad_norm": 2.7692784413758047, + "learning_rate": 1.8078392087135957e-05, + "loss": 1.7153, + "step": 1309 + }, + { + "epoch": 0.22481551398661404, + "grad_norm": 2.929090732988897, + "learning_rate": 1.807511471780287e-05, + "loss": 1.8716, + "step": 1310 + }, + { + "epoch": 0.22498712888278702, + "grad_norm": 2.3056804310234993, + "learning_rate": 1.8071834853621568e-05, + "loss": 1.5846, + "step": 1311 + }, + { + "epoch": 0.22515874377896, + "grad_norm": 2.4403923199911794, + "learning_rate": 1.8068552495605375e-05, + "loss": 1.9402, + "step": 1312 + }, + { + "epoch": 0.225330358675133, + "grad_norm": 2.3832362798720834, + "learning_rate": 1.8065267644768406e-05, + "loss": 1.5751, + "step": 1313 + }, + { + "epoch": 0.225501973571306, + "grad_norm": 2.9258795068855523, + "learning_rate": 1.806198030212552e-05, + "loss": 1.7419, + "step": 1314 + }, + { + "epoch": 0.22567358846747898, + "grad_norm": 3.0735089245704237, + "learning_rate": 1.8058690468692366e-05, + "loss": 1.5883, + "step": 1315 + }, + { + "epoch": 0.22584520336365196, + "grad_norm": 2.639123869972019, + "learning_rate": 1.8055398145485354e-05, + "loss": 2.0046, + "step": 1316 + }, + { + "epoch": 0.22601681825982495, + "grad_norm": 2.512944399199607, + "learning_rate": 1.8052103333521664e-05, + "loss": 2.0952, + "step": 1317 + }, + { + "epoch": 0.22618843315599793, + "grad_norm": 2.6527425669107014, + "learning_rate": 1.804880603381924e-05, + "loss": 1.8944, + "step": 1318 + }, + { + "epoch": 0.22636004805217091, + "grad_norm": 4.923912428369115, + "learning_rate": 1.8045506247396804e-05, + "loss": 1.6397, + "step": 1319 + }, + { + "epoch": 0.22653166294834393, + "grad_norm": 3.0224951366991784, + "learning_rate": 1.8042203975273844e-05, + "loss": 1.7171, + "step": 1320 + }, + { + "epoch": 0.2267032778445169, + "grad_norm": 6.949072684049702, + "learning_rate": 1.803889921847061e-05, + "loss": 1.8849, + "step": 1321 + }, + { + "epoch": 0.2268748927406899, + "grad_norm": 3.072239052808573, + "learning_rate": 
1.8035591978008127e-05, + "loss": 1.7037, + "step": 1322 + }, + { + "epoch": 0.22704650763686288, + "grad_norm": 2.817660301036934, + "learning_rate": 1.803228225490818e-05, + "loss": 1.7688, + "step": 1323 + }, + { + "epoch": 0.22721812253303586, + "grad_norm": 2.8730499684429307, + "learning_rate": 1.8028970050193327e-05, + "loss": 1.7906, + "step": 1324 + }, + { + "epoch": 0.22738973742920884, + "grad_norm": 3.5997366282468355, + "learning_rate": 1.8025655364886896e-05, + "loss": 1.734, + "step": 1325 + }, + { + "epoch": 0.22756135232538185, + "grad_norm": 3.5787006512726056, + "learning_rate": 1.802233820001297e-05, + "loss": 1.9117, + "step": 1326 + }, + { + "epoch": 0.22773296722155484, + "grad_norm": 2.6218468563756883, + "learning_rate": 1.8019018556596402e-05, + "loss": 1.4182, + "step": 1327 + }, + { + "epoch": 0.22790458211772782, + "grad_norm": 2.5995495564158153, + "learning_rate": 1.8015696435662814e-05, + "loss": 1.6654, + "step": 1328 + }, + { + "epoch": 0.2280761970139008, + "grad_norm": 2.5899646383440054, + "learning_rate": 1.80123718382386e-05, + "loss": 1.8941, + "step": 1329 + }, + { + "epoch": 0.2282478119100738, + "grad_norm": 2.931079932476063, + "learning_rate": 1.8009044765350904e-05, + "loss": 2.008, + "step": 1330 + }, + { + "epoch": 0.22841942680624677, + "grad_norm": 3.229911226927399, + "learning_rate": 1.800571521802764e-05, + "loss": 1.9049, + "step": 1331 + }, + { + "epoch": 0.22859104170241978, + "grad_norm": 2.256689366904933, + "learning_rate": 1.8002383197297496e-05, + "loss": 1.6809, + "step": 1332 + }, + { + "epoch": 0.22876265659859277, + "grad_norm": 2.2728948666540827, + "learning_rate": 1.7999048704189914e-05, + "loss": 1.6756, + "step": 1333 + }, + { + "epoch": 0.22893427149476575, + "grad_norm": 2.5291378252916368, + "learning_rate": 1.79957117397351e-05, + "loss": 1.6907, + "step": 1334 + }, + { + "epoch": 0.22910588639093873, + "grad_norm": 2.55714269970868, + "learning_rate": 1.7992372304964036e-05, + "loss": 1.9827, + "step": 1335 + }, + { + "epoch": 0.22927750128711172, + "grad_norm": 2.433216575873825, + "learning_rate": 1.798903040090844e-05, + "loss": 1.7122, + "step": 1336 + }, + { + "epoch": 0.2294491161832847, + "grad_norm": 2.474618219899491, + "learning_rate": 1.798568602860083e-05, + "loss": 1.6089, + "step": 1337 + }, + { + "epoch": 0.22962073107945769, + "grad_norm": 2.734379193098395, + "learning_rate": 1.7982339189074456e-05, + "loss": 1.6649, + "step": 1338 + }, + { + "epoch": 0.2297923459756307, + "grad_norm": 3.5470203745874263, + "learning_rate": 1.7978989883363344e-05, + "loss": 1.5472, + "step": 1339 + }, + { + "epoch": 0.22996396087180368, + "grad_norm": 2.7103573954179203, + "learning_rate": 1.7975638112502274e-05, + "loss": 1.6411, + "step": 1340 + }, + { + "epoch": 0.23013557576797666, + "grad_norm": 2.699358838758311, + "learning_rate": 1.7972283877526798e-05, + "loss": 1.7652, + "step": 1341 + }, + { + "epoch": 0.23030719066414965, + "grad_norm": 3.3102573102159347, + "learning_rate": 1.7968927179473228e-05, + "loss": 1.8146, + "step": 1342 + }, + { + "epoch": 0.23047880556032263, + "grad_norm": 2.651795657014917, + "learning_rate": 1.7965568019378625e-05, + "loss": 1.8207, + "step": 1343 + }, + { + "epoch": 0.23065042045649561, + "grad_norm": 2.4361985608756638, + "learning_rate": 1.7962206398280823e-05, + "loss": 1.9642, + "step": 1344 + }, + { + "epoch": 0.23082203535266863, + "grad_norm": 6.106790097286908, + "learning_rate": 1.7958842317218413e-05, + "loss": 1.9332, + "step": 1345 + }, + { + "epoch": 
0.2309936502488416, + "grad_norm": 2.309904666725919, + "learning_rate": 1.7955475777230744e-05, + "loss": 1.6604, + "step": 1346 + }, + { + "epoch": 0.2311652651450146, + "grad_norm": 3.0506367973564976, + "learning_rate": 1.7952106779357922e-05, + "loss": 1.6837, + "step": 1347 + }, + { + "epoch": 0.23133688004118758, + "grad_norm": 2.335794128089584, + "learning_rate": 1.794873532464082e-05, + "loss": 1.6833, + "step": 1348 + }, + { + "epoch": 0.23150849493736056, + "grad_norm": 2.542333416599226, + "learning_rate": 1.7945361414121068e-05, + "loss": 1.9018, + "step": 1349 + }, + { + "epoch": 0.23168010983353354, + "grad_norm": 2.4958905772007705, + "learning_rate": 1.7941985048841052e-05, + "loss": 1.9232, + "step": 1350 + }, + { + "epoch": 0.23185172472970653, + "grad_norm": 4.471762978169001, + "learning_rate": 1.7938606229843913e-05, + "loss": 1.9173, + "step": 1351 + }, + { + "epoch": 0.23202333962587954, + "grad_norm": 2.5679115457969943, + "learning_rate": 1.793522495817356e-05, + "loss": 1.8792, + "step": 1352 + }, + { + "epoch": 0.23219495452205252, + "grad_norm": 3.6312333444594587, + "learning_rate": 1.793184123487465e-05, + "loss": 1.6292, + "step": 1353 + }, + { + "epoch": 0.2323665694182255, + "grad_norm": 2.4187153049270553, + "learning_rate": 1.7928455060992606e-05, + "loss": 1.5971, + "step": 1354 + }, + { + "epoch": 0.2325381843143985, + "grad_norm": 3.362483509005112, + "learning_rate": 1.79250664375736e-05, + "loss": 1.8521, + "step": 1355 + }, + { + "epoch": 0.23270979921057147, + "grad_norm": 2.5889713021074403, + "learning_rate": 1.7921675365664567e-05, + "loss": 1.7494, + "step": 1356 + }, + { + "epoch": 0.23288141410674446, + "grad_norm": 2.876865875021111, + "learning_rate": 1.79182818463132e-05, + "loss": 1.8587, + "step": 1357 + }, + { + "epoch": 0.23305302900291747, + "grad_norm": 2.468009015890249, + "learning_rate": 1.7914885880567933e-05, + "loss": 1.4021, + "step": 1358 + }, + { + "epoch": 0.23322464389909045, + "grad_norm": 2.8125242032724955, + "learning_rate": 1.7911487469477976e-05, + "loss": 1.8282, + "step": 1359 + }, + { + "epoch": 0.23339625879526343, + "grad_norm": 3.399451031904387, + "learning_rate": 1.7908086614093287e-05, + "loss": 1.7086, + "step": 1360 + }, + { + "epoch": 0.23356787369143642, + "grad_norm": 2.5880088775536074, + "learning_rate": 1.790468331546457e-05, + "loss": 1.8835, + "step": 1361 + }, + { + "epoch": 0.2337394885876094, + "grad_norm": 2.7956652592360864, + "learning_rate": 1.7901277574643294e-05, + "loss": 1.6929, + "step": 1362 + }, + { + "epoch": 0.23391110348378238, + "grad_norm": 2.3506902463350605, + "learning_rate": 1.7897869392681685e-05, + "loss": 1.7962, + "step": 1363 + }, + { + "epoch": 0.23408271837995537, + "grad_norm": 3.2541471863807496, + "learning_rate": 1.7894458770632715e-05, + "loss": 1.7539, + "step": 1364 + }, + { + "epoch": 0.23425433327612838, + "grad_norm": 3.10385362412242, + "learning_rate": 1.7891045709550113e-05, + "loss": 1.7094, + "step": 1365 + }, + { + "epoch": 0.23442594817230136, + "grad_norm": 2.7168375602859705, + "learning_rate": 1.788763021048836e-05, + "loss": 1.8563, + "step": 1366 + }, + { + "epoch": 0.23459756306847435, + "grad_norm": 4.079176469881752, + "learning_rate": 1.7884212274502697e-05, + "loss": 1.52, + "step": 1367 + }, + { + "epoch": 0.23476917796464733, + "grad_norm": 2.6009107561273628, + "learning_rate": 1.788079190264911e-05, + "loss": 1.6671, + "step": 1368 + }, + { + "epoch": 0.2349407928608203, + "grad_norm": 2.3774728943343106, + "learning_rate": 
1.7877369095984338e-05, + "loss": 1.8783, + "step": 1369 + }, + { + "epoch": 0.2351124077569933, + "grad_norm": 3.088598340648571, + "learning_rate": 1.7873943855565882e-05, + "loss": 1.8504, + "step": 1370 + }, + { + "epoch": 0.2352840226531663, + "grad_norm": 3.07208213209689, + "learning_rate": 1.787051618245198e-05, + "loss": 1.9409, + "step": 1371 + }, + { + "epoch": 0.2354556375493393, + "grad_norm": 3.513877457366281, + "learning_rate": 1.786708607770163e-05, + "loss": 1.6769, + "step": 1372 + }, + { + "epoch": 0.23562725244551228, + "grad_norm": 2.517107634291877, + "learning_rate": 1.7863653542374588e-05, + "loss": 1.4963, + "step": 1373 + }, + { + "epoch": 0.23579886734168526, + "grad_norm": 2.9849945508609, + "learning_rate": 1.7860218577531344e-05, + "loss": 1.6938, + "step": 1374 + }, + { + "epoch": 0.23597048223785824, + "grad_norm": 2.423453837335957, + "learning_rate": 1.7856781184233152e-05, + "loss": 1.6792, + "step": 1375 + }, + { + "epoch": 0.23614209713403123, + "grad_norm": 2.4427223482429743, + "learning_rate": 1.785334136354201e-05, + "loss": 1.6274, + "step": 1376 + }, + { + "epoch": 0.2363137120302042, + "grad_norm": 2.6957913548685277, + "learning_rate": 1.784989911652068e-05, + "loss": 2.2019, + "step": 1377 + }, + { + "epoch": 0.23648532692637722, + "grad_norm": 2.5341505572678917, + "learning_rate": 1.7846454444232643e-05, + "loss": 1.8136, + "step": 1378 + }, + { + "epoch": 0.2366569418225502, + "grad_norm": 2.3019151750662807, + "learning_rate": 1.7843007347742164e-05, + "loss": 1.7144, + "step": 1379 + }, + { + "epoch": 0.2368285567187232, + "grad_norm": 2.9669999947902537, + "learning_rate": 1.7839557828114233e-05, + "loss": 1.8496, + "step": 1380 + }, + { + "epoch": 0.23700017161489617, + "grad_norm": 2.27762542111608, + "learning_rate": 1.7836105886414596e-05, + "loss": 1.729, + "step": 1381 + }, + { + "epoch": 0.23717178651106915, + "grad_norm": 2.816074391136964, + "learning_rate": 1.7832651523709757e-05, + "loss": 1.8827, + "step": 1382 + }, + { + "epoch": 0.23734340140724214, + "grad_norm": 2.2802898280291872, + "learning_rate": 1.782919474106695e-05, + "loss": 1.6265, + "step": 1383 + }, + { + "epoch": 0.23751501630341515, + "grad_norm": 2.688405805464489, + "learning_rate": 1.7825735539554167e-05, + "loss": 1.9403, + "step": 1384 + }, + { + "epoch": 0.23768663119958813, + "grad_norm": 2.3731957502721763, + "learning_rate": 1.7822273920240148e-05, + "loss": 1.6611, + "step": 1385 + }, + { + "epoch": 0.23785824609576112, + "grad_norm": 2.651584382583664, + "learning_rate": 1.781880988419438e-05, + "loss": 1.8196, + "step": 1386 + }, + { + "epoch": 0.2380298609919341, + "grad_norm": 3.4841510671078098, + "learning_rate": 1.7815343432487094e-05, + "loss": 1.8005, + "step": 1387 + }, + { + "epoch": 0.23820147588810708, + "grad_norm": 2.368677978744536, + "learning_rate": 1.7811874566189267e-05, + "loss": 1.5585, + "step": 1388 + }, + { + "epoch": 0.23837309078428007, + "grad_norm": 2.6138080533155423, + "learning_rate": 1.7808403286372622e-05, + "loss": 2.0689, + "step": 1389 + }, + { + "epoch": 0.23854470568045305, + "grad_norm": 2.2824559511668276, + "learning_rate": 1.780492959410963e-05, + "loss": 1.7085, + "step": 1390 + }, + { + "epoch": 0.23871632057662606, + "grad_norm": 2.548842199787288, + "learning_rate": 1.7801453490473503e-05, + "loss": 1.8766, + "step": 1391 + }, + { + "epoch": 0.23888793547279905, + "grad_norm": 3.47246445505934, + "learning_rate": 1.7797974976538207e-05, + "loss": 1.8677, + "step": 1392 + }, + { + "epoch": 
0.23905955036897203, + "grad_norm": 3.026151651429602, + "learning_rate": 1.7794494053378445e-05, + "loss": 1.5491, + "step": 1393 + }, + { + "epoch": 0.239231165265145, + "grad_norm": 2.7824251939868105, + "learning_rate": 1.7791010722069662e-05, + "loss": 1.693, + "step": 1394 + }, + { + "epoch": 0.239402780161318, + "grad_norm": 2.9721199199088457, + "learning_rate": 1.7787524983688057e-05, + "loss": 1.6517, + "step": 1395 + }, + { + "epoch": 0.23957439505749098, + "grad_norm": 1.9539471315163741, + "learning_rate": 1.7784036839310558e-05, + "loss": 1.6446, + "step": 1396 + }, + { + "epoch": 0.239746009953664, + "grad_norm": 2.4267104977038256, + "learning_rate": 1.7780546290014858e-05, + "loss": 1.8531, + "step": 1397 + }, + { + "epoch": 0.23991762484983697, + "grad_norm": 2.7463526453657328, + "learning_rate": 1.777705333687937e-05, + "loss": 1.5894, + "step": 1398 + }, + { + "epoch": 0.24008923974600996, + "grad_norm": 2.6599455598055735, + "learning_rate": 1.7773557980983264e-05, + "loss": 1.9886, + "step": 1399 + }, + { + "epoch": 0.24026085464218294, + "grad_norm": 2.776970592981246, + "learning_rate": 1.7770060223406443e-05, + "loss": 1.8507, + "step": 1400 + }, + { + "epoch": 0.24043246953835593, + "grad_norm": 2.7128708669109134, + "learning_rate": 1.7766560065229564e-05, + "loss": 1.8109, + "step": 1401 + }, + { + "epoch": 0.2406040844345289, + "grad_norm": 2.644803081104447, + "learning_rate": 1.7763057507534016e-05, + "loss": 1.8833, + "step": 1402 + }, + { + "epoch": 0.2407756993307019, + "grad_norm": 2.987861621290512, + "learning_rate": 1.7759552551401932e-05, + "loss": 1.676, + "step": 1403 + }, + { + "epoch": 0.2409473142268749, + "grad_norm": 2.7283913541229157, + "learning_rate": 1.7756045197916188e-05, + "loss": 1.8763, + "step": 1404 + }, + { + "epoch": 0.2411189291230479, + "grad_norm": 2.4132102439700587, + "learning_rate": 1.7752535448160395e-05, + "loss": 1.8745, + "step": 1405 + }, + { + "epoch": 0.24129054401922087, + "grad_norm": 2.1056526579604204, + "learning_rate": 1.774902330321891e-05, + "loss": 1.7353, + "step": 1406 + }, + { + "epoch": 0.24146215891539385, + "grad_norm": 2.675575563607133, + "learning_rate": 1.774550876417683e-05, + "loss": 1.9649, + "step": 1407 + }, + { + "epoch": 0.24163377381156684, + "grad_norm": 2.3912318188900543, + "learning_rate": 1.7741991832119993e-05, + "loss": 1.7006, + "step": 1408 + }, + { + "epoch": 0.24180538870773982, + "grad_norm": 2.384030479853452, + "learning_rate": 1.7738472508134967e-05, + "loss": 1.6052, + "step": 1409 + }, + { + "epoch": 0.24197700360391283, + "grad_norm": 2.463324580430006, + "learning_rate": 1.7734950793309065e-05, + "loss": 1.6741, + "step": 1410 + }, + { + "epoch": 0.24214861850008582, + "grad_norm": 2.6220867721211496, + "learning_rate": 1.773142668873034e-05, + "loss": 1.8201, + "step": 1411 + }, + { + "epoch": 0.2423202333962588, + "grad_norm": 2.7285078368761875, + "learning_rate": 1.7727900195487588e-05, + "loss": 1.8388, + "step": 1412 + }, + { + "epoch": 0.24249184829243178, + "grad_norm": 2.3986630335661223, + "learning_rate": 1.772437131467033e-05, + "loss": 1.7534, + "step": 1413 + }, + { + "epoch": 0.24266346318860477, + "grad_norm": 3.1325205047159477, + "learning_rate": 1.7720840047368834e-05, + "loss": 1.9348, + "step": 1414 + }, + { + "epoch": 0.24283507808477775, + "grad_norm": 2.9702218771485476, + "learning_rate": 1.7717306394674104e-05, + "loss": 1.6474, + "step": 1415 + }, + { + "epoch": 0.24300669298095073, + "grad_norm": 2.339625482387283, + "learning_rate": 
1.771377035767788e-05, + "loss": 1.8135, + "step": 1416 + }, + { + "epoch": 0.24317830787712374, + "grad_norm": 2.802924814798818, + "learning_rate": 1.771023193747264e-05, + "loss": 1.8663, + "step": 1417 + }, + { + "epoch": 0.24334992277329673, + "grad_norm": 2.379822394146494, + "learning_rate": 1.7706691135151594e-05, + "loss": 1.8176, + "step": 1418 + }, + { + "epoch": 0.2435215376694697, + "grad_norm": 2.9006252523047418, + "learning_rate": 1.7703147951808695e-05, + "loss": 1.831, + "step": 1419 + }, + { + "epoch": 0.2436931525656427, + "grad_norm": 3.477911826741412, + "learning_rate": 1.7699602388538622e-05, + "loss": 1.8788, + "step": 1420 + }, + { + "epoch": 0.24386476746181568, + "grad_norm": 3.045349739992863, + "learning_rate": 1.7696054446436805e-05, + "loss": 1.8765, + "step": 1421 + }, + { + "epoch": 0.24403638235798866, + "grad_norm": 3.164984400246872, + "learning_rate": 1.769250412659939e-05, + "loss": 1.521, + "step": 1422 + }, + { + "epoch": 0.24420799725416167, + "grad_norm": 2.591559155834003, + "learning_rate": 1.768895143012327e-05, + "loss": 1.8061, + "step": 1423 + }, + { + "epoch": 0.24437961215033466, + "grad_norm": 2.5013907866691327, + "learning_rate": 1.7685396358106063e-05, + "loss": 1.6322, + "step": 1424 + }, + { + "epoch": 0.24455122704650764, + "grad_norm": 2.5616949021669635, + "learning_rate": 1.7681838911646133e-05, + "loss": 1.7826, + "step": 1425 + }, + { + "epoch": 0.24472284194268062, + "grad_norm": 2.544567571032494, + "learning_rate": 1.7678279091842575e-05, + "loss": 1.876, + "step": 1426 + }, + { + "epoch": 0.2448944568388536, + "grad_norm": 2.437683598841562, + "learning_rate": 1.7674716899795205e-05, + "loss": 1.635, + "step": 1427 + }, + { + "epoch": 0.2450660717350266, + "grad_norm": 2.1796824396879666, + "learning_rate": 1.7671152336604584e-05, + "loss": 1.7428, + "step": 1428 + }, + { + "epoch": 0.24523768663119958, + "grad_norm": 2.6926158715778503, + "learning_rate": 1.7667585403372003e-05, + "loss": 1.8446, + "step": 1429 + }, + { + "epoch": 0.2454093015273726, + "grad_norm": 4.587942031167987, + "learning_rate": 1.7664016101199484e-05, + "loss": 1.7406, + "step": 1430 + }, + { + "epoch": 0.24558091642354557, + "grad_norm": 2.978819599352636, + "learning_rate": 1.766044443118978e-05, + "loss": 1.8303, + "step": 1431 + }, + { + "epoch": 0.24575253131971855, + "grad_norm": 2.672755091915462, + "learning_rate": 1.7656870394446382e-05, + "loss": 1.8897, + "step": 1432 + }, + { + "epoch": 0.24592414621589154, + "grad_norm": 2.7354248383324737, + "learning_rate": 1.76532939920735e-05, + "loss": 1.8856, + "step": 1433 + }, + { + "epoch": 0.24609576111206452, + "grad_norm": 2.547167265850652, + "learning_rate": 1.7649715225176085e-05, + "loss": 1.8571, + "step": 1434 + }, + { + "epoch": 0.2462673760082375, + "grad_norm": 2.61646567103413, + "learning_rate": 1.7646134094859816e-05, + "loss": 1.4694, + "step": 1435 + }, + { + "epoch": 0.24643899090441052, + "grad_norm": 2.746827887403261, + "learning_rate": 1.7642550602231097e-05, + "loss": 1.6877, + "step": 1436 + }, + { + "epoch": 0.2466106058005835, + "grad_norm": 4.199810816255603, + "learning_rate": 1.7638964748397075e-05, + "loss": 1.6023, + "step": 1437 + }, + { + "epoch": 0.24678222069675648, + "grad_norm": 3.87698775931774, + "learning_rate": 1.7635376534465614e-05, + "loss": 1.9767, + "step": 1438 + }, + { + "epoch": 0.24695383559292947, + "grad_norm": 2.5567221580273247, + "learning_rate": 1.763178596154531e-05, + "loss": 1.7989, + "step": 1439 + }, + { + "epoch": 
0.24712545048910245, + "grad_norm": 3.5848240947039063, + "learning_rate": 1.7628193030745486e-05, + "loss": 1.6871, + "step": 1440 + }, + { + "epoch": 0.24729706538527543, + "grad_norm": 2.4514066562829138, + "learning_rate": 1.76245977431762e-05, + "loss": 1.8, + "step": 1441 + }, + { + "epoch": 0.24746868028144842, + "grad_norm": 2.299893926652702, + "learning_rate": 1.7621000099948237e-05, + "loss": 1.7775, + "step": 1442 + }, + { + "epoch": 0.24764029517762143, + "grad_norm": 2.727368128692727, + "learning_rate": 1.7617400102173107e-05, + "loss": 1.8309, + "step": 1443 + }, + { + "epoch": 0.2478119100737944, + "grad_norm": 3.4162596655754096, + "learning_rate": 1.761379775096304e-05, + "loss": 1.7726, + "step": 1444 + }, + { + "epoch": 0.2479835249699674, + "grad_norm": 2.247357177017449, + "learning_rate": 1.7610193047431013e-05, + "loss": 1.7182, + "step": 1445 + }, + { + "epoch": 0.24815513986614038, + "grad_norm": 2.4713394155198585, + "learning_rate": 1.760658599269071e-05, + "loss": 1.8464, + "step": 1446 + }, + { + "epoch": 0.24832675476231336, + "grad_norm": 2.280633934041197, + "learning_rate": 1.7602976587856547e-05, + "loss": 1.7349, + "step": 1447 + }, + { + "epoch": 0.24849836965848635, + "grad_norm": 2.6766718813593338, + "learning_rate": 1.759936483404367e-05, + "loss": 1.6545, + "step": 1448 + }, + { + "epoch": 0.24866998455465936, + "grad_norm": 2.2588082631025284, + "learning_rate": 1.7595750732367954e-05, + "loss": 1.7213, + "step": 1449 + }, + { + "epoch": 0.24884159945083234, + "grad_norm": 2.3445653041723125, + "learning_rate": 1.759213428394599e-05, + "loss": 1.7557, + "step": 1450 + }, + { + "epoch": 0.24901321434700532, + "grad_norm": 3.116227979413196, + "learning_rate": 1.75885154898951e-05, + "loss": 1.9927, + "step": 1451 + }, + { + "epoch": 0.2491848292431783, + "grad_norm": 2.1168598695968677, + "learning_rate": 1.7584894351333327e-05, + "loss": 1.6726, + "step": 1452 + }, + { + "epoch": 0.2493564441393513, + "grad_norm": 2.2926478860029573, + "learning_rate": 1.7581270869379443e-05, + "loss": 1.5857, + "step": 1453 + }, + { + "epoch": 0.24952805903552427, + "grad_norm": 2.7400574126544464, + "learning_rate": 1.757764504515294e-05, + "loss": 1.6226, + "step": 1454 + }, + { + "epoch": 0.24969967393169726, + "grad_norm": 3.4065919777432727, + "learning_rate": 1.7574016879774035e-05, + "loss": 1.7629, + "step": 1455 + }, + { + "epoch": 0.24987128882787027, + "grad_norm": 3.557445655104279, + "learning_rate": 1.7570386374363665e-05, + "loss": 1.6168, + "step": 1456 + }, + { + "epoch": 0.25004290372404325, + "grad_norm": 2.3393883174838495, + "learning_rate": 1.7566753530043502e-05, + "loss": 1.9891, + "step": 1457 + }, + { + "epoch": 0.2502145186202162, + "grad_norm": 3.2523515815884543, + "learning_rate": 1.7563118347935924e-05, + "loss": 1.7324, + "step": 1458 + }, + { + "epoch": 0.2503861335163892, + "grad_norm": 3.8686911441253997, + "learning_rate": 1.755948082916404e-05, + "loss": 1.8791, + "step": 1459 + }, + { + "epoch": 0.25055774841256223, + "grad_norm": 2.205555296824094, + "learning_rate": 1.7555840974851684e-05, + "loss": 1.6512, + "step": 1460 + }, + { + "epoch": 0.2507293633087352, + "grad_norm": 2.629025346770507, + "learning_rate": 1.7552198786123403e-05, + "loss": 1.8813, + "step": 1461 + }, + { + "epoch": 0.2509009782049082, + "grad_norm": 2.6630022483785907, + "learning_rate": 1.7548554264104477e-05, + "loss": 1.8821, + "step": 1462 + }, + { + "epoch": 0.25107259310108115, + "grad_norm": 2.4017256577402164, + "learning_rate": 
1.754490740992089e-05, + "loss": 1.6335, + "step": 1463 + }, + { + "epoch": 0.25124420799725417, + "grad_norm": 2.5078923640316875, + "learning_rate": 1.7541258224699368e-05, + "loss": 1.7603, + "step": 1464 + }, + { + "epoch": 0.2514158228934272, + "grad_norm": 4.425776665753366, + "learning_rate": 1.7537606709567336e-05, + "loss": 1.7626, + "step": 1465 + }, + { + "epoch": 0.25158743778960013, + "grad_norm": 2.5935220142426223, + "learning_rate": 1.753395286565295e-05, + "loss": 1.8444, + "step": 1466 + }, + { + "epoch": 0.25175905268577314, + "grad_norm": 2.4942397685647872, + "learning_rate": 1.753029669408509e-05, + "loss": 1.7513, + "step": 1467 + }, + { + "epoch": 0.2519306675819461, + "grad_norm": 2.2868951218461118, + "learning_rate": 1.752663819599334e-05, + "loss": 1.6668, + "step": 1468 + }, + { + "epoch": 0.2521022824781191, + "grad_norm": 2.9498312765626884, + "learning_rate": 1.752297737250802e-05, + "loss": 1.8869, + "step": 1469 + }, + { + "epoch": 0.25227389737429207, + "grad_norm": 2.654651176624841, + "learning_rate": 1.7519314224760154e-05, + "loss": 1.6669, + "step": 1470 + }, + { + "epoch": 0.2524455122704651, + "grad_norm": 2.9431139101478765, + "learning_rate": 1.7515648753881495e-05, + "loss": 1.8762, + "step": 1471 + }, + { + "epoch": 0.2526171271666381, + "grad_norm": 2.3754066326325436, + "learning_rate": 1.7511980961004503e-05, + "loss": 1.5924, + "step": 1472 + }, + { + "epoch": 0.25278874206281104, + "grad_norm": 3.917715848659881, + "learning_rate": 1.7508310847262365e-05, + "loss": 1.7453, + "step": 1473 + }, + { + "epoch": 0.25296035695898406, + "grad_norm": 2.4358327704685068, + "learning_rate": 1.7504638413788987e-05, + "loss": 1.7729, + "step": 1474 + }, + { + "epoch": 0.253131971855157, + "grad_norm": 3.2779406397859923, + "learning_rate": 1.750096366171898e-05, + "loss": 1.7636, + "step": 1475 + }, + { + "epoch": 0.25330358675133, + "grad_norm": 2.4807346281284284, + "learning_rate": 1.749728659218768e-05, + "loss": 1.8396, + "step": 1476 + }, + { + "epoch": 0.253475201647503, + "grad_norm": 2.45213241820684, + "learning_rate": 1.749360720633113e-05, + "loss": 1.7953, + "step": 1477 + }, + { + "epoch": 0.253646816543676, + "grad_norm": 3.4834473154388794, + "learning_rate": 1.7489925505286105e-05, + "loss": 1.8063, + "step": 1478 + }, + { + "epoch": 0.253818431439849, + "grad_norm": 3.036110684046407, + "learning_rate": 1.748624149019008e-05, + "loss": 1.6119, + "step": 1479 + }, + { + "epoch": 0.25399004633602196, + "grad_norm": 3.069745673025301, + "learning_rate": 1.748255516218125e-05, + "loss": 1.6641, + "step": 1480 + }, + { + "epoch": 0.25416166123219497, + "grad_norm": 2.6950722319517, + "learning_rate": 1.7478866522398527e-05, + "loss": 1.7586, + "step": 1481 + }, + { + "epoch": 0.2543332761283679, + "grad_norm": 3.101920783319619, + "learning_rate": 1.7475175571981535e-05, + "loss": 2.0566, + "step": 1482 + }, + { + "epoch": 0.25450489102454094, + "grad_norm": 2.9183366539911493, + "learning_rate": 1.7471482312070612e-05, + "loss": 1.9166, + "step": 1483 + }, + { + "epoch": 0.2546765059207139, + "grad_norm": 2.5171956846663837, + "learning_rate": 1.7467786743806808e-05, + "loss": 1.8213, + "step": 1484 + }, + { + "epoch": 0.2548481208168869, + "grad_norm": 3.1250940964360447, + "learning_rate": 1.746408886833189e-05, + "loss": 1.5925, + "step": 1485 + }, + { + "epoch": 0.2550197357130599, + "grad_norm": 2.6527166911987687, + "learning_rate": 1.7460388686788334e-05, + "loss": 1.7109, + "step": 1486 + }, + { + "epoch": 0.25519135060923287, + 
"grad_norm": 2.490174400317957, + "learning_rate": 1.745668620031933e-05, + "loss": 1.4786, + "step": 1487 + }, + { + "epoch": 0.2553629655054059, + "grad_norm": 3.1067721861369773, + "learning_rate": 1.7452981410068784e-05, + "loss": 1.7134, + "step": 1488 + }, + { + "epoch": 0.25553458040157884, + "grad_norm": 3.088538156934454, + "learning_rate": 1.7449274317181304e-05, + "loss": 1.831, + "step": 1489 + }, + { + "epoch": 0.25570619529775185, + "grad_norm": 2.6600478300050976, + "learning_rate": 1.7445564922802222e-05, + "loss": 1.7196, + "step": 1490 + }, + { + "epoch": 0.25587781019392486, + "grad_norm": 2.9236034817135077, + "learning_rate": 1.7441853228077564e-05, + "loss": 1.7451, + "step": 1491 + }, + { + "epoch": 0.2560494250900978, + "grad_norm": 3.6463660055971, + "learning_rate": 1.743813923415409e-05, + "loss": 1.8362, + "step": 1492 + }, + { + "epoch": 0.2562210399862708, + "grad_norm": 4.043323228103443, + "learning_rate": 1.743442294217925e-05, + "loss": 1.6671, + "step": 1493 + }, + { + "epoch": 0.2563926548824438, + "grad_norm": 2.8808692502806204, + "learning_rate": 1.7430704353301217e-05, + "loss": 1.8547, + "step": 1494 + }, + { + "epoch": 0.2565642697786168, + "grad_norm": 2.939420360005542, + "learning_rate": 1.742698346866886e-05, + "loss": 1.9184, + "step": 1495 + }, + { + "epoch": 0.25673588467478975, + "grad_norm": 3.2706136908468704, + "learning_rate": 1.742326028943177e-05, + "loss": 1.7767, + "step": 1496 + }, + { + "epoch": 0.25690749957096276, + "grad_norm": 3.350443254319093, + "learning_rate": 1.7419534816740245e-05, + "loss": 1.9367, + "step": 1497 + }, + { + "epoch": 0.25707911446713577, + "grad_norm": 2.868776685089619, + "learning_rate": 1.7415807051745283e-05, + "loss": 1.7656, + "step": 1498 + }, + { + "epoch": 0.2572507293633087, + "grad_norm": 3.3845682656877805, + "learning_rate": 1.7412076995598603e-05, + "loss": 1.7656, + "step": 1499 + }, + { + "epoch": 0.25742234425948174, + "grad_norm": 2.7619366802629917, + "learning_rate": 1.7408344649452617e-05, + "loss": 1.967, + "step": 1500 + }, + { + "epoch": 0.2575939591556547, + "grad_norm": 2.6326147345309248, + "learning_rate": 1.740461001446046e-05, + "loss": 1.7066, + "step": 1501 + }, + { + "epoch": 0.2577655740518277, + "grad_norm": 2.466792432066721, + "learning_rate": 1.740087309177596e-05, + "loss": 1.6788, + "step": 1502 + }, + { + "epoch": 0.25793718894800066, + "grad_norm": 3.3394562383508486, + "learning_rate": 1.7397133882553664e-05, + "loss": 1.6711, + "step": 1503 + }, + { + "epoch": 0.2581088038441737, + "grad_norm": 3.1221146038995684, + "learning_rate": 1.7393392387948818e-05, + "loss": 1.6528, + "step": 1504 + }, + { + "epoch": 0.2582804187403467, + "grad_norm": 2.4062463872397846, + "learning_rate": 1.738964860911738e-05, + "loss": 1.6402, + "step": 1505 + }, + { + "epoch": 0.25845203363651964, + "grad_norm": 2.5417128723454363, + "learning_rate": 1.7385902547215998e-05, + "loss": 1.5287, + "step": 1506 + }, + { + "epoch": 0.25862364853269265, + "grad_norm": 3.3850179167964725, + "learning_rate": 1.738215420340205e-05, + "loss": 1.9801, + "step": 1507 + }, + { + "epoch": 0.2587952634288656, + "grad_norm": 2.7775339780385013, + "learning_rate": 1.7378403578833596e-05, + "loss": 1.9936, + "step": 1508 + }, + { + "epoch": 0.2589668783250386, + "grad_norm": 2.633031896440434, + "learning_rate": 1.7374650674669418e-05, + "loss": 1.7873, + "step": 1509 + }, + { + "epoch": 0.2591384932212116, + "grad_norm": 3.9769145239712183, + "learning_rate": 1.7370895492068994e-05, + "loss": 1.7204, 
+ "step": 1510 + }, + { + "epoch": 0.2593101081173846, + "grad_norm": 2.6529149414305078, + "learning_rate": 1.7367138032192503e-05, + "loss": 1.8777, + "step": 1511 + }, + { + "epoch": 0.2594817230135576, + "grad_norm": 3.416918983967256, + "learning_rate": 1.7363378296200835e-05, + "loss": 1.8218, + "step": 1512 + }, + { + "epoch": 0.25965333790973055, + "grad_norm": 3.5573971863837963, + "learning_rate": 1.735961628525558e-05, + "loss": 1.8993, + "step": 1513 + }, + { + "epoch": 0.25982495280590356, + "grad_norm": 2.583276001721227, + "learning_rate": 1.735585200051903e-05, + "loss": 1.9064, + "step": 1514 + }, + { + "epoch": 0.2599965677020765, + "grad_norm": 2.4255648144395763, + "learning_rate": 1.735208544315418e-05, + "loss": 1.4997, + "step": 1515 + }, + { + "epoch": 0.26016818259824953, + "grad_norm": 2.6465476156962655, + "learning_rate": 1.7348316614324724e-05, + "loss": 1.758, + "step": 1516 + }, + { + "epoch": 0.26033979749442254, + "grad_norm": 2.8495224000784254, + "learning_rate": 1.7344545515195067e-05, + "loss": 1.7262, + "step": 1517 + }, + { + "epoch": 0.2605114123905955, + "grad_norm": 2.648169152536129, + "learning_rate": 1.73407721469303e-05, + "loss": 1.7133, + "step": 1518 + }, + { + "epoch": 0.2606830272867685, + "grad_norm": 2.321574008172646, + "learning_rate": 1.7336996510696238e-05, + "loss": 1.7457, + "step": 1519 + }, + { + "epoch": 0.26085464218294147, + "grad_norm": 3.7261904644744863, + "learning_rate": 1.7333218607659374e-05, + "loss": 1.9682, + "step": 1520 + }, + { + "epoch": 0.2610262570791145, + "grad_norm": 3.114392867977243, + "learning_rate": 1.732943843898691e-05, + "loss": 1.9151, + "step": 1521 + }, + { + "epoch": 0.26119787197528743, + "grad_norm": 2.902245709693817, + "learning_rate": 1.7325656005846755e-05, + "loss": 1.96, + "step": 1522 + }, + { + "epoch": 0.26136948687146044, + "grad_norm": 3.898436401554328, + "learning_rate": 1.732187130940751e-05, + "loss": 1.8937, + "step": 1523 + }, + { + "epoch": 0.26154110176763345, + "grad_norm": 3.0327781734508923, + "learning_rate": 1.7318084350838467e-05, + "loss": 1.8831, + "step": 1524 + }, + { + "epoch": 0.2617127166638064, + "grad_norm": 2.601655199053984, + "learning_rate": 1.731429513130964e-05, + "loss": 1.805, + "step": 1525 + }, + { + "epoch": 0.2618843315599794, + "grad_norm": 2.6124111405189185, + "learning_rate": 1.7310503651991717e-05, + "loss": 1.6163, + "step": 1526 + }, + { + "epoch": 0.2620559464561524, + "grad_norm": 3.1563553182485857, + "learning_rate": 1.7306709914056096e-05, + "loss": 1.9093, + "step": 1527 + }, + { + "epoch": 0.2622275613523254, + "grad_norm": 3.193772459653954, + "learning_rate": 1.730291391867488e-05, + "loss": 1.8741, + "step": 1528 + }, + { + "epoch": 0.26239917624849834, + "grad_norm": 2.625082431929434, + "learning_rate": 1.7299115667020858e-05, + "loss": 1.5032, + "step": 1529 + }, + { + "epoch": 0.26257079114467136, + "grad_norm": 3.0272481014923476, + "learning_rate": 1.7295315160267512e-05, + "loss": 1.9747, + "step": 1530 + }, + { + "epoch": 0.26274240604084437, + "grad_norm": 2.5129039812489125, + "learning_rate": 1.7291512399589038e-05, + "loss": 1.6281, + "step": 1531 + }, + { + "epoch": 0.2629140209370173, + "grad_norm": 3.2791597949419624, + "learning_rate": 1.728770738616031e-05, + "loss": 1.6892, + "step": 1532 + }, + { + "epoch": 0.26308563583319033, + "grad_norm": 2.957135847451443, + "learning_rate": 1.7283900121156915e-05, + "loss": 1.7248, + "step": 1533 + }, + { + "epoch": 0.2632572507293633, + "grad_norm": 2.998075608106472, + 
"learning_rate": 1.728009060575512e-05, + "loss": 1.6913, + "step": 1534 + }, + { + "epoch": 0.2634288656255363, + "grad_norm": 3.732057986843457, + "learning_rate": 1.7276278841131895e-05, + "loss": 1.6593, + "step": 1535 + }, + { + "epoch": 0.26360048052170926, + "grad_norm": 2.347499499080433, + "learning_rate": 1.727246482846491e-05, + "loss": 1.7998, + "step": 1536 + }, + { + "epoch": 0.26377209541788227, + "grad_norm": 2.522931851278705, + "learning_rate": 1.726864856893252e-05, + "loss": 1.7454, + "step": 1537 + }, + { + "epoch": 0.2639437103140553, + "grad_norm": 2.4481814095036527, + "learning_rate": 1.7264830063713777e-05, + "loss": 1.7267, + "step": 1538 + }, + { + "epoch": 0.26411532521022824, + "grad_norm": 2.6801758151298487, + "learning_rate": 1.726100931398843e-05, + "loss": 1.7726, + "step": 1539 + }, + { + "epoch": 0.26428694010640125, + "grad_norm": 3.1449830391147713, + "learning_rate": 1.7257186320936918e-05, + "loss": 1.7759, + "step": 1540 + }, + { + "epoch": 0.2644585550025742, + "grad_norm": 2.681847025467261, + "learning_rate": 1.7253361085740375e-05, + "loss": 1.6681, + "step": 1541 + }, + { + "epoch": 0.2646301698987472, + "grad_norm": 2.6528031377273837, + "learning_rate": 1.7249533609580625e-05, + "loss": 1.5982, + "step": 1542 + }, + { + "epoch": 0.2648017847949202, + "grad_norm": 3.280010562924151, + "learning_rate": 1.7245703893640188e-05, + "loss": 1.955, + "step": 1543 + }, + { + "epoch": 0.2649733996910932, + "grad_norm": 3.049427729239868, + "learning_rate": 1.7241871939102277e-05, + "loss": 1.6879, + "step": 1544 + }, + { + "epoch": 0.2651450145872662, + "grad_norm": 2.869023059104075, + "learning_rate": 1.723803774715079e-05, + "loss": 1.6638, + "step": 1545 + }, + { + "epoch": 0.26531662948343915, + "grad_norm": 3.3736156192315585, + "learning_rate": 1.7234201318970325e-05, + "loss": 1.6949, + "step": 1546 + }, + { + "epoch": 0.26548824437961216, + "grad_norm": 3.1229525212485427, + "learning_rate": 1.723036265574616e-05, + "loss": 1.9317, + "step": 1547 + }, + { + "epoch": 0.2656598592757851, + "grad_norm": 3.1484809692342415, + "learning_rate": 1.7226521758664273e-05, + "loss": 1.9209, + "step": 1548 + }, + { + "epoch": 0.2658314741719581, + "grad_norm": 3.462602389684985, + "learning_rate": 1.7222678628911335e-05, + "loss": 2.0217, + "step": 1549 + }, + { + "epoch": 0.26600308906813114, + "grad_norm": 2.5931798602928327, + "learning_rate": 1.7218833267674685e-05, + "loss": 1.7286, + "step": 1550 + }, + { + "epoch": 0.2661747039643041, + "grad_norm": 3.081804671422973, + "learning_rate": 1.7214985676142382e-05, + "loss": 1.841, + "step": 1551 + }, + { + "epoch": 0.2663463188604771, + "grad_norm": 3.1711487358492216, + "learning_rate": 1.7211135855503152e-05, + "loss": 1.7588, + "step": 1552 + }, + { + "epoch": 0.26651793375665006, + "grad_norm": 2.3529762976251503, + "learning_rate": 1.7207283806946422e-05, + "loss": 1.5071, + "step": 1553 + }, + { + "epoch": 0.26668954865282307, + "grad_norm": 3.014107897364318, + "learning_rate": 1.7203429531662298e-05, + "loss": 1.6855, + "step": 1554 + }, + { + "epoch": 0.266861163548996, + "grad_norm": 2.6722626682838655, + "learning_rate": 1.7199573030841577e-05, + "loss": 1.8494, + "step": 1555 + }, + { + "epoch": 0.26703277844516904, + "grad_norm": 3.067349525108301, + "learning_rate": 1.719571430567575e-05, + "loss": 1.6488, + "step": 1556 + }, + { + "epoch": 0.26720439334134205, + "grad_norm": 2.560878066453498, + "learning_rate": 1.7191853357356982e-05, + "loss": 1.5641, + "step": 1557 + }, + { + 
"epoch": 0.267376008237515, + "grad_norm": 3.5250648369135713, + "learning_rate": 1.718799018707814e-05, + "loss": 2.0262, + "step": 1558 + }, + { + "epoch": 0.267547623133688, + "grad_norm": 2.543328423488226, + "learning_rate": 1.718412479603277e-05, + "loss": 1.9759, + "step": 1559 + }, + { + "epoch": 0.267719238029861, + "grad_norm": 3.1583250595208927, + "learning_rate": 1.71802571854151e-05, + "loss": 1.8491, + "step": 1560 + }, + { + "epoch": 0.267890852926034, + "grad_norm": 3.2639542961971677, + "learning_rate": 1.717638735642005e-05, + "loss": 1.7902, + "step": 1561 + }, + { + "epoch": 0.26806246782220694, + "grad_norm": 2.5168345894504798, + "learning_rate": 1.7172515310243228e-05, + "loss": 1.8908, + "step": 1562 + }, + { + "epoch": 0.26823408271837995, + "grad_norm": 2.7279437081151534, + "learning_rate": 1.716864104808092e-05, + "loss": 1.916, + "step": 1563 + }, + { + "epoch": 0.26840569761455296, + "grad_norm": 2.60769802784938, + "learning_rate": 1.716476457113009e-05, + "loss": 1.5688, + "step": 1564 + }, + { + "epoch": 0.2685773125107259, + "grad_norm": 2.4160706593606527, + "learning_rate": 1.716088588058841e-05, + "loss": 1.6185, + "step": 1565 + }, + { + "epoch": 0.26874892740689893, + "grad_norm": 2.4514751729693276, + "learning_rate": 1.7157004977654216e-05, + "loss": 1.6715, + "step": 1566 + }, + { + "epoch": 0.2689205423030719, + "grad_norm": 2.92845139445762, + "learning_rate": 1.7153121863526526e-05, + "loss": 1.6777, + "step": 1567 + }, + { + "epoch": 0.2690921571992449, + "grad_norm": 2.68958774103703, + "learning_rate": 1.714923653940506e-05, + "loss": 1.7783, + "step": 1568 + }, + { + "epoch": 0.2692637720954179, + "grad_norm": 2.8310014903790264, + "learning_rate": 1.71453490064902e-05, + "loss": 1.7727, + "step": 1569 + }, + { + "epoch": 0.26943538699159086, + "grad_norm": 2.722793387256109, + "learning_rate": 1.714145926598302e-05, + "loss": 1.347, + "step": 1570 + }, + { + "epoch": 0.2696070018877639, + "grad_norm": 3.2556959061412947, + "learning_rate": 1.713756731908528e-05, + "loss": 1.7703, + "step": 1571 + }, + { + "epoch": 0.26977861678393683, + "grad_norm": 3.1541691642039655, + "learning_rate": 1.7133673166999414e-05, + "loss": 1.66, + "step": 1572 + }, + { + "epoch": 0.26995023168010984, + "grad_norm": 2.3661478153638447, + "learning_rate": 1.712977681092854e-05, + "loss": 1.9131, + "step": 1573 + }, + { + "epoch": 0.2701218465762828, + "grad_norm": 3.0677843253879384, + "learning_rate": 1.7125878252076458e-05, + "loss": 1.7586, + "step": 1574 + }, + { + "epoch": 0.2702934614724558, + "grad_norm": 5.631857968375433, + "learning_rate": 1.7121977491647647e-05, + "loss": 1.7252, + "step": 1575 + }, + { + "epoch": 0.2704650763686288, + "grad_norm": 2.8771813830397344, + "learning_rate": 1.711807453084727e-05, + "loss": 1.6886, + "step": 1576 + }, + { + "epoch": 0.2706366912648018, + "grad_norm": 2.7516856693041265, + "learning_rate": 1.7114169370881163e-05, + "loss": 1.7839, + "step": 1577 + }, + { + "epoch": 0.2708083061609748, + "grad_norm": 2.806482208721437, + "learning_rate": 1.7110262012955847e-05, + "loss": 1.7277, + "step": 1578 + }, + { + "epoch": 0.27097992105714774, + "grad_norm": 2.6109556523263624, + "learning_rate": 1.7106352458278524e-05, + "loss": 1.7107, + "step": 1579 + }, + { + "epoch": 0.27115153595332075, + "grad_norm": 3.2363262353487547, + "learning_rate": 1.7102440708057064e-05, + "loss": 1.905, + "step": 1580 + }, + { + "epoch": 0.2713231508494937, + "grad_norm": 2.7751988988842835, + "learning_rate": 1.7098526763500022e-05, 
+ "loss": 1.7993, + "step": 1581 + }, + { + "epoch": 0.2714947657456667, + "grad_norm": 3.0662848666488096, + "learning_rate": 1.7094610625816637e-05, + "loss": 1.7867, + "step": 1582 + }, + { + "epoch": 0.27166638064183973, + "grad_norm": 2.9289235256200197, + "learning_rate": 1.709069229621682e-05, + "loss": 1.7546, + "step": 1583 + }, + { + "epoch": 0.2718379955380127, + "grad_norm": 2.5006326172097872, + "learning_rate": 1.7086771775911156e-05, + "loss": 1.7583, + "step": 1584 + }, + { + "epoch": 0.2720096104341857, + "grad_norm": 4.074310350851328, + "learning_rate": 1.708284906611091e-05, + "loss": 1.8577, + "step": 1585 + }, + { + "epoch": 0.27218122533035866, + "grad_norm": 3.2317818211099216, + "learning_rate": 1.7078924168028025e-05, + "loss": 1.8612, + "step": 1586 + }, + { + "epoch": 0.27235284022653167, + "grad_norm": 2.170859712314514, + "learning_rate": 1.7074997082875113e-05, + "loss": 1.8406, + "step": 1587 + }, + { + "epoch": 0.2725244551227046, + "grad_norm": 2.3826014722338185, + "learning_rate": 1.7071067811865477e-05, + "loss": 1.7281, + "step": 1588 + }, + { + "epoch": 0.27269607001887763, + "grad_norm": 2.213821435848086, + "learning_rate": 1.706713635621308e-05, + "loss": 1.7968, + "step": 1589 + }, + { + "epoch": 0.27286768491505065, + "grad_norm": 2.473429836073356, + "learning_rate": 1.7063202717132562e-05, + "loss": 1.5221, + "step": 1590 + }, + { + "epoch": 0.2730392998112236, + "grad_norm": 3.117154404756111, + "learning_rate": 1.7059266895839245e-05, + "loss": 1.5559, + "step": 1591 + }, + { + "epoch": 0.2732109147073966, + "grad_norm": 3.178333493719182, + "learning_rate": 1.7055328893549123e-05, + "loss": 1.8261, + "step": 1592 + }, + { + "epoch": 0.27338252960356957, + "grad_norm": 2.696705231677164, + "learning_rate": 1.705138871147886e-05, + "loss": 1.8176, + "step": 1593 + }, + { + "epoch": 0.2735541444997426, + "grad_norm": 3.258694724397034, + "learning_rate": 1.7047446350845795e-05, + "loss": 1.8876, + "step": 1594 + }, + { + "epoch": 0.2737257593959156, + "grad_norm": 2.4654279160585757, + "learning_rate": 1.704350181286794e-05, + "loss": 1.8809, + "step": 1595 + }, + { + "epoch": 0.27389737429208855, + "grad_norm": 2.9227767149954764, + "learning_rate": 1.7039555098763983e-05, + "loss": 1.9477, + "step": 1596 + }, + { + "epoch": 0.27406898918826156, + "grad_norm": 4.082100811735219, + "learning_rate": 1.7035606209753276e-05, + "loss": 1.6407, + "step": 1597 + }, + { + "epoch": 0.2742406040844345, + "grad_norm": 2.774090413521391, + "learning_rate": 1.7031655147055855e-05, + "loss": 1.6077, + "step": 1598 + }, + { + "epoch": 0.2744122189806075, + "grad_norm": 3.0554846264110433, + "learning_rate": 1.702770191189242e-05, + "loss": 1.5991, + "step": 1599 + }, + { + "epoch": 0.2745838338767805, + "grad_norm": 2.489644653111394, + "learning_rate": 1.7023746505484337e-05, + "loss": 1.665, + "step": 1600 + }, + { + "epoch": 0.2747554487729535, + "grad_norm": 2.7572665668713787, + "learning_rate": 1.7019788929053658e-05, + "loss": 1.8244, + "step": 1601 + }, + { + "epoch": 0.2749270636691265, + "grad_norm": 3.106052522476244, + "learning_rate": 1.7015829183823094e-05, + "loss": 1.8388, + "step": 1602 + }, + { + "epoch": 0.27509867856529946, + "grad_norm": 2.8961483418989658, + "learning_rate": 1.7011867271016023e-05, + "loss": 1.798, + "step": 1603 + }, + { + "epoch": 0.27527029346147247, + "grad_norm": 2.3577958382625455, + "learning_rate": 1.7007903191856505e-05, + "loss": 1.7003, + "step": 1604 + }, + { + "epoch": 0.2754419083576454, + "grad_norm": 
3.037560530789631, + "learning_rate": 1.7003936947569263e-05, + "loss": 1.8744, + "step": 1605 + }, + { + "epoch": 0.27561352325381844, + "grad_norm": 2.647717286798901, + "learning_rate": 1.699996853937968e-05, + "loss": 2.0544, + "step": 1606 + }, + { + "epoch": 0.2757851381499914, + "grad_norm": 3.1170986190203176, + "learning_rate": 1.6995997968513823e-05, + "loss": 1.825, + "step": 1607 + }, + { + "epoch": 0.2759567530461644, + "grad_norm": 3.0588809610215266, + "learning_rate": 1.6992025236198425e-05, + "loss": 1.5916, + "step": 1608 + }, + { + "epoch": 0.2761283679423374, + "grad_norm": 2.823637237424643, + "learning_rate": 1.698805034366087e-05, + "loss": 1.8787, + "step": 1609 + }, + { + "epoch": 0.27629998283851037, + "grad_norm": 2.63789669727732, + "learning_rate": 1.6984073292129234e-05, + "loss": 1.6229, + "step": 1610 + }, + { + "epoch": 0.2764715977346834, + "grad_norm": 6.665079470920686, + "learning_rate": 1.6980094082832237e-05, + "loss": 1.8691, + "step": 1611 + }, + { + "epoch": 0.27664321263085634, + "grad_norm": 3.0561724574659372, + "learning_rate": 1.697611271699928e-05, + "loss": 1.8726, + "step": 1612 + }, + { + "epoch": 0.27681482752702935, + "grad_norm": 2.571808848759333, + "learning_rate": 1.6972129195860427e-05, + "loss": 1.9544, + "step": 1613 + }, + { + "epoch": 0.27698644242320236, + "grad_norm": 2.7739072655367214, + "learning_rate": 1.696814352064641e-05, + "loss": 1.9057, + "step": 1614 + }, + { + "epoch": 0.2771580573193753, + "grad_norm": 3.193789246699656, + "learning_rate": 1.696415569258862e-05, + "loss": 1.8527, + "step": 1615 + }, + { + "epoch": 0.27732967221554833, + "grad_norm": 3.704355570715207, + "learning_rate": 1.6960165712919115e-05, + "loss": 1.67, + "step": 1616 + }, + { + "epoch": 0.2775012871117213, + "grad_norm": 2.6885371927955024, + "learning_rate": 1.6956173582870627e-05, + "loss": 1.824, + "step": 1617 + }, + { + "epoch": 0.2776729020078943, + "grad_norm": 2.6720311768588503, + "learning_rate": 1.6952179303676537e-05, + "loss": 1.8115, + "step": 1618 + }, + { + "epoch": 0.27784451690406725, + "grad_norm": 2.6531640238603305, + "learning_rate": 1.6948182876570906e-05, + "loss": 1.7719, + "step": 1619 + }, + { + "epoch": 0.27801613180024026, + "grad_norm": 2.49510053108389, + "learning_rate": 1.6944184302788443e-05, + "loss": 1.8015, + "step": 1620 + }, + { + "epoch": 0.2781877466964133, + "grad_norm": 2.885702356718763, + "learning_rate": 1.6940183583564536e-05, + "loss": 1.8309, + "step": 1621 + }, + { + "epoch": 0.27835936159258623, + "grad_norm": 3.0244940282814157, + "learning_rate": 1.693618072013522e-05, + "loss": 1.8973, + "step": 1622 + }, + { + "epoch": 0.27853097648875924, + "grad_norm": 2.6813830208876706, + "learning_rate": 1.6932175713737207e-05, + "loss": 1.781, + "step": 1623 + }, + { + "epoch": 0.2787025913849322, + "grad_norm": 2.8750531359160023, + "learning_rate": 1.6928168565607857e-05, + "loss": 1.9627, + "step": 1624 + }, + { + "epoch": 0.2788742062811052, + "grad_norm": 3.3877500204967577, + "learning_rate": 1.6924159276985206e-05, + "loss": 1.7016, + "step": 1625 + }, + { + "epoch": 0.27904582117727816, + "grad_norm": 3.508382203916613, + "learning_rate": 1.692014784910794e-05, + "loss": 1.8295, + "step": 1626 + }, + { + "epoch": 0.2792174360734512, + "grad_norm": 2.4402284092690527, + "learning_rate": 1.6916134283215412e-05, + "loss": 1.6662, + "step": 1627 + }, + { + "epoch": 0.2793890509696242, + "grad_norm": 2.5523165571215287, + "learning_rate": 1.6912118580547633e-05, + "loss": 1.6677, + "step": 1628 
+ }, + { + "epoch": 0.27956066586579714, + "grad_norm": 2.8045635978020296, + "learning_rate": 1.690810074234528e-05, + "loss": 1.8417, + "step": 1629 + }, + { + "epoch": 0.27973228076197015, + "grad_norm": 3.1647165942502697, + "learning_rate": 1.6904080769849674e-05, + "loss": 1.5957, + "step": 1630 + }, + { + "epoch": 0.2799038956581431, + "grad_norm": 2.4324886715140694, + "learning_rate": 1.6900058664302814e-05, + "loss": 1.6026, + "step": 1631 + }, + { + "epoch": 0.2800755105543161, + "grad_norm": 2.489925365723236, + "learning_rate": 1.689603442694735e-05, + "loss": 1.8996, + "step": 1632 + }, + { + "epoch": 0.2802471254504891, + "grad_norm": 2.4716070950887077, + "learning_rate": 1.6892008059026587e-05, + "loss": 1.667, + "step": 1633 + }, + { + "epoch": 0.2804187403466621, + "grad_norm": 2.6020160833977743, + "learning_rate": 1.6887979561784496e-05, + "loss": 1.7024, + "step": 1634 + }, + { + "epoch": 0.2805903552428351, + "grad_norm": 2.5866720867462965, + "learning_rate": 1.6883948936465705e-05, + "loss": 1.6527, + "step": 1635 + }, + { + "epoch": 0.28076197013900805, + "grad_norm": 2.5099339412957296, + "learning_rate": 1.6879916184315485e-05, + "loss": 1.67, + "step": 1636 + }, + { + "epoch": 0.28093358503518107, + "grad_norm": 2.0252511010099923, + "learning_rate": 1.6875881306579784e-05, + "loss": 1.5212, + "step": 1637 + }, + { + "epoch": 0.281105199931354, + "grad_norm": 2.4233829720045352, + "learning_rate": 1.6871844304505197e-05, + "loss": 1.6831, + "step": 1638 + }, + { + "epoch": 0.28127681482752703, + "grad_norm": 2.890156029905627, + "learning_rate": 1.6867805179338977e-05, + "loss": 1.6371, + "step": 1639 + }, + { + "epoch": 0.28144842972370004, + "grad_norm": 3.068250416761238, + "learning_rate": 1.686376393232903e-05, + "loss": 1.8581, + "step": 1640 + }, + { + "epoch": 0.281620044619873, + "grad_norm": 2.493913046417287, + "learning_rate": 1.6859720564723925e-05, + "loss": 1.9127, + "step": 1641 + }, + { + "epoch": 0.281791659516046, + "grad_norm": 2.6231773084668464, + "learning_rate": 1.6855675077772875e-05, + "loss": 1.6763, + "step": 1642 + }, + { + "epoch": 0.28196327441221897, + "grad_norm": 2.337251737496812, + "learning_rate": 1.6851627472725758e-05, + "loss": 1.7893, + "step": 1643 + }, + { + "epoch": 0.282134889308392, + "grad_norm": 2.1506087144821358, + "learning_rate": 1.68475777508331e-05, + "loss": 1.88, + "step": 1644 + }, + { + "epoch": 0.28230650420456493, + "grad_norm": 2.3454309138999454, + "learning_rate": 1.6843525913346087e-05, + "loss": 1.6818, + "step": 1645 + }, + { + "epoch": 0.28247811910073795, + "grad_norm": 4.557589192260528, + "learning_rate": 1.683947196151655e-05, + "loss": 1.8218, + "step": 1646 + }, + { + "epoch": 0.28264973399691096, + "grad_norm": 2.764353171609156, + "learning_rate": 1.6835415896596984e-05, + "loss": 1.6638, + "step": 1647 + }, + { + "epoch": 0.2828213488930839, + "grad_norm": 3.051986174698658, + "learning_rate": 1.6831357719840523e-05, + "loss": 1.7782, + "step": 1648 + }, + { + "epoch": 0.2829929637892569, + "grad_norm": 2.5851504946670842, + "learning_rate": 1.682729743250097e-05, + "loss": 1.8075, + "step": 1649 + }, + { + "epoch": 0.2831645786854299, + "grad_norm": 2.6029778063290716, + "learning_rate": 1.682323503583277e-05, + "loss": 1.664, + "step": 1650 + }, + { + "epoch": 0.2833361935816029, + "grad_norm": 2.5199148605020203, + "learning_rate": 1.6819170531091018e-05, + "loss": 1.7638, + "step": 1651 + }, + { + "epoch": 0.28350780847777585, + "grad_norm": 2.516737709379798, + "learning_rate": 
1.6815103919531465e-05, + "loss": 1.6673, + "step": 1652 + }, + { + "epoch": 0.28367942337394886, + "grad_norm": 2.776563180780891, + "learning_rate": 1.6811035202410515e-05, + "loss": 1.4634, + "step": 1653 + }, + { + "epoch": 0.28385103827012187, + "grad_norm": 2.5234906313531704, + "learning_rate": 1.680696438098521e-05, + "loss": 1.6784, + "step": 1654 + }, + { + "epoch": 0.2840226531662948, + "grad_norm": 3.67380008187408, + "learning_rate": 1.6802891456513262e-05, + "loss": 1.6739, + "step": 1655 + }, + { + "epoch": 0.28419426806246784, + "grad_norm": 3.9941325004560295, + "learning_rate": 1.6798816430253014e-05, + "loss": 1.8279, + "step": 1656 + }, + { + "epoch": 0.2843658829586408, + "grad_norm": 2.368148211691596, + "learning_rate": 1.679473930346347e-05, + "loss": 1.6006, + "step": 1657 + }, + { + "epoch": 0.2845374978548138, + "grad_norm": 2.9245378767409167, + "learning_rate": 1.6790660077404282e-05, + "loss": 1.6498, + "step": 1658 + }, + { + "epoch": 0.28470911275098676, + "grad_norm": 3.9455610570534123, + "learning_rate": 1.678657875333574e-05, + "loss": 1.75, + "step": 1659 + }, + { + "epoch": 0.28488072764715977, + "grad_norm": 2.5422718575340446, + "learning_rate": 1.67824953325188e-05, + "loss": 1.5199, + "step": 1660 + }, + { + "epoch": 0.2850523425433328, + "grad_norm": 3.3288356163393438, + "learning_rate": 1.6778409816215046e-05, + "loss": 1.912, + "step": 1661 + }, + { + "epoch": 0.28522395743950574, + "grad_norm": 2.3863330644939516, + "learning_rate": 1.6774322205686725e-05, + "loss": 1.625, + "step": 1662 + }, + { + "epoch": 0.28539557233567875, + "grad_norm": 2.6525840724139904, + "learning_rate": 1.6770232502196722e-05, + "loss": 1.6626, + "step": 1663 + }, + { + "epoch": 0.2855671872318517, + "grad_norm": 3.029489306269049, + "learning_rate": 1.6766140707008577e-05, + "loss": 1.8314, + "step": 1664 + }, + { + "epoch": 0.2857388021280247, + "grad_norm": 3.5909002708597946, + "learning_rate": 1.6762046821386466e-05, + "loss": 1.8696, + "step": 1665 + }, + { + "epoch": 0.2859104170241977, + "grad_norm": 2.621637169884585, + "learning_rate": 1.675795084659522e-05, + "loss": 1.8434, + "step": 1666 + }, + { + "epoch": 0.2860820319203707, + "grad_norm": 2.831122436219414, + "learning_rate": 1.6753852783900306e-05, + "loss": 1.5602, + "step": 1667 + }, + { + "epoch": 0.2862536468165437, + "grad_norm": 5.808637009262689, + "learning_rate": 1.674975263456785e-05, + "loss": 1.8426, + "step": 1668 + }, + { + "epoch": 0.28642526171271665, + "grad_norm": 2.8667219509840685, + "learning_rate": 1.6745650399864608e-05, + "loss": 1.7443, + "step": 1669 + }, + { + "epoch": 0.28659687660888966, + "grad_norm": 3.3201404922473516, + "learning_rate": 1.6741546081057985e-05, + "loss": 1.8115, + "step": 1670 + }, + { + "epoch": 0.2867684915050626, + "grad_norm": 2.654697033579173, + "learning_rate": 1.673743967941604e-05, + "loss": 1.7685, + "step": 1671 + }, + { + "epoch": 0.28694010640123563, + "grad_norm": 2.6299824779881447, + "learning_rate": 1.6733331196207453e-05, + "loss": 1.4929, + "step": 1672 + }, + { + "epoch": 0.28711172129740864, + "grad_norm": 4.237803955494624, + "learning_rate": 1.6729220632701574e-05, + "loss": 2.1493, + "step": 1673 + }, + { + "epoch": 0.2872833361935816, + "grad_norm": 2.8000766703041045, + "learning_rate": 1.6725107990168382e-05, + "loss": 1.7877, + "step": 1674 + }, + { + "epoch": 0.2874549510897546, + "grad_norm": 2.9569304176937643, + "learning_rate": 1.6720993269878486e-05, + "loss": 1.7197, + "step": 1675 + }, + { + "epoch": 
0.28762656598592756, + "grad_norm": 2.563568384070091, + "learning_rate": 1.6716876473103165e-05, + "loss": 1.8082, + "step": 1676 + }, + { + "epoch": 0.2877981808821006, + "grad_norm": 2.7210295979197374, + "learning_rate": 1.6712757601114318e-05, + "loss": 1.8958, + "step": 1677 + }, + { + "epoch": 0.28796979577827353, + "grad_norm": 2.4161550886158056, + "learning_rate": 1.670863665518449e-05, + "loss": 1.7253, + "step": 1678 + }, + { + "epoch": 0.28814141067444654, + "grad_norm": 3.054748835078986, + "learning_rate": 1.670451363658687e-05, + "loss": 1.8488, + "step": 1679 + }, + { + "epoch": 0.28831302557061955, + "grad_norm": 2.9790578293152254, + "learning_rate": 1.670038854659529e-05, + "loss": 1.674, + "step": 1680 + }, + { + "epoch": 0.2884846404667925, + "grad_norm": 3.1763609316647625, + "learning_rate": 1.6696261386484215e-05, + "loss": 1.6418, + "step": 1681 + }, + { + "epoch": 0.2886562553629655, + "grad_norm": 2.346245286394302, + "learning_rate": 1.6692132157528748e-05, + "loss": 1.6809, + "step": 1682 + }, + { + "epoch": 0.2888278702591385, + "grad_norm": 2.3873415849252693, + "learning_rate": 1.6688000861004644e-05, + "loss": 1.7811, + "step": 1683 + }, + { + "epoch": 0.2889994851553115, + "grad_norm": 2.4419197472336562, + "learning_rate": 1.6683867498188277e-05, + "loss": 1.697, + "step": 1684 + }, + { + "epoch": 0.28917110005148444, + "grad_norm": 2.335833050990565, + "learning_rate": 1.6679732070356677e-05, + "loss": 1.5549, + "step": 1685 + }, + { + "epoch": 0.28934271494765745, + "grad_norm": 2.462280441941548, + "learning_rate": 1.6675594578787513e-05, + "loss": 1.5161, + "step": 1686 + }, + { + "epoch": 0.28951432984383046, + "grad_norm": 2.71573419847621, + "learning_rate": 1.667145502475907e-05, + "loss": 1.5512, + "step": 1687 + }, + { + "epoch": 0.2896859447400034, + "grad_norm": 3.1488969101767545, + "learning_rate": 1.6667313409550294e-05, + "loss": 1.5307, + "step": 1688 + }, + { + "epoch": 0.28985755963617643, + "grad_norm": 2.624929739552495, + "learning_rate": 1.666316973444076e-05, + "loss": 1.6547, + "step": 1689 + }, + { + "epoch": 0.2900291745323494, + "grad_norm": 3.4616030530486097, + "learning_rate": 1.6659024000710667e-05, + "loss": 1.6294, + "step": 1690 + }, + { + "epoch": 0.2902007894285224, + "grad_norm": 3.306567426869511, + "learning_rate": 1.6654876209640868e-05, + "loss": 1.7439, + "step": 1691 + }, + { + "epoch": 0.2903724043246954, + "grad_norm": 3.4716458157336962, + "learning_rate": 1.6650726362512846e-05, + "loss": 1.9092, + "step": 1692 + }, + { + "epoch": 0.29054401922086837, + "grad_norm": 2.7797580081438875, + "learning_rate": 1.6646574460608717e-05, + "loss": 1.7246, + "step": 1693 + }, + { + "epoch": 0.2907156341170414, + "grad_norm": 3.1061958235586906, + "learning_rate": 1.664242050521123e-05, + "loss": 1.9607, + "step": 1694 + }, + { + "epoch": 0.29088724901321433, + "grad_norm": 2.8194543111195305, + "learning_rate": 1.663826449760377e-05, + "loss": 1.6998, + "step": 1695 + }, + { + "epoch": 0.29105886390938734, + "grad_norm": 2.682792652281103, + "learning_rate": 1.6634106439070357e-05, + "loss": 1.8813, + "step": 1696 + }, + { + "epoch": 0.2912304788055603, + "grad_norm": 4.0851173561301595, + "learning_rate": 1.6629946330895647e-05, + "loss": 1.7592, + "step": 1697 + }, + { + "epoch": 0.2914020937017333, + "grad_norm": 2.7368511886836533, + "learning_rate": 1.662578417436493e-05, + "loss": 1.7512, + "step": 1698 + }, + { + "epoch": 0.2915737085979063, + "grad_norm": 2.6454508121555556, + "learning_rate": 
1.6621619970764117e-05, + "loss": 1.7595, + "step": 1699 + }, + { + "epoch": 0.2917453234940793, + "grad_norm": 3.133916992668201, + "learning_rate": 1.6617453721379763e-05, + "loss": 1.6196, + "step": 1700 + }, + { + "epoch": 0.2919169383902523, + "grad_norm": 2.3737226116721923, + "learning_rate": 1.661328542749906e-05, + "loss": 1.7328, + "step": 1701 + }, + { + "epoch": 0.29208855328642525, + "grad_norm": 2.395553125597497, + "learning_rate": 1.6609115090409814e-05, + "loss": 1.3693, + "step": 1702 + }, + { + "epoch": 0.29226016818259826, + "grad_norm": 2.3687314750831177, + "learning_rate": 1.6604942711400478e-05, + "loss": 1.5641, + "step": 1703 + }, + { + "epoch": 0.2924317830787712, + "grad_norm": 2.7890356531540714, + "learning_rate": 1.6600768291760124e-05, + "loss": 1.7778, + "step": 1704 + }, + { + "epoch": 0.2926033979749442, + "grad_norm": 2.8093810531534555, + "learning_rate": 1.659659183277847e-05, + "loss": 1.8098, + "step": 1705 + }, + { + "epoch": 0.29277501287111724, + "grad_norm": 2.8532103382386147, + "learning_rate": 1.6592413335745845e-05, + "loss": 1.9331, + "step": 1706 + }, + { + "epoch": 0.2929466277672902, + "grad_norm": 2.253964944522325, + "learning_rate": 1.6588232801953223e-05, + "loss": 1.7093, + "step": 1707 + }, + { + "epoch": 0.2931182426634632, + "grad_norm": 2.6162674723508275, + "learning_rate": 1.6584050232692198e-05, + "loss": 1.8555, + "step": 1708 + }, + { + "epoch": 0.29328985755963616, + "grad_norm": 3.127102212282499, + "learning_rate": 1.6579865629254994e-05, + "loss": 1.83, + "step": 1709 + }, + { + "epoch": 0.29346147245580917, + "grad_norm": 2.739809753957868, + "learning_rate": 1.6575678992934472e-05, + "loss": 1.6619, + "step": 1710 + }, + { + "epoch": 0.2936330873519821, + "grad_norm": 3.234910572210153, + "learning_rate": 1.6571490325024114e-05, + "loss": 1.693, + "step": 1711 + }, + { + "epoch": 0.29380470224815514, + "grad_norm": 2.591279935651367, + "learning_rate": 1.656729962681803e-05, + "loss": 1.8893, + "step": 1712 + }, + { + "epoch": 0.29397631714432815, + "grad_norm": 2.3652394414385993, + "learning_rate": 1.656310689961095e-05, + "loss": 1.6505, + "step": 1713 + }, + { + "epoch": 0.2941479320405011, + "grad_norm": 2.73000103313852, + "learning_rate": 1.6558912144698247e-05, + "loss": 1.8394, + "step": 1714 + }, + { + "epoch": 0.2943195469366741, + "grad_norm": 3.0177166287416166, + "learning_rate": 1.655471536337591e-05, + "loss": 1.6607, + "step": 1715 + }, + { + "epoch": 0.29449116183284707, + "grad_norm": 4.053047444419392, + "learning_rate": 1.655051655694056e-05, + "loss": 1.7652, + "step": 1716 + }, + { + "epoch": 0.2946627767290201, + "grad_norm": 2.33951076411324, + "learning_rate": 1.6546315726689432e-05, + "loss": 1.6842, + "step": 1717 + }, + { + "epoch": 0.2948343916251931, + "grad_norm": 2.5701408895094944, + "learning_rate": 1.65421128739204e-05, + "loss": 1.3195, + "step": 1718 + }, + { + "epoch": 0.29500600652136605, + "grad_norm": 3.0616093421212356, + "learning_rate": 1.6537907999931958e-05, + "loss": 2.0091, + "step": 1719 + }, + { + "epoch": 0.29517762141753906, + "grad_norm": 2.4744924345100214, + "learning_rate": 1.6533701106023216e-05, + "loss": 1.5621, + "step": 1720 + }, + { + "epoch": 0.295349236313712, + "grad_norm": 2.462587815356701, + "learning_rate": 1.6529492193493923e-05, + "loss": 1.6705, + "step": 1721 + }, + { + "epoch": 0.295520851209885, + "grad_norm": 2.36585828138444, + "learning_rate": 1.6525281263644437e-05, + "loss": 1.8422, + "step": 1722 + }, + { + "epoch": 0.295692466106058, + 
"grad_norm": 3.0389682522565264, + "learning_rate": 1.6521068317775756e-05, + "loss": 1.7972, + "step": 1723 + }, + { + "epoch": 0.295864081002231, + "grad_norm": 3.001886018804657, + "learning_rate": 1.6516853357189483e-05, + "loss": 1.7513, + "step": 1724 + }, + { + "epoch": 0.296035695898404, + "grad_norm": 2.6329209350646017, + "learning_rate": 1.6512636383187853e-05, + "loss": 1.8232, + "step": 1725 + }, + { + "epoch": 0.29620731079457696, + "grad_norm": 3.324148958321875, + "learning_rate": 1.6508417397073727e-05, + "loss": 1.8672, + "step": 1726 + }, + { + "epoch": 0.29637892569075, + "grad_norm": 2.790546686918112, + "learning_rate": 1.6504196400150578e-05, + "loss": 1.7081, + "step": 1727 + }, + { + "epoch": 0.29655054058692293, + "grad_norm": 2.1758807378295266, + "learning_rate": 1.6499973393722504e-05, + "loss": 1.8012, + "step": 1728 + }, + { + "epoch": 0.29672215548309594, + "grad_norm": 3.22765072068188, + "learning_rate": 1.6495748379094227e-05, + "loss": 1.725, + "step": 1729 + }, + { + "epoch": 0.2968937703792689, + "grad_norm": 2.6520320172964196, + "learning_rate": 1.6491521357571086e-05, + "loss": 1.729, + "step": 1730 + }, + { + "epoch": 0.2970653852754419, + "grad_norm": 2.627842688883535, + "learning_rate": 1.6487292330459047e-05, + "loss": 1.7606, + "step": 1731 + }, + { + "epoch": 0.2972370001716149, + "grad_norm": 2.207401780762021, + "learning_rate": 1.648306129906468e-05, + "loss": 1.4981, + "step": 1732 + }, + { + "epoch": 0.2974086150677879, + "grad_norm": 2.536885985861452, + "learning_rate": 1.6478828264695183e-05, + "loss": 1.6621, + "step": 1733 + }, + { + "epoch": 0.2975802299639609, + "grad_norm": 4.213339771160178, + "learning_rate": 1.647459322865838e-05, + "loss": 1.4314, + "step": 1734 + }, + { + "epoch": 0.29775184486013384, + "grad_norm": 3.15147452213016, + "learning_rate": 1.647035619226271e-05, + "loss": 1.9904, + "step": 1735 + }, + { + "epoch": 0.29792345975630685, + "grad_norm": 3.0136237246467283, + "learning_rate": 1.646611715681722e-05, + "loss": 1.8061, + "step": 1736 + }, + { + "epoch": 0.2980950746524798, + "grad_norm": 2.5644202184853824, + "learning_rate": 1.6461876123631587e-05, + "loss": 1.7583, + "step": 1737 + }, + { + "epoch": 0.2982666895486528, + "grad_norm": 4.084269607841627, + "learning_rate": 1.64576330940161e-05, + "loss": 1.8055, + "step": 1738 + }, + { + "epoch": 0.29843830444482583, + "grad_norm": 3.278608000435779, + "learning_rate": 1.645338806928166e-05, + "loss": 1.4704, + "step": 1739 + }, + { + "epoch": 0.2986099193409988, + "grad_norm": 2.774297752748693, + "learning_rate": 1.644914105073979e-05, + "loss": 1.724, + "step": 1740 + }, + { + "epoch": 0.2987815342371718, + "grad_norm": 2.9731654672955283, + "learning_rate": 1.644489203970263e-05, + "loss": 2.0269, + "step": 1741 + }, + { + "epoch": 0.29895314913334475, + "grad_norm": 2.442395412713044, + "learning_rate": 1.6440641037482934e-05, + "loss": 1.5501, + "step": 1742 + }, + { + "epoch": 0.29912476402951776, + "grad_norm": 3.259578702903151, + "learning_rate": 1.6436388045394072e-05, + "loss": 1.7035, + "step": 1743 + }, + { + "epoch": 0.2992963789256908, + "grad_norm": 3.204422537908801, + "learning_rate": 1.6432133064750026e-05, + "loss": 1.8385, + "step": 1744 + }, + { + "epoch": 0.29946799382186373, + "grad_norm": 6.683879740831648, + "learning_rate": 1.6427876096865394e-05, + "loss": 1.6804, + "step": 1745 + }, + { + "epoch": 0.29963960871803674, + "grad_norm": 3.127057468542608, + "learning_rate": 1.642361714305539e-05, + "loss": 1.674, + "step": 1746 
+ }, + { + "epoch": 0.2998112236142097, + "grad_norm": 2.718917309799963, + "learning_rate": 1.641935620463584e-05, + "loss": 1.7098, + "step": 1747 + }, + { + "epoch": 0.2999828385103827, + "grad_norm": 2.550536402175234, + "learning_rate": 1.641509328292318e-05, + "loss": 1.6888, + "step": 1748 + }, + { + "epoch": 0.30015445340655567, + "grad_norm": 3.3331862042007745, + "learning_rate": 1.6410828379234464e-05, + "loss": 1.9296, + "step": 1749 + }, + { + "epoch": 0.3003260683027287, + "grad_norm": 2.9491079315939768, + "learning_rate": 1.640656149488735e-05, + "loss": 1.8108, + "step": 1750 + }, + { + "epoch": 0.3004976831989017, + "grad_norm": 3.1241231978724366, + "learning_rate": 1.640229263120012e-05, + "loss": 1.7843, + "step": 1751 + }, + { + "epoch": 0.30066929809507464, + "grad_norm": 3.121422839882914, + "learning_rate": 1.6398021789491663e-05, + "loss": 1.6403, + "step": 1752 + }, + { + "epoch": 0.30084091299124766, + "grad_norm": 3.6887422859317414, + "learning_rate": 1.6393748971081475e-05, + "loss": 1.7536, + "step": 1753 + }, + { + "epoch": 0.3010125278874206, + "grad_norm": 2.587404898098536, + "learning_rate": 1.6389474177289663e-05, + "loss": 1.471, + "step": 1754 + }, + { + "epoch": 0.3011841427835936, + "grad_norm": 2.2049938633288613, + "learning_rate": 1.6385197409436952e-05, + "loss": 1.4469, + "step": 1755 + }, + { + "epoch": 0.3013557576797666, + "grad_norm": 2.6534542979014883, + "learning_rate": 1.6380918668844663e-05, + "loss": 1.7421, + "step": 1756 + }, + { + "epoch": 0.3015273725759396, + "grad_norm": 2.5086732034298667, + "learning_rate": 1.6376637956834743e-05, + "loss": 1.4126, + "step": 1757 + }, + { + "epoch": 0.3016989874721126, + "grad_norm": 3.037704432094303, + "learning_rate": 1.6372355274729735e-05, + "loss": 1.6292, + "step": 1758 + }, + { + "epoch": 0.30187060236828556, + "grad_norm": 2.3380990696325465, + "learning_rate": 1.6368070623852794e-05, + "loss": 1.8651, + "step": 1759 + }, + { + "epoch": 0.30204221726445857, + "grad_norm": 2.472032221925406, + "learning_rate": 1.6363784005527693e-05, + "loss": 1.6272, + "step": 1760 + }, + { + "epoch": 0.3022138321606315, + "grad_norm": 2.63741323464442, + "learning_rate": 1.6359495421078802e-05, + "loss": 1.9139, + "step": 1761 + }, + { + "epoch": 0.30238544705680453, + "grad_norm": 4.346864817646478, + "learning_rate": 1.6355204871831094e-05, + "loss": 1.9148, + "step": 1762 + }, + { + "epoch": 0.3025570619529775, + "grad_norm": 2.82903428799297, + "learning_rate": 1.635091235911016e-05, + "loss": 1.8922, + "step": 1763 + }, + { + "epoch": 0.3027286768491505, + "grad_norm": 2.7497813392873707, + "learning_rate": 1.6346617884242204e-05, + "loss": 1.6227, + "step": 1764 + }, + { + "epoch": 0.3029002917453235, + "grad_norm": 2.730719694488057, + "learning_rate": 1.634232144855401e-05, + "loss": 1.788, + "step": 1765 + }, + { + "epoch": 0.30307190664149647, + "grad_norm": 2.668694156588302, + "learning_rate": 1.6338023053372994e-05, + "loss": 1.6601, + "step": 1766 + }, + { + "epoch": 0.3032435215376695, + "grad_norm": 2.569734190081979, + "learning_rate": 1.6333722700027163e-05, + "loss": 1.656, + "step": 1767 + }, + { + "epoch": 0.30341513643384244, + "grad_norm": 3.060594153632445, + "learning_rate": 1.632942038984514e-05, + "loss": 1.6883, + "step": 1768 + }, + { + "epoch": 0.30358675133001545, + "grad_norm": 2.382066490108781, + "learning_rate": 1.6325116124156136e-05, + "loss": 1.7599, + "step": 1769 + }, + { + "epoch": 0.30375836622618846, + "grad_norm": 2.4498203789125514, + "learning_rate": 
1.6320809904289982e-05, + "loss": 1.5753, + "step": 1770 + }, + { + "epoch": 0.3039299811223614, + "grad_norm": 3.588753215770666, + "learning_rate": 1.6316501731577103e-05, + "loss": 1.6, + "step": 1771 + }, + { + "epoch": 0.3041015960185344, + "grad_norm": 2.764391826647667, + "learning_rate": 1.6312191607348537e-05, + "loss": 1.9093, + "step": 1772 + }, + { + "epoch": 0.3042732109147074, + "grad_norm": 2.8390715985245527, + "learning_rate": 1.6307879532935915e-05, + "loss": 1.7381, + "step": 1773 + }, + { + "epoch": 0.3044448258108804, + "grad_norm": 2.6472469549054054, + "learning_rate": 1.6303565509671476e-05, + "loss": 1.6926, + "step": 1774 + }, + { + "epoch": 0.30461644070705335, + "grad_norm": 2.722237747392849, + "learning_rate": 1.629924953888806e-05, + "loss": 1.7755, + "step": 1775 + }, + { + "epoch": 0.30478805560322636, + "grad_norm": 2.45239831264883, + "learning_rate": 1.6294931621919102e-05, + "loss": 1.7763, + "step": 1776 + }, + { + "epoch": 0.30495967049939937, + "grad_norm": 2.3149445676952984, + "learning_rate": 1.6290611760098655e-05, + "loss": 1.5728, + "step": 1777 + }, + { + "epoch": 0.3051312853955723, + "grad_norm": 2.8559190565308463, + "learning_rate": 1.6286289954761356e-05, + "loss": 1.9329, + "step": 1778 + }, + { + "epoch": 0.30530290029174534, + "grad_norm": 2.3993261673035615, + "learning_rate": 1.628196620724245e-05, + "loss": 1.4961, + "step": 1779 + }, + { + "epoch": 0.3054745151879183, + "grad_norm": 2.4723113811590536, + "learning_rate": 1.627764051887778e-05, + "loss": 1.9386, + "step": 1780 + }, + { + "epoch": 0.3056461300840913, + "grad_norm": 3.044971374821416, + "learning_rate": 1.6273312891003795e-05, + "loss": 1.7095, + "step": 1781 + }, + { + "epoch": 0.30581774498026426, + "grad_norm": 2.6244314868994714, + "learning_rate": 1.6268983324957536e-05, + "loss": 1.5359, + "step": 1782 + }, + { + "epoch": 0.3059893598764373, + "grad_norm": 3.156057128599002, + "learning_rate": 1.6264651822076636e-05, + "loss": 1.9254, + "step": 1783 + }, + { + "epoch": 0.3061609747726103, + "grad_norm": 3.1314213120396066, + "learning_rate": 1.6260318383699342e-05, + "loss": 1.707, + "step": 1784 + }, + { + "epoch": 0.30633258966878324, + "grad_norm": 2.5100764173079617, + "learning_rate": 1.62559830111645e-05, + "loss": 1.4651, + "step": 1785 + }, + { + "epoch": 0.30650420456495625, + "grad_norm": 2.7570370383903144, + "learning_rate": 1.625164570581153e-05, + "loss": 1.7877, + "step": 1786 + }, + { + "epoch": 0.3066758194611292, + "grad_norm": 2.172394923864771, + "learning_rate": 1.6247306468980475e-05, + "loss": 1.7598, + "step": 1787 + }, + { + "epoch": 0.3068474343573022, + "grad_norm": 2.1017876594405087, + "learning_rate": 1.624296530201196e-05, + "loss": 1.5646, + "step": 1788 + }, + { + "epoch": 0.30701904925347523, + "grad_norm": 2.7120160979800514, + "learning_rate": 1.6238622206247218e-05, + "loss": 1.6566, + "step": 1789 + }, + { + "epoch": 0.3071906641496482, + "grad_norm": 2.6644639905803933, + "learning_rate": 1.623427718302806e-05, + "loss": 1.9006, + "step": 1790 + }, + { + "epoch": 0.3073622790458212, + "grad_norm": 3.936265447486098, + "learning_rate": 1.6229930233696913e-05, + "loss": 1.7847, + "step": 1791 + }, + { + "epoch": 0.30753389394199415, + "grad_norm": 2.328242938953495, + "learning_rate": 1.622558135959678e-05, + "loss": 1.6377, + "step": 1792 + }, + { + "epoch": 0.30770550883816716, + "grad_norm": 3.427588101333531, + "learning_rate": 1.622123056207128e-05, + "loss": 1.7046, + "step": 1793 + }, + { + "epoch": 0.3078771237343401, 
+ "grad_norm": 2.6373744651708613, + "learning_rate": 1.62168778424646e-05, + "loss": 1.7134, + "step": 1794 + }, + { + "epoch": 0.30804873863051313, + "grad_norm": 2.7219651990903384, + "learning_rate": 1.6212523202121547e-05, + "loss": 1.7167, + "step": 1795 + }, + { + "epoch": 0.30822035352668614, + "grad_norm": 2.963670856455669, + "learning_rate": 1.6208166642387502e-05, + "loss": 1.7537, + "step": 1796 + }, + { + "epoch": 0.3083919684228591, + "grad_norm": 2.5976787611875096, + "learning_rate": 1.6203808164608454e-05, + "loss": 1.7955, + "step": 1797 + }, + { + "epoch": 0.3085635833190321, + "grad_norm": 2.5656805423137263, + "learning_rate": 1.619944777013097e-05, + "loss": 1.6677, + "step": 1798 + }, + { + "epoch": 0.30873519821520506, + "grad_norm": 3.005673973507895, + "learning_rate": 1.619508546030222e-05, + "loss": 1.5713, + "step": 1799 + }, + { + "epoch": 0.3089068131113781, + "grad_norm": 2.962738495974791, + "learning_rate": 1.619072123646996e-05, + "loss": 1.9836, + "step": 1800 + }, + { + "epoch": 0.30907842800755103, + "grad_norm": 2.9875705315600465, + "learning_rate": 1.6186355099982537e-05, + "loss": 1.7827, + "step": 1801 + }, + { + "epoch": 0.30925004290372404, + "grad_norm": 2.6151243091238814, + "learning_rate": 1.61819870521889e-05, + "loss": 1.6703, + "step": 1802 + }, + { + "epoch": 0.30942165779989705, + "grad_norm": 3.204141538019496, + "learning_rate": 1.6177617094438572e-05, + "loss": 1.7566, + "step": 1803 + }, + { + "epoch": 0.30959327269607, + "grad_norm": 2.740639238720865, + "learning_rate": 1.617324522808168e-05, + "loss": 1.5124, + "step": 1804 + }, + { + "epoch": 0.309764887592243, + "grad_norm": 3.396120257408002, + "learning_rate": 1.6168871454468925e-05, + "loss": 1.6964, + "step": 1805 + }, + { + "epoch": 0.309936502488416, + "grad_norm": 2.3611667032023935, + "learning_rate": 1.616449577495162e-05, + "loss": 1.7037, + "step": 1806 + }, + { + "epoch": 0.310108117384589, + "grad_norm": 2.929923445896195, + "learning_rate": 1.6160118190881643e-05, + "loss": 1.9567, + "step": 1807 + }, + { + "epoch": 0.31027973228076194, + "grad_norm": 2.528745624589099, + "learning_rate": 1.615573870361147e-05, + "loss": 1.54, + "step": 1808 + }, + { + "epoch": 0.31045134717693496, + "grad_norm": 3.603022535568774, + "learning_rate": 1.615135731449418e-05, + "loss": 1.9665, + "step": 1809 + }, + { + "epoch": 0.31062296207310797, + "grad_norm": 2.6122507840237046, + "learning_rate": 1.6146974024883414e-05, + "loss": 1.6567, + "step": 1810 + }, + { + "epoch": 0.3107945769692809, + "grad_norm": 2.581608715522504, + "learning_rate": 1.6142588836133413e-05, + "loss": 1.6351, + "step": 1811 + }, + { + "epoch": 0.31096619186545393, + "grad_norm": 2.547061230280792, + "learning_rate": 1.6138201749599e-05, + "loss": 1.6298, + "step": 1812 + }, + { + "epoch": 0.3111378067616269, + "grad_norm": 2.877789041695864, + "learning_rate": 1.6133812766635603e-05, + "loss": 1.9615, + "step": 1813 + }, + { + "epoch": 0.3113094216577999, + "grad_norm": 2.679265069265271, + "learning_rate": 1.61294218885992e-05, + "loss": 1.6718, + "step": 1814 + }, + { + "epoch": 0.3114810365539729, + "grad_norm": 2.701284834429953, + "learning_rate": 1.6125029116846394e-05, + "loss": 1.6384, + "step": 1815 + }, + { + "epoch": 0.31165265145014587, + "grad_norm": 2.908716215284335, + "learning_rate": 1.6120634452734346e-05, + "loss": 1.7692, + "step": 1816 + }, + { + "epoch": 0.3118242663463189, + "grad_norm": 2.9046631749123666, + "learning_rate": 1.611623789762081e-05, + "loss": 1.8231, + "step": 
1817 + }, + { + "epoch": 0.31199588124249183, + "grad_norm": 2.3493255594838502, + "learning_rate": 1.6111839452864123e-05, + "loss": 1.433, + "step": 1818 + }, + { + "epoch": 0.31216749613866485, + "grad_norm": 2.40880166929836, + "learning_rate": 1.610743911982321e-05, + "loss": 1.6991, + "step": 1819 + }, + { + "epoch": 0.3123391110348378, + "grad_norm": 3.0096267354692903, + "learning_rate": 1.6103036899857574e-05, + "loss": 1.6666, + "step": 1820 + }, + { + "epoch": 0.3125107259310108, + "grad_norm": 2.5063850198276074, + "learning_rate": 1.6098632794327308e-05, + "loss": 1.7437, + "step": 1821 + }, + { + "epoch": 0.3126823408271838, + "grad_norm": 2.769509742980168, + "learning_rate": 1.609422680459308e-05, + "loss": 1.8533, + "step": 1822 + }, + { + "epoch": 0.3128539557233568, + "grad_norm": 2.1296962849740355, + "learning_rate": 1.608981893201614e-05, + "loss": 1.6587, + "step": 1823 + }, + { + "epoch": 0.3130255706195298, + "grad_norm": 2.815288268734331, + "learning_rate": 1.608540917795833e-05, + "loss": 1.8576, + "step": 1824 + }, + { + "epoch": 0.31319718551570275, + "grad_norm": 2.755759963811611, + "learning_rate": 1.6080997543782063e-05, + "loss": 1.5347, + "step": 1825 + }, + { + "epoch": 0.31336880041187576, + "grad_norm": 2.6526205228509063, + "learning_rate": 1.6076584030850333e-05, + "loss": 1.701, + "step": 1826 + }, + { + "epoch": 0.3135404153080487, + "grad_norm": 2.814971385706839, + "learning_rate": 1.607216864052672e-05, + "loss": 1.9335, + "step": 1827 + }, + { + "epoch": 0.3137120302042217, + "grad_norm": 2.662273618993851, + "learning_rate": 1.6067751374175383e-05, + "loss": 1.8275, + "step": 1828 + }, + { + "epoch": 0.31388364510039474, + "grad_norm": 2.7467918376897575, + "learning_rate": 1.6063332233161055e-05, + "loss": 1.7759, + "step": 1829 + }, + { + "epoch": 0.3140552599965677, + "grad_norm": 2.737059645175828, + "learning_rate": 1.6058911218849058e-05, + "loss": 1.7352, + "step": 1830 + }, + { + "epoch": 0.3142268748927407, + "grad_norm": 2.9202493578050563, + "learning_rate": 1.6054488332605282e-05, + "loss": 1.8544, + "step": 1831 + }, + { + "epoch": 0.31439848978891366, + "grad_norm": 2.698061435397696, + "learning_rate": 1.6050063575796205e-05, + "loss": 1.6063, + "step": 1832 + }, + { + "epoch": 0.31457010468508667, + "grad_norm": 3.0613954905950136, + "learning_rate": 1.6045636949788875e-05, + "loss": 2.0303, + "step": 1833 + }, + { + "epoch": 0.3147417195812596, + "grad_norm": 3.6848696029277663, + "learning_rate": 1.6041208455950924e-05, + "loss": 1.7077, + "step": 1834 + }, + { + "epoch": 0.31491333447743264, + "grad_norm": 2.532169487777431, + "learning_rate": 1.603677809565055e-05, + "loss": 1.8292, + "step": 1835 + }, + { + "epoch": 0.31508494937360565, + "grad_norm": 2.5141319417602026, + "learning_rate": 1.6032345870256546e-05, + "loss": 1.591, + "step": 1836 + }, + { + "epoch": 0.3152565642697786, + "grad_norm": 2.8644374084060997, + "learning_rate": 1.6027911781138263e-05, + "loss": 1.5595, + "step": 1837 + }, + { + "epoch": 0.3154281791659516, + "grad_norm": 3.4260682868759376, + "learning_rate": 1.6023475829665643e-05, + "loss": 1.6461, + "step": 1838 + }, + { + "epoch": 0.3155997940621246, + "grad_norm": 2.157983539703558, + "learning_rate": 1.6019038017209186e-05, + "loss": 1.3875, + "step": 1839 + }, + { + "epoch": 0.3157714089582976, + "grad_norm": 2.776073279910533, + "learning_rate": 1.6014598345139992e-05, + "loss": 1.5049, + "step": 1840 + }, + { + "epoch": 0.3159430238544706, + "grad_norm": 2.647326741111861, + 
"learning_rate": 1.6010156814829704e-05, + "loss": 1.6051, + "step": 1841 + }, + { + "epoch": 0.31611463875064355, + "grad_norm": 3.4766660709705284, + "learning_rate": 1.6005713427650564e-05, + "loss": 1.9456, + "step": 1842 + }, + { + "epoch": 0.31628625364681656, + "grad_norm": 2.6280522302863423, + "learning_rate": 1.600126818497538e-05, + "loss": 1.7731, + "step": 1843 + }, + { + "epoch": 0.3164578685429895, + "grad_norm": 2.6315015095568346, + "learning_rate": 1.599682108817753e-05, + "loss": 1.6893, + "step": 1844 + }, + { + "epoch": 0.31662948343916253, + "grad_norm": 2.5175976048693265, + "learning_rate": 1.599237213863097e-05, + "loss": 1.4549, + "step": 1845 + }, + { + "epoch": 0.3168010983353355, + "grad_norm": 2.7836272032881113, + "learning_rate": 1.5987921337710224e-05, + "loss": 1.764, + "step": 1846 + }, + { + "epoch": 0.3169727132315085, + "grad_norm": 2.7474789235034867, + "learning_rate": 1.5983468686790386e-05, + "loss": 1.902, + "step": 1847 + }, + { + "epoch": 0.3171443281276815, + "grad_norm": 2.497226916613902, + "learning_rate": 1.5979014187247132e-05, + "loss": 1.7118, + "step": 1848 + }, + { + "epoch": 0.31731594302385446, + "grad_norm": 2.5860230214370237, + "learning_rate": 1.59745578404567e-05, + "loss": 1.7818, + "step": 1849 + }, + { + "epoch": 0.3174875579200275, + "grad_norm": 2.8044051230590212, + "learning_rate": 1.59700996477959e-05, + "loss": 1.3873, + "step": 1850 + }, + { + "epoch": 0.31765917281620043, + "grad_norm": 3.5705993055093193, + "learning_rate": 1.5965639610642117e-05, + "loss": 1.878, + "step": 1851 + }, + { + "epoch": 0.31783078771237344, + "grad_norm": 2.604514100015129, + "learning_rate": 1.5961177730373298e-05, + "loss": 1.8874, + "step": 1852 + }, + { + "epoch": 0.3180024026085464, + "grad_norm": 2.540428213850227, + "learning_rate": 1.5956714008367968e-05, + "loss": 1.5936, + "step": 1853 + }, + { + "epoch": 0.3181740175047194, + "grad_norm": 2.7223857558162554, + "learning_rate": 1.5952248446005213e-05, + "loss": 1.7457, + "step": 1854 + }, + { + "epoch": 0.3183456324008924, + "grad_norm": 2.6516437941224007, + "learning_rate": 1.5947781044664696e-05, + "loss": 1.758, + "step": 1855 + }, + { + "epoch": 0.3185172472970654, + "grad_norm": 2.3760378743832264, + "learning_rate": 1.5943311805726637e-05, + "loss": 1.6384, + "step": 1856 + }, + { + "epoch": 0.3186888621932384, + "grad_norm": 2.5776847113896117, + "learning_rate": 1.5938840730571836e-05, + "loss": 1.786, + "step": 1857 + }, + { + "epoch": 0.31886047708941134, + "grad_norm": 2.2439622164339497, + "learning_rate": 1.5934367820581654e-05, + "loss": 1.426, + "step": 1858 + }, + { + "epoch": 0.31903209198558435, + "grad_norm": 2.5053452595628007, + "learning_rate": 1.592989307713802e-05, + "loss": 1.8876, + "step": 1859 + }, + { + "epoch": 0.3192037068817573, + "grad_norm": 2.645167967187714, + "learning_rate": 1.5925416501623427e-05, + "loss": 1.5788, + "step": 1860 + }, + { + "epoch": 0.3193753217779303, + "grad_norm": 2.899862135235689, + "learning_rate": 1.5920938095420934e-05, + "loss": 1.5669, + "step": 1861 + }, + { + "epoch": 0.31954693667410333, + "grad_norm": 2.750590749978957, + "learning_rate": 1.591645785991417e-05, + "loss": 1.7793, + "step": 1862 + }, + { + "epoch": 0.3197185515702763, + "grad_norm": 2.8164409715169536, + "learning_rate": 1.5911975796487324e-05, + "loss": 1.7453, + "step": 1863 + }, + { + "epoch": 0.3198901664664493, + "grad_norm": 3.347725327106241, + "learning_rate": 1.5907491906525162e-05, + "loss": 1.9353, + "step": 1864 + }, + { + "epoch": 
0.32006178136262226, + "grad_norm": 2.6253304162682403, + "learning_rate": 1.5903006191412996e-05, + "loss": 1.7538, + "step": 1865 + }, + { + "epoch": 0.32023339625879527, + "grad_norm": 3.4345456825590195, + "learning_rate": 1.5898518652536713e-05, + "loss": 1.9821, + "step": 1866 + }, + { + "epoch": 0.3204050111549683, + "grad_norm": 3.147781304132002, + "learning_rate": 1.589402929128276e-05, + "loss": 1.8258, + "step": 1867 + }, + { + "epoch": 0.32057662605114123, + "grad_norm": 3.146856291330822, + "learning_rate": 1.588953810903815e-05, + "loss": 1.9101, + "step": 1868 + }, + { + "epoch": 0.32074824094731424, + "grad_norm": 2.568333247726322, + "learning_rate": 1.588504510719046e-05, + "loss": 1.4917, + "step": 1869 + }, + { + "epoch": 0.3209198558434872, + "grad_norm": 3.3756964269092795, + "learning_rate": 1.588055028712782e-05, + "loss": 1.8438, + "step": 1870 + }, + { + "epoch": 0.3210914707396602, + "grad_norm": 2.872424762146188, + "learning_rate": 1.5876053650238927e-05, + "loss": 1.802, + "step": 1871 + }, + { + "epoch": 0.32126308563583317, + "grad_norm": 2.9384308438565214, + "learning_rate": 1.5871555197913045e-05, + "loss": 1.5228, + "step": 1872 + }, + { + "epoch": 0.3214347005320062, + "grad_norm": 2.49210532271169, + "learning_rate": 1.5867054931539995e-05, + "loss": 1.4623, + "step": 1873 + }, + { + "epoch": 0.3216063154281792, + "grad_norm": 2.895751148600379, + "learning_rate": 1.5862552852510152e-05, + "loss": 1.9324, + "step": 1874 + }, + { + "epoch": 0.32177793032435215, + "grad_norm": 2.9423350219662696, + "learning_rate": 1.5858048962214457e-05, + "loss": 1.7377, + "step": 1875 + }, + { + "epoch": 0.32194954522052516, + "grad_norm": 3.367244935430937, + "learning_rate": 1.585354326204442e-05, + "loss": 1.8729, + "step": 1876 + }, + { + "epoch": 0.3221211601166981, + "grad_norm": 2.4835125115604066, + "learning_rate": 1.5849035753392085e-05, + "loss": 1.8261, + "step": 1877 + }, + { + "epoch": 0.3222927750128711, + "grad_norm": 2.5006489815683213, + "learning_rate": 1.5844526437650077e-05, + "loss": 1.698, + "step": 1878 + }, + { + "epoch": 0.3224643899090441, + "grad_norm": 3.0168484659777333, + "learning_rate": 1.584001531621158e-05, + "loss": 1.9001, + "step": 1879 + }, + { + "epoch": 0.3226360048052171, + "grad_norm": 3.108095293381667, + "learning_rate": 1.5835502390470313e-05, + "loss": 1.704, + "step": 1880 + }, + { + "epoch": 0.3228076197013901, + "grad_norm": 3.7276105585304857, + "learning_rate": 1.583098766182058e-05, + "loss": 1.6952, + "step": 1881 + }, + { + "epoch": 0.32297923459756306, + "grad_norm": 2.3685535660673667, + "learning_rate": 1.5826471131657227e-05, + "loss": 1.5413, + "step": 1882 + }, + { + "epoch": 0.32315084949373607, + "grad_norm": 2.7584718545490277, + "learning_rate": 1.5821952801375653e-05, + "loss": 1.6635, + "step": 1883 + }, + { + "epoch": 0.323322464389909, + "grad_norm": 2.928502150111893, + "learning_rate": 1.5817432672371826e-05, + "loss": 1.8562, + "step": 1884 + }, + { + "epoch": 0.32349407928608204, + "grad_norm": 2.5635450690970223, + "learning_rate": 1.581291074604226e-05, + "loss": 1.7794, + "step": 1885 + }, + { + "epoch": 0.323665694182255, + "grad_norm": 2.5824971920222155, + "learning_rate": 1.5808387023784025e-05, + "loss": 1.5943, + "step": 1886 + }, + { + "epoch": 0.323837309078428, + "grad_norm": 2.8634913336222794, + "learning_rate": 1.5803861506994755e-05, + "loss": 1.8735, + "step": 1887 + }, + { + "epoch": 0.324008923974601, + "grad_norm": 2.8348081198018935, + "learning_rate": 1.579933419707263e-05, 
+ "loss": 1.8429, + "step": 1888 + }, + { + "epoch": 0.32418053887077397, + "grad_norm": 2.764798734979702, + "learning_rate": 1.579480509541638e-05, + "loss": 1.6969, + "step": 1889 + }, + { + "epoch": 0.324352153766947, + "grad_norm": 3.7615717487044424, + "learning_rate": 1.5790274203425302e-05, + "loss": 1.7232, + "step": 1890 + }, + { + "epoch": 0.32452376866311994, + "grad_norm": 3.2134527252429295, + "learning_rate": 1.5785741522499232e-05, + "loss": 1.8074, + "step": 1891 + }, + { + "epoch": 0.32469538355929295, + "grad_norm": 3.321026230072206, + "learning_rate": 1.5781207054038567e-05, + "loss": 1.8601, + "step": 1892 + }, + { + "epoch": 0.32486699845546596, + "grad_norm": 2.9492855961278712, + "learning_rate": 1.5776670799444257e-05, + "loss": 1.7617, + "step": 1893 + }, + { + "epoch": 0.3250386133516389, + "grad_norm": 2.391870319321163, + "learning_rate": 1.5772132760117797e-05, + "loss": 1.7567, + "step": 1894 + }, + { + "epoch": 0.3252102282478119, + "grad_norm": 19.824506052124576, + "learning_rate": 1.576759293746124e-05, + "loss": 1.6844, + "step": 1895 + }, + { + "epoch": 0.3253818431439849, + "grad_norm": 2.6017121668062444, + "learning_rate": 1.576305133287719e-05, + "loss": 1.5208, + "step": 1896 + }, + { + "epoch": 0.3255534580401579, + "grad_norm": 2.078778427972936, + "learning_rate": 1.5758507947768794e-05, + "loss": 1.6443, + "step": 1897 + }, + { + "epoch": 0.32572507293633085, + "grad_norm": 2.643878666518309, + "learning_rate": 1.5753962783539755e-05, + "loss": 1.6504, + "step": 1898 + }, + { + "epoch": 0.32589668783250386, + "grad_norm": 3.038387434675897, + "learning_rate": 1.5749415841594328e-05, + "loss": 1.5466, + "step": 1899 + }, + { + "epoch": 0.3260683027286769, + "grad_norm": 3.5630501290476975, + "learning_rate": 1.5744867123337313e-05, + "loss": 1.9429, + "step": 1900 + }, + { + "epoch": 0.32623991762484983, + "grad_norm": 2.6487908946909977, + "learning_rate": 1.5740316630174057e-05, + "loss": 1.743, + "step": 1901 + }, + { + "epoch": 0.32641153252102284, + "grad_norm": 4.256919439739053, + "learning_rate": 1.573576436351046e-05, + "loss": 1.4377, + "step": 1902 + }, + { + "epoch": 0.3265831474171958, + "grad_norm": 2.7554072056259042, + "learning_rate": 1.573121032475297e-05, + "loss": 1.7038, + "step": 1903 + }, + { + "epoch": 0.3267547623133688, + "grad_norm": 2.503034490131831, + "learning_rate": 1.5726654515308584e-05, + "loss": 1.7936, + "step": 1904 + }, + { + "epoch": 0.32692637720954176, + "grad_norm": 2.487938973126537, + "learning_rate": 1.5722096936584834e-05, + "loss": 1.6215, + "step": 1905 + }, + { + "epoch": 0.3270979921057148, + "grad_norm": 3.861228850303636, + "learning_rate": 1.5717537589989813e-05, + "loss": 1.7414, + "step": 1906 + }, + { + "epoch": 0.3272696070018878, + "grad_norm": 3.6118083360444397, + "learning_rate": 1.571297647693215e-05, + "loss": 1.8911, + "step": 1907 + }, + { + "epoch": 0.32744122189806074, + "grad_norm": 3.741059011806432, + "learning_rate": 1.570841359882103e-05, + "loss": 1.6635, + "step": 1908 + }, + { + "epoch": 0.32761283679423375, + "grad_norm": 3.259956696249125, + "learning_rate": 1.5703848957066178e-05, + "loss": 1.7691, + "step": 1909 + }, + { + "epoch": 0.3277844516904067, + "grad_norm": 2.4287767734968564, + "learning_rate": 1.5699282553077855e-05, + "loss": 1.7686, + "step": 1910 + }, + { + "epoch": 0.3279560665865797, + "grad_norm": 2.4410890155679907, + "learning_rate": 1.5694714388266885e-05, + "loss": 1.7687, + "step": 1911 + }, + { + "epoch": 0.3281276814827527, + "grad_norm": 
2.7041966544528284, + "learning_rate": 1.569014446404462e-05, + "loss": 1.895, + "step": 1912 + }, + { + "epoch": 0.3282992963789257, + "grad_norm": 2.6875235258849757, + "learning_rate": 1.5685572781822965e-05, + "loss": 1.6322, + "step": 1913 + }, + { + "epoch": 0.3284709112750987, + "grad_norm": 2.7392496188784095, + "learning_rate": 1.568099934301436e-05, + "loss": 1.8038, + "step": 1914 + }, + { + "epoch": 0.32864252617127165, + "grad_norm": 2.6903997978654797, + "learning_rate": 1.5676424149031798e-05, + "loss": 1.7666, + "step": 1915 + }, + { + "epoch": 0.32881414106744467, + "grad_norm": 3.286426688341425, + "learning_rate": 1.5671847201288804e-05, + "loss": 1.7416, + "step": 1916 + }, + { + "epoch": 0.3289857559636176, + "grad_norm": 3.6508779053828895, + "learning_rate": 1.566726850119945e-05, + "loss": 1.619, + "step": 1917 + }, + { + "epoch": 0.32915737085979063, + "grad_norm": 2.3118380129352367, + "learning_rate": 1.566268805017836e-05, + "loss": 1.4385, + "step": 1918 + }, + { + "epoch": 0.32932898575596364, + "grad_norm": 2.9656824861662607, + "learning_rate": 1.565810584964067e-05, + "loss": 2.0173, + "step": 1919 + }, + { + "epoch": 0.3295006006521366, + "grad_norm": 3.488396055734131, + "learning_rate": 1.5653521901002084e-05, + "loss": 1.7711, + "step": 1920 + }, + { + "epoch": 0.3296722155483096, + "grad_norm": 2.9336110109305236, + "learning_rate": 1.564893620567884e-05, + "loss": 1.4975, + "step": 1921 + }, + { + "epoch": 0.32984383044448257, + "grad_norm": 2.325084121870113, + "learning_rate": 1.5644348765087704e-05, + "loss": 1.379, + "step": 1922 + }, + { + "epoch": 0.3300154453406556, + "grad_norm": 3.4427731001891595, + "learning_rate": 1.563975958064599e-05, + "loss": 1.7365, + "step": 1923 + }, + { + "epoch": 0.33018706023682853, + "grad_norm": 2.9082236290856414, + "learning_rate": 1.5635168653771564e-05, + "loss": 1.7818, + "step": 1924 + }, + { + "epoch": 0.33035867513300154, + "grad_norm": 10.580924998264482, + "learning_rate": 1.5630575985882793e-05, + "loss": 1.6502, + "step": 1925 + }, + { + "epoch": 0.33053029002917456, + "grad_norm": 2.4764236020524204, + "learning_rate": 1.5625981578398624e-05, + "loss": 1.6864, + "step": 1926 + }, + { + "epoch": 0.3307019049253475, + "grad_norm": 3.1169721081121198, + "learning_rate": 1.5621385432738516e-05, + "loss": 1.6375, + "step": 1927 + }, + { + "epoch": 0.3308735198215205, + "grad_norm": 3.5357190805063747, + "learning_rate": 1.5616787550322473e-05, + "loss": 1.6085, + "step": 1928 + }, + { + "epoch": 0.3310451347176935, + "grad_norm": 3.444090357473441, + "learning_rate": 1.561218793257103e-05, + "loss": 1.8197, + "step": 1929 + }, + { + "epoch": 0.3312167496138665, + "grad_norm": 3.535636207498067, + "learning_rate": 1.5607586580905273e-05, + "loss": 1.6458, + "step": 1930 + }, + { + "epoch": 0.33138836451003945, + "grad_norm": 2.5580470797877384, + "learning_rate": 1.5602983496746805e-05, + "loss": 1.7406, + "step": 1931 + }, + { + "epoch": 0.33155997940621246, + "grad_norm": 2.533293873721004, + "learning_rate": 1.5598378681517772e-05, + "loss": 1.8296, + "step": 1932 + }, + { + "epoch": 0.33173159430238547, + "grad_norm": 3.100048834019278, + "learning_rate": 1.559377213664086e-05, + "loss": 1.5878, + "step": 1933 + }, + { + "epoch": 0.3319032091985584, + "grad_norm": 2.41638592842297, + "learning_rate": 1.558916386353928e-05, + "loss": 1.7117, + "step": 1934 + }, + { + "epoch": 0.33207482409473144, + "grad_norm": 3.4660030647597546, + "learning_rate": 1.5584553863636783e-05, + "loss": 1.5978, + "step": 
1935 + }, + { + "epoch": 0.3322464389909044, + "grad_norm": 2.628983376924701, + "learning_rate": 1.557994213835766e-05, + "loss": 1.8324, + "step": 1936 + }, + { + "epoch": 0.3324180538870774, + "grad_norm": 2.626539935619845, + "learning_rate": 1.5575328689126714e-05, + "loss": 1.7423, + "step": 1937 + }, + { + "epoch": 0.33258966878325036, + "grad_norm": 2.981021549860697, + "learning_rate": 1.5570713517369305e-05, + "loss": 1.656, + "step": 1938 + }, + { + "epoch": 0.33276128367942337, + "grad_norm": 2.6712964260904624, + "learning_rate": 1.5566096624511306e-05, + "loss": 1.6129, + "step": 1939 + }, + { + "epoch": 0.3329328985755964, + "grad_norm": 3.7767999352759816, + "learning_rate": 1.5561478011979133e-05, + "loss": 2.0338, + "step": 1940 + }, + { + "epoch": 0.33310451347176934, + "grad_norm": 2.752406480350029, + "learning_rate": 1.555685768119973e-05, + "loss": 1.646, + "step": 1941 + }, + { + "epoch": 0.33327612836794235, + "grad_norm": 2.7957883842159035, + "learning_rate": 1.5552235633600576e-05, + "loss": 1.8039, + "step": 1942 + }, + { + "epoch": 0.3334477432641153, + "grad_norm": 2.1821824060511137, + "learning_rate": 1.5547611870609672e-05, + "loss": 1.5384, + "step": 1943 + }, + { + "epoch": 0.3336193581602883, + "grad_norm": 2.5623110844738504, + "learning_rate": 1.554298639365555e-05, + "loss": 1.834, + "step": 1944 + }, + { + "epoch": 0.3337909730564613, + "grad_norm": 2.248116150945698, + "learning_rate": 1.5538359204167285e-05, + "loss": 1.6514, + "step": 1945 + }, + { + "epoch": 0.3339625879526343, + "grad_norm": 2.700377628007367, + "learning_rate": 1.553373030357446e-05, + "loss": 1.7759, + "step": 1946 + }, + { + "epoch": 0.3341342028488073, + "grad_norm": 3.4977059833200115, + "learning_rate": 1.5529099693307205e-05, + "loss": 1.864, + "step": 1947 + }, + { + "epoch": 0.33430581774498025, + "grad_norm": 3.2840104204802123, + "learning_rate": 1.5524467374796165e-05, + "loss": 1.5823, + "step": 1948 + }, + { + "epoch": 0.33447743264115326, + "grad_norm": 3.5584161556558014, + "learning_rate": 1.5519833349472523e-05, + "loss": 1.69, + "step": 1949 + }, + { + "epoch": 0.3346490475373262, + "grad_norm": 2.6078108692928343, + "learning_rate": 1.5515197618767985e-05, + "loss": 1.8914, + "step": 1950 + }, + { + "epoch": 0.3348206624334992, + "grad_norm": 2.7110173387307137, + "learning_rate": 1.5510560184114785e-05, + "loss": 1.9239, + "step": 1951 + }, + { + "epoch": 0.33499227732967224, + "grad_norm": 2.8094977439747564, + "learning_rate": 1.5505921046945677e-05, + "loss": 1.6266, + "step": 1952 + }, + { + "epoch": 0.3351638922258452, + "grad_norm": 2.834909679423824, + "learning_rate": 1.5501280208693948e-05, + "loss": 1.5762, + "step": 1953 + }, + { + "epoch": 0.3353355071220182, + "grad_norm": 2.674892517060095, + "learning_rate": 1.5496637670793415e-05, + "loss": 2.0049, + "step": 1954 + }, + { + "epoch": 0.33550712201819116, + "grad_norm": 3.1138810121668072, + "learning_rate": 1.5491993434678404e-05, + "loss": 1.7359, + "step": 1955 + }, + { + "epoch": 0.3356787369143642, + "grad_norm": 3.103606516363571, + "learning_rate": 1.5487347501783783e-05, + "loss": 1.6512, + "step": 1956 + }, + { + "epoch": 0.33585035181053713, + "grad_norm": 2.43027994642771, + "learning_rate": 1.5482699873544937e-05, + "loss": 1.8532, + "step": 1957 + }, + { + "epoch": 0.33602196670671014, + "grad_norm": 2.6413441206454977, + "learning_rate": 1.547805055139777e-05, + "loss": 1.7201, + "step": 1958 + }, + { + "epoch": 0.33619358160288315, + "grad_norm": 4.0106005749293825, + 
"learning_rate": 1.5473399536778715e-05, + "loss": 1.7079, + "step": 1959 + }, + { + "epoch": 0.3363651964990561, + "grad_norm": 3.206214786988806, + "learning_rate": 1.546874683112473e-05, + "loss": 1.7608, + "step": 1960 + }, + { + "epoch": 0.3365368113952291, + "grad_norm": 2.6561795997581634, + "learning_rate": 1.546409243587329e-05, + "loss": 1.8837, + "step": 1961 + }, + { + "epoch": 0.3367084262914021, + "grad_norm": 2.58641854118283, + "learning_rate": 1.5459436352462395e-05, + "loss": 1.6578, + "step": 1962 + }, + { + "epoch": 0.3368800411875751, + "grad_norm": 2.768846127982534, + "learning_rate": 1.5454778582330567e-05, + "loss": 1.6507, + "step": 1963 + }, + { + "epoch": 0.3370516560837481, + "grad_norm": 3.2016114245323686, + "learning_rate": 1.5450119126916844e-05, + "loss": 1.6859, + "step": 1964 + }, + { + "epoch": 0.33722327097992105, + "grad_norm": 4.707555669442656, + "learning_rate": 1.544545798766079e-05, + "loss": 1.8789, + "step": 1965 + }, + { + "epoch": 0.33739488587609406, + "grad_norm": 4.461137310127853, + "learning_rate": 1.5440795166002492e-05, + "loss": 1.9813, + "step": 1966 + }, + { + "epoch": 0.337566500772267, + "grad_norm": 2.3129466837544266, + "learning_rate": 1.543613066338255e-05, + "loss": 1.711, + "step": 1967 + }, + { + "epoch": 0.33773811566844003, + "grad_norm": 3.7437345946752036, + "learning_rate": 1.5431464481242085e-05, + "loss": 1.8767, + "step": 1968 + }, + { + "epoch": 0.337909730564613, + "grad_norm": 2.3874183390289034, + "learning_rate": 1.5426796621022746e-05, + "loss": 1.6342, + "step": 1969 + }, + { + "epoch": 0.338081345460786, + "grad_norm": 2.4332292102526085, + "learning_rate": 1.542212708416668e-05, + "loss": 1.5295, + "step": 1970 + }, + { + "epoch": 0.338252960356959, + "grad_norm": 2.79228284272914, + "learning_rate": 1.541745587211657e-05, + "loss": 1.6769, + "step": 1971 + }, + { + "epoch": 0.33842457525313197, + "grad_norm": 2.871427530232779, + "learning_rate": 1.541278298631561e-05, + "loss": 1.5641, + "step": 1972 + }, + { + "epoch": 0.338596190149305, + "grad_norm": 2.5055446947492768, + "learning_rate": 1.5408108428207512e-05, + "loss": 1.3681, + "step": 1973 + }, + { + "epoch": 0.33876780504547793, + "grad_norm": 2.6964311460399637, + "learning_rate": 1.540343219923651e-05, + "loss": 1.8414, + "step": 1974 + }, + { + "epoch": 0.33893941994165094, + "grad_norm": 2.577644863267545, + "learning_rate": 1.5398754300847346e-05, + "loss": 1.7075, + "step": 1975 + }, + { + "epoch": 0.3391110348378239, + "grad_norm": 2.516837313293274, + "learning_rate": 1.5394074734485275e-05, + "loss": 1.7644, + "step": 1976 + }, + { + "epoch": 0.3392826497339969, + "grad_norm": 2.763000775632793, + "learning_rate": 1.538939350159608e-05, + "loss": 1.8224, + "step": 1977 + }, + { + "epoch": 0.3394542646301699, + "grad_norm": 2.697539371400862, + "learning_rate": 1.538471060362605e-05, + "loss": 1.4923, + "step": 1978 + }, + { + "epoch": 0.3396258795263429, + "grad_norm": 3.3502490194731966, + "learning_rate": 1.538002604202199e-05, + "loss": 1.7466, + "step": 1979 + }, + { + "epoch": 0.3397974944225159, + "grad_norm": 2.3468363389541684, + "learning_rate": 1.5375339818231213e-05, + "loss": 1.6252, + "step": 1980 + }, + { + "epoch": 0.33996910931868884, + "grad_norm": 2.7686591333033763, + "learning_rate": 1.5370651933701566e-05, + "loss": 1.5589, + "step": 1981 + }, + { + "epoch": 0.34014072421486186, + "grad_norm": 3.116237920971837, + "learning_rate": 1.5365962389881386e-05, + "loss": 1.7829, + "step": 1982 + }, + { + "epoch": 
0.3403123391110348, + "grad_norm": 3.0234370384875153, + "learning_rate": 1.536127118821953e-05, + "loss": 1.7208, + "step": 1983 + }, + { + "epoch": 0.3404839540072078, + "grad_norm": 2.516128941965993, + "learning_rate": 1.5356578330165377e-05, + "loss": 1.6726, + "step": 1984 + }, + { + "epoch": 0.34065556890338083, + "grad_norm": 2.5691896583270966, + "learning_rate": 1.53518838171688e-05, + "loss": 1.7392, + "step": 1985 + }, + { + "epoch": 0.3408271837995538, + "grad_norm": 2.608913854061275, + "learning_rate": 1.53471876506802e-05, + "loss": 1.8245, + "step": 1986 + }, + { + "epoch": 0.3409987986957268, + "grad_norm": 2.564202386189652, + "learning_rate": 1.534248983215048e-05, + "loss": 1.7224, + "step": 1987 + }, + { + "epoch": 0.34117041359189976, + "grad_norm": 3.1016738123791434, + "learning_rate": 1.533779036303105e-05, + "loss": 1.8091, + "step": 1988 + }, + { + "epoch": 0.34134202848807277, + "grad_norm": 3.2323081803279052, + "learning_rate": 1.5333089244773844e-05, + "loss": 1.7104, + "step": 1989 + }, + { + "epoch": 0.3415136433842458, + "grad_norm": 2.9098203649235654, + "learning_rate": 1.5328386478831295e-05, + "loss": 1.6782, + "step": 1990 + }, + { + "epoch": 0.34168525828041874, + "grad_norm": 3.172383459169986, + "learning_rate": 1.532368206665634e-05, + "loss": 1.7733, + "step": 1991 + }, + { + "epoch": 0.34185687317659175, + "grad_norm": 3.656300195554205, + "learning_rate": 1.531897600970243e-05, + "loss": 1.7086, + "step": 1992 + }, + { + "epoch": 0.3420284880727647, + "grad_norm": 2.6344018311027892, + "learning_rate": 1.531426830942354e-05, + "loss": 1.8706, + "step": 1993 + }, + { + "epoch": 0.3422001029689377, + "grad_norm": 4.22851473057233, + "learning_rate": 1.5309558967274128e-05, + "loss": 1.8577, + "step": 1994 + }, + { + "epoch": 0.34237171786511067, + "grad_norm": 2.7353850524020715, + "learning_rate": 1.5304847984709166e-05, + "loss": 1.7469, + "step": 1995 + }, + { + "epoch": 0.3425433327612837, + "grad_norm": 2.680517246832729, + "learning_rate": 1.5300135363184145e-05, + "loss": 1.5369, + "step": 1996 + }, + { + "epoch": 0.3427149476574567, + "grad_norm": 2.8979963835467095, + "learning_rate": 1.5295421104155047e-05, + "loss": 1.5516, + "step": 1997 + }, + { + "epoch": 0.34288656255362965, + "grad_norm": 3.0841503793201888, + "learning_rate": 1.529070520907837e-05, + "loss": 1.757, + "step": 1998 + }, + { + "epoch": 0.34305817744980266, + "grad_norm": 3.082521287986174, + "learning_rate": 1.5285987679411114e-05, + "loss": 1.8098, + "step": 1999 + }, + { + "epoch": 0.3432297923459756, + "grad_norm": 3.029738568939894, + "learning_rate": 1.528126851661078e-05, + "loss": 1.5466, + "step": 2000 + }, + { + "epoch": 0.3434014072421486, + "grad_norm": 2.9328255696848897, + "learning_rate": 1.5276547722135385e-05, + "loss": 1.4992, + "step": 2001 + }, + { + "epoch": 0.3435730221383216, + "grad_norm": 3.014343216455571, + "learning_rate": 1.5271825297443436e-05, + "loss": 1.7196, + "step": 2002 + }, + { + "epoch": 0.3437446370344946, + "grad_norm": 2.92714191232286, + "learning_rate": 1.5267101243993954e-05, + "loss": 1.7171, + "step": 2003 + }, + { + "epoch": 0.3439162519306676, + "grad_norm": 2.8871505875491783, + "learning_rate": 1.5262375563246457e-05, + "loss": 1.5217, + "step": 2004 + }, + { + "epoch": 0.34408786682684056, + "grad_norm": 3.2902156930892623, + "learning_rate": 1.525764825666097e-05, + "loss": 1.8849, + "step": 2005 + }, + { + "epoch": 0.34425948172301357, + "grad_norm": 3.4048275595491377, + "learning_rate": 1.5252919325698022e-05, 
+ "loss": 1.692, + "step": 2006 + }, + { + "epoch": 0.3444310966191865, + "grad_norm": 3.002158590807369, + "learning_rate": 1.524818877181863e-05, + "loss": 1.7875, + "step": 2007 + }, + { + "epoch": 0.34460271151535954, + "grad_norm": 2.854718207276763, + "learning_rate": 1.5243456596484339e-05, + "loss": 1.7951, + "step": 2008 + }, + { + "epoch": 0.3447743264115325, + "grad_norm": 2.4585399399326, + "learning_rate": 1.5238722801157164e-05, + "loss": 1.8094, + "step": 2009 + }, + { + "epoch": 0.3449459413077055, + "grad_norm": 3.269496738087618, + "learning_rate": 1.5233987387299643e-05, + "loss": 1.7617, + "step": 2010 + }, + { + "epoch": 0.3451175562038785, + "grad_norm": 3.1029576986317187, + "learning_rate": 1.5229250356374804e-05, + "loss": 1.7609, + "step": 2011 + }, + { + "epoch": 0.3452891711000515, + "grad_norm": 2.9945120369588176, + "learning_rate": 1.5224511709846176e-05, + "loss": 1.7917, + "step": 2012 + }, + { + "epoch": 0.3454607859962245, + "grad_norm": 3.4834330715867123, + "learning_rate": 1.5219771449177794e-05, + "loss": 1.72, + "step": 2013 + }, + { + "epoch": 0.34563240089239744, + "grad_norm": 2.695216788358231, + "learning_rate": 1.5215029575834182e-05, + "loss": 1.7781, + "step": 2014 + }, + { + "epoch": 0.34580401578857045, + "grad_norm": 2.7967827467215365, + "learning_rate": 1.5210286091280364e-05, + "loss": 1.841, + "step": 2015 + }, + { + "epoch": 0.34597563068474346, + "grad_norm": 2.2228868569973383, + "learning_rate": 1.5205540996981865e-05, + "loss": 1.4912, + "step": 2016 + }, + { + "epoch": 0.3461472455809164, + "grad_norm": 3.5814295288562366, + "learning_rate": 1.5200794294404709e-05, + "loss": 1.6561, + "step": 2017 + }, + { + "epoch": 0.34631886047708943, + "grad_norm": 3.406112532646996, + "learning_rate": 1.5196045985015414e-05, + "loss": 1.937, + "step": 2018 + }, + { + "epoch": 0.3464904753732624, + "grad_norm": 3.1318675983162025, + "learning_rate": 1.519129607028099e-05, + "loss": 1.6298, + "step": 2019 + }, + { + "epoch": 0.3466620902694354, + "grad_norm": 2.6200220724003436, + "learning_rate": 1.5186544551668955e-05, + "loss": 1.9008, + "step": 2020 + }, + { + "epoch": 0.34683370516560835, + "grad_norm": 2.97887899292917, + "learning_rate": 1.5181791430647306e-05, + "loss": 1.71, + "step": 2021 + }, + { + "epoch": 0.34700532006178136, + "grad_norm": 2.6720768431569835, + "learning_rate": 1.5177036708684551e-05, + "loss": 1.9192, + "step": 2022 + }, + { + "epoch": 0.3471769349579544, + "grad_norm": 2.675272761796172, + "learning_rate": 1.5172280387249685e-05, + "loss": 1.6388, + "step": 2023 + }, + { + "epoch": 0.34734854985412733, + "grad_norm": 2.7498697524694884, + "learning_rate": 1.5167522467812194e-05, + "loss": 1.8369, + "step": 2024 + }, + { + "epoch": 0.34752016475030034, + "grad_norm": 3.560804837565884, + "learning_rate": 1.5162762951842068e-05, + "loss": 1.784, + "step": 2025 + }, + { + "epoch": 0.3476917796464733, + "grad_norm": 2.955387252319994, + "learning_rate": 1.5158001840809776e-05, + "loss": 1.7898, + "step": 2026 + }, + { + "epoch": 0.3478633945426463, + "grad_norm": 2.099486300165839, + "learning_rate": 1.5153239136186297e-05, + "loss": 1.4766, + "step": 2027 + }, + { + "epoch": 0.34803500943881926, + "grad_norm": 4.120103882263827, + "learning_rate": 1.5148474839443084e-05, + "loss": 1.8699, + "step": 2028 + }, + { + "epoch": 0.3482066243349923, + "grad_norm": 4.002060875482322, + "learning_rate": 1.5143708952052099e-05, + "loss": 1.7598, + "step": 2029 + }, + { + "epoch": 0.3483782392311653, + "grad_norm": 
2.69509799145148, + "learning_rate": 1.5138941475485779e-05, + "loss": 1.7916, + "step": 2030 + }, + { + "epoch": 0.34854985412733824, + "grad_norm": 2.9079292525794873, + "learning_rate": 1.5134172411217067e-05, + "loss": 1.6887, + "step": 2031 + }, + { + "epoch": 0.34872146902351125, + "grad_norm": 2.342411628101549, + "learning_rate": 1.5129401760719393e-05, + "loss": 1.6308, + "step": 2032 + }, + { + "epoch": 0.3488930839196842, + "grad_norm": 2.5179014930477526, + "learning_rate": 1.5124629525466664e-05, + "loss": 1.5402, + "step": 2033 + }, + { + "epoch": 0.3490646988158572, + "grad_norm": 2.5460701327780955, + "learning_rate": 1.511985570693329e-05, + "loss": 1.6214, + "step": 2034 + }, + { + "epoch": 0.3492363137120302, + "grad_norm": 3.2936216283547908, + "learning_rate": 1.5115080306594172e-05, + "loss": 1.6879, + "step": 2035 + }, + { + "epoch": 0.3494079286082032, + "grad_norm": 2.6621086998012973, + "learning_rate": 1.5110303325924686e-05, + "loss": 1.6966, + "step": 2036 + }, + { + "epoch": 0.3495795435043762, + "grad_norm": 3.760328579284915, + "learning_rate": 1.5105524766400717e-05, + "loss": 1.5663, + "step": 2037 + }, + { + "epoch": 0.34975115840054916, + "grad_norm": 3.0909287373416454, + "learning_rate": 1.5100744629498611e-05, + "loss": 1.7214, + "step": 2038 + }, + { + "epoch": 0.34992277329672217, + "grad_norm": 3.2255819420554452, + "learning_rate": 1.5095962916695227e-05, + "loss": 1.7462, + "step": 2039 + }, + { + "epoch": 0.3500943881928951, + "grad_norm": 3.859164936293867, + "learning_rate": 1.5091179629467894e-05, + "loss": 2.0157, + "step": 2040 + }, + { + "epoch": 0.35026600308906813, + "grad_norm": 3.7505339654604204, + "learning_rate": 1.5086394769294437e-05, + "loss": 1.709, + "step": 2041 + }, + { + "epoch": 0.35043761798524115, + "grad_norm": 2.323223409781269, + "learning_rate": 1.5081608337653157e-05, + "loss": 1.4799, + "step": 2042 + }, + { + "epoch": 0.3506092328814141, + "grad_norm": 3.9169386800850434, + "learning_rate": 1.5076820336022855e-05, + "loss": 1.7923, + "step": 2043 + }, + { + "epoch": 0.3507808477775871, + "grad_norm": 2.5083349690438252, + "learning_rate": 1.5072030765882804e-05, + "loss": 1.5316, + "step": 2044 + }, + { + "epoch": 0.35095246267376007, + "grad_norm": 3.0950812152066747, + "learning_rate": 1.5067239628712764e-05, + "loss": 1.6214, + "step": 2045 + }, + { + "epoch": 0.3511240775699331, + "grad_norm": 2.83291084959603, + "learning_rate": 1.5062446925992987e-05, + "loss": 1.7143, + "step": 2046 + }, + { + "epoch": 0.35129569246610604, + "grad_norm": 3.865533320579865, + "learning_rate": 1.5057652659204198e-05, + "loss": 1.7135, + "step": 2047 + }, + { + "epoch": 0.35146730736227905, + "grad_norm": 3.417673960643634, + "learning_rate": 1.5052856829827613e-05, + "loss": 1.9202, + "step": 2048 + }, + { + "epoch": 0.35163892225845206, + "grad_norm": 2.392249161136908, + "learning_rate": 1.5048059439344931e-05, + "loss": 1.5183, + "step": 2049 + }, + { + "epoch": 0.351810537154625, + "grad_norm": 2.4482902650331186, + "learning_rate": 1.5043260489238325e-05, + "loss": 1.4724, + "step": 2050 + }, + { + "epoch": 0.351982152050798, + "grad_norm": 3.002896171553363, + "learning_rate": 1.5038459980990459e-05, + "loss": 1.7874, + "step": 2051 + }, + { + "epoch": 0.352153766946971, + "grad_norm": 2.8087411566253118, + "learning_rate": 1.503365791608447e-05, + "loss": 1.6975, + "step": 2052 + }, + { + "epoch": 0.352325381843144, + "grad_norm": 2.4170815033256496, + "learning_rate": 1.5028854296003987e-05, + "loss": 1.8364, + "step": 
2053 + }, + { + "epoch": 0.35249699673931695, + "grad_norm": 3.264864338389202, + "learning_rate": 1.5024049122233112e-05, + "loss": 1.527, + "step": 2054 + }, + { + "epoch": 0.35266861163548996, + "grad_norm": 2.866232775421392, + "learning_rate": 1.501924239625642e-05, + "loss": 1.5965, + "step": 2055 + }, + { + "epoch": 0.35284022653166297, + "grad_norm": 2.314526606451571, + "learning_rate": 1.5014434119558988e-05, + "loss": 1.2629, + "step": 2056 + }, + { + "epoch": 0.3530118414278359, + "grad_norm": 3.429784914737218, + "learning_rate": 1.5009624293626343e-05, + "loss": 1.6308, + "step": 2057 + }, + { + "epoch": 0.35318345632400894, + "grad_norm": 2.971184465812407, + "learning_rate": 1.5004812919944515e-05, + "loss": 1.8593, + "step": 2058 + }, + { + "epoch": 0.3533550712201819, + "grad_norm": 2.3982470302601238, + "learning_rate": 1.5000000000000002e-05, + "loss": 1.462, + "step": 2059 + }, + { + "epoch": 0.3535266861163549, + "grad_norm": 2.8484372719852504, + "learning_rate": 1.4995185535279775e-05, + "loss": 1.8415, + "step": 2060 + }, + { + "epoch": 0.35369830101252786, + "grad_norm": 2.599762066061954, + "learning_rate": 1.4990369527271294e-05, + "loss": 1.625, + "step": 2061 + }, + { + "epoch": 0.35386991590870087, + "grad_norm": 3.10493471529143, + "learning_rate": 1.4985551977462484e-05, + "loss": 1.84, + "step": 2062 + }, + { + "epoch": 0.3540415308048739, + "grad_norm": 3.021332212801247, + "learning_rate": 1.4980732887341756e-05, + "loss": 1.5465, + "step": 2063 + }, + { + "epoch": 0.35421314570104684, + "grad_norm": 2.768989936483566, + "learning_rate": 1.4975912258397988e-05, + "loss": 1.7901, + "step": 2064 + }, + { + "epoch": 0.35438476059721985, + "grad_norm": 2.558841054645275, + "learning_rate": 1.4971090092120544e-05, + "loss": 1.7264, + "step": 2065 + }, + { + "epoch": 0.3545563754933928, + "grad_norm": 2.8361183453990755, + "learning_rate": 1.496626638999925e-05, + "loss": 1.6502, + "step": 2066 + }, + { + "epoch": 0.3547279903895658, + "grad_norm": 2.6257259033632967, + "learning_rate": 1.4961441153524421e-05, + "loss": 1.7741, + "step": 2067 + }, + { + "epoch": 0.35489960528573883, + "grad_norm": 3.1519146906299635, + "learning_rate": 1.4956614384186837e-05, + "loss": 1.9824, + "step": 2068 + }, + { + "epoch": 0.3550712201819118, + "grad_norm": 2.701134143660292, + "learning_rate": 1.4951786083477746e-05, + "loss": 1.8847, + "step": 2069 + }, + { + "epoch": 0.3552428350780848, + "grad_norm": 2.4973986732019595, + "learning_rate": 1.4946956252888887e-05, + "loss": 1.7222, + "step": 2070 + }, + { + "epoch": 0.35541444997425775, + "grad_norm": 3.5328549448900732, + "learning_rate": 1.4942124893912453e-05, + "loss": 1.6673, + "step": 2071 + }, + { + "epoch": 0.35558606487043076, + "grad_norm": 2.9775574819790185, + "learning_rate": 1.4937292008041117e-05, + "loss": 1.7622, + "step": 2072 + }, + { + "epoch": 0.3557576797666037, + "grad_norm": 2.452580712385438, + "learning_rate": 1.4932457596768032e-05, + "loss": 1.4865, + "step": 2073 + }, + { + "epoch": 0.35592929466277673, + "grad_norm": 2.751209670642272, + "learning_rate": 1.4927621661586806e-05, + "loss": 1.7595, + "step": 2074 + }, + { + "epoch": 0.35610090955894974, + "grad_norm": 2.594070489724724, + "learning_rate": 1.4922784203991531e-05, + "loss": 1.5734, + "step": 2075 + }, + { + "epoch": 0.3562725244551227, + "grad_norm": 2.919945339360576, + "learning_rate": 1.4917945225476762e-05, + "loss": 1.7262, + "step": 2076 + }, + { + "epoch": 0.3564441393512957, + "grad_norm": 2.674845919320873, + 
"learning_rate": 1.4913104727537526e-05, + "loss": 1.6933, + "step": 2077 + }, + { + "epoch": 0.35661575424746866, + "grad_norm": 3.4430302297906032, + "learning_rate": 1.4908262711669324e-05, + "loss": 1.6939, + "step": 2078 + }, + { + "epoch": 0.3567873691436417, + "grad_norm": 2.9284917044842507, + "learning_rate": 1.4903419179368113e-05, + "loss": 1.5715, + "step": 2079 + }, + { + "epoch": 0.35695898403981463, + "grad_norm": 2.854764720449646, + "learning_rate": 1.4898574132130337e-05, + "loss": 1.7242, + "step": 2080 + }, + { + "epoch": 0.35713059893598764, + "grad_norm": 2.581716627236574, + "learning_rate": 1.489372757145289e-05, + "loss": 1.6838, + "step": 2081 + }, + { + "epoch": 0.35730221383216065, + "grad_norm": 3.03205355443985, + "learning_rate": 1.4888879498833149e-05, + "loss": 1.6537, + "step": 2082 + }, + { + "epoch": 0.3574738287283336, + "grad_norm": 2.609054169161796, + "learning_rate": 1.4884029915768945e-05, + "loss": 1.7836, + "step": 2083 + }, + { + "epoch": 0.3576454436245066, + "grad_norm": 2.8147834592908714, + "learning_rate": 1.4879178823758584e-05, + "loss": 1.7895, + "step": 2084 + }, + { + "epoch": 0.3578170585206796, + "grad_norm": 4.421250672672322, + "learning_rate": 1.4874326224300837e-05, + "loss": 1.5548, + "step": 2085 + }, + { + "epoch": 0.3579886734168526, + "grad_norm": 3.5349509665651397, + "learning_rate": 1.4869472118894939e-05, + "loss": 1.7912, + "step": 2086 + }, + { + "epoch": 0.35816028831302554, + "grad_norm": 3.2974578297900976, + "learning_rate": 1.4864616509040591e-05, + "loss": 1.9432, + "step": 2087 + }, + { + "epoch": 0.35833190320919855, + "grad_norm": 3.027398437498267, + "learning_rate": 1.4859759396237956e-05, + "loss": 1.8214, + "step": 2088 + }, + { + "epoch": 0.35850351810537157, + "grad_norm": 2.219266843034949, + "learning_rate": 1.4854900781987667e-05, + "loss": 1.8622, + "step": 2089 + }, + { + "epoch": 0.3586751330015445, + "grad_norm": 3.0725642478698556, + "learning_rate": 1.4850040667790814e-05, + "loss": 1.7087, + "step": 2090 + }, + { + "epoch": 0.35884674789771753, + "grad_norm": 2.456343930505925, + "learning_rate": 1.4845179055148959e-05, + "loss": 1.8706, + "step": 2091 + }, + { + "epoch": 0.3590183627938905, + "grad_norm": 3.208164530094859, + "learning_rate": 1.4840315945564126e-05, + "loss": 1.7426, + "step": 2092 + }, + { + "epoch": 0.3591899776900635, + "grad_norm": 2.3661968785355163, + "learning_rate": 1.4835451340538785e-05, + "loss": 1.6436, + "step": 2093 + }, + { + "epoch": 0.3593615925862365, + "grad_norm": 2.7368973716615366, + "learning_rate": 1.483058524157589e-05, + "loss": 1.5317, + "step": 2094 + }, + { + "epoch": 0.35953320748240947, + "grad_norm": 2.504767054026463, + "learning_rate": 1.4825717650178846e-05, + "loss": 1.7232, + "step": 2095 + }, + { + "epoch": 0.3597048223785825, + "grad_norm": 3.4042583327702323, + "learning_rate": 1.4820848567851516e-05, + "loss": 1.6432, + "step": 2096 + }, + { + "epoch": 0.35987643727475543, + "grad_norm": 2.991618591792345, + "learning_rate": 1.4815977996098234e-05, + "loss": 1.77, + "step": 2097 + }, + { + "epoch": 0.36004805217092845, + "grad_norm": 2.325091528558994, + "learning_rate": 1.4811105936423787e-05, + "loss": 1.4872, + "step": 2098 + }, + { + "epoch": 0.3602196670671014, + "grad_norm": 2.3382210155462726, + "learning_rate": 1.4806232390333419e-05, + "loss": 1.4459, + "step": 2099 + }, + { + "epoch": 0.3603912819632744, + "grad_norm": 2.4281709203534114, + "learning_rate": 1.480135735933284e-05, + "loss": 1.5677, + "step": 2100 + }, + { + 
"epoch": 0.3605628968594474, + "grad_norm": 3.1766231144340877, + "learning_rate": 1.4796480844928218e-05, + "loss": 1.6072, + "step": 2101 + }, + { + "epoch": 0.3607345117556204, + "grad_norm": 3.214952600143939, + "learning_rate": 1.4791602848626173e-05, + "loss": 1.6929, + "step": 2102 + }, + { + "epoch": 0.3609061266517934, + "grad_norm": 3.332856283116005, + "learning_rate": 1.4786723371933786e-05, + "loss": 1.7261, + "step": 2103 + }, + { + "epoch": 0.36107774154796635, + "grad_norm": 2.2312020367976437, + "learning_rate": 1.4781842416358604e-05, + "loss": 1.5619, + "step": 2104 + }, + { + "epoch": 0.36124935644413936, + "grad_norm": 3.345718261205169, + "learning_rate": 1.4776959983408618e-05, + "loss": 1.7613, + "step": 2105 + }, + { + "epoch": 0.3614209713403123, + "grad_norm": 2.137050076720357, + "learning_rate": 1.4772076074592279e-05, + "loss": 1.3221, + "step": 2106 + }, + { + "epoch": 0.3615925862364853, + "grad_norm": 3.8357399228736773, + "learning_rate": 1.47671906914185e-05, + "loss": 1.6096, + "step": 2107 + }, + { + "epoch": 0.36176420113265834, + "grad_norm": 4.116236903039708, + "learning_rate": 1.4762303835396643e-05, + "loss": 1.6687, + "step": 2108 + }, + { + "epoch": 0.3619358160288313, + "grad_norm": 3.202549333640833, + "learning_rate": 1.4757415508036528e-05, + "loss": 1.7655, + "step": 2109 + }, + { + "epoch": 0.3621074309250043, + "grad_norm": 2.88016909904521, + "learning_rate": 1.4752525710848426e-05, + "loss": 1.7907, + "step": 2110 + }, + { + "epoch": 0.36227904582117726, + "grad_norm": 2.759735373177994, + "learning_rate": 1.4747634445343072e-05, + "loss": 1.6621, + "step": 2111 + }, + { + "epoch": 0.36245066071735027, + "grad_norm": 2.5521293862134544, + "learning_rate": 1.4742741713031636e-05, + "loss": 1.7276, + "step": 2112 + }, + { + "epoch": 0.3626222756135232, + "grad_norm": 2.682126368759819, + "learning_rate": 1.4737847515425766e-05, + "loss": 1.6332, + "step": 2113 + }, + { + "epoch": 0.36279389050969624, + "grad_norm": 2.3282003454522866, + "learning_rate": 1.473295185403754e-05, + "loss": 1.446, + "step": 2114 + }, + { + "epoch": 0.36296550540586925, + "grad_norm": 2.5090572008598295, + "learning_rate": 1.4728054730379503e-05, + "loss": 1.7423, + "step": 2115 + }, + { + "epoch": 0.3631371203020422, + "grad_norm": 2.5973100709418326, + "learning_rate": 1.4723156145964647e-05, + "loss": 1.6289, + "step": 2116 + }, + { + "epoch": 0.3633087351982152, + "grad_norm": 3.0858678293813724, + "learning_rate": 1.4718256102306407e-05, + "loss": 1.7861, + "step": 2117 + }, + { + "epoch": 0.36348035009438817, + "grad_norm": 2.1602360735875847, + "learning_rate": 1.4713354600918688e-05, + "loss": 1.503, + "step": 2118 + }, + { + "epoch": 0.3636519649905612, + "grad_norm": 2.906901057828225, + "learning_rate": 1.4708451643315827e-05, + "loss": 1.7194, + "step": 2119 + }, + { + "epoch": 0.3638235798867342, + "grad_norm": 3.1450995314553243, + "learning_rate": 1.4703547231012616e-05, + "loss": 1.6388, + "step": 2120 + }, + { + "epoch": 0.36399519478290715, + "grad_norm": 3.888297884721375, + "learning_rate": 1.4698641365524303e-05, + "loss": 1.9896, + "step": 2121 + }, + { + "epoch": 0.36416680967908016, + "grad_norm": 2.5314979232206514, + "learning_rate": 1.4693734048366582e-05, + "loss": 1.5323, + "step": 2122 + }, + { + "epoch": 0.3643384245752531, + "grad_norm": 2.625170574968521, + "learning_rate": 1.4688825281055587e-05, + "loss": 1.4178, + "step": 2123 + }, + { + "epoch": 0.36451003947142613, + "grad_norm": 3.8181101327414804, + "learning_rate": 
1.4683915065107912e-05, + "loss": 1.806, + "step": 2124 + }, + { + "epoch": 0.3646816543675991, + "grad_norm": 2.4735566729714784, + "learning_rate": 1.4679003402040593e-05, + "loss": 1.7117, + "step": 2125 + }, + { + "epoch": 0.3648532692637721, + "grad_norm": 2.4898196807743935, + "learning_rate": 1.4674090293371113e-05, + "loss": 1.5495, + "step": 2126 + }, + { + "epoch": 0.3650248841599451, + "grad_norm": 3.971874749674553, + "learning_rate": 1.4669175740617401e-05, + "loss": 1.7275, + "step": 2127 + }, + { + "epoch": 0.36519649905611806, + "grad_norm": 2.2521467588803996, + "learning_rate": 1.4664259745297838e-05, + "loss": 1.7097, + "step": 2128 + }, + { + "epoch": 0.3653681139522911, + "grad_norm": 3.6612101128507755, + "learning_rate": 1.4659342308931237e-05, + "loss": 1.8567, + "step": 2129 + }, + { + "epoch": 0.36553972884846403, + "grad_norm": 3.187715967776502, + "learning_rate": 1.4654423433036873e-05, + "loss": 1.5872, + "step": 2130 + }, + { + "epoch": 0.36571134374463704, + "grad_norm": 2.1535557633300217, + "learning_rate": 1.4649503119134456e-05, + "loss": 1.5298, + "step": 2131 + }, + { + "epoch": 0.36588295864081, + "grad_norm": 2.917728221700515, + "learning_rate": 1.464458136874414e-05, + "loss": 1.9201, + "step": 2132 + }, + { + "epoch": 0.366054573536983, + "grad_norm": 3.233510869815508, + "learning_rate": 1.463965818338653e-05, + "loss": 1.6774, + "step": 2133 + }, + { + "epoch": 0.366226188433156, + "grad_norm": 2.6731811836923196, + "learning_rate": 1.4634733564582666e-05, + "loss": 1.8772, + "step": 2134 + }, + { + "epoch": 0.366397803329329, + "grad_norm": 2.625132452814094, + "learning_rate": 1.4629807513854033e-05, + "loss": 1.7352, + "step": 2135 + }, + { + "epoch": 0.366569418225502, + "grad_norm": 3.016963574875201, + "learning_rate": 1.4624880032722561e-05, + "loss": 1.6212, + "step": 2136 + }, + { + "epoch": 0.36674103312167494, + "grad_norm": 2.770623140822268, + "learning_rate": 1.4619951122710624e-05, + "loss": 1.5459, + "step": 2137 + }, + { + "epoch": 0.36691264801784795, + "grad_norm": 3.366373600057806, + "learning_rate": 1.461502078534103e-05, + "loss": 1.842, + "step": 2138 + }, + { + "epoch": 0.36708426291402096, + "grad_norm": 3.06896023646229, + "learning_rate": 1.461008902213703e-05, + "loss": 1.6927, + "step": 2139 + }, + { + "epoch": 0.3672558778101939, + "grad_norm": 3.0015100240991637, + "learning_rate": 1.460515583462233e-05, + "loss": 1.721, + "step": 2140 + }, + { + "epoch": 0.36742749270636693, + "grad_norm": 3.0118975738551232, + "learning_rate": 1.4600221224321045e-05, + "loss": 1.7634, + "step": 2141 + }, + { + "epoch": 0.3675991076025399, + "grad_norm": 3.371891528487684, + "learning_rate": 1.4595285192757763e-05, + "loss": 1.8535, + "step": 2142 + }, + { + "epoch": 0.3677707224987129, + "grad_norm": 3.0411025316178204, + "learning_rate": 1.4590347741457489e-05, + "loss": 1.7882, + "step": 2143 + }, + { + "epoch": 0.36794233739488585, + "grad_norm": 3.0786992984465322, + "learning_rate": 1.4585408871945675e-05, + "loss": 1.6479, + "step": 2144 + }, + { + "epoch": 0.36811395229105887, + "grad_norm": 3.033010693532836, + "learning_rate": 1.4580468585748212e-05, + "loss": 1.7905, + "step": 2145 + }, + { + "epoch": 0.3682855671872319, + "grad_norm": 2.8625627407557386, + "learning_rate": 1.457552688439143e-05, + "loss": 1.8307, + "step": 2146 + }, + { + "epoch": 0.36845718208340483, + "grad_norm": 2.324754800708334, + "learning_rate": 1.4570583769402085e-05, + "loss": 1.6194, + "step": 2147 + }, + { + "epoch": 0.36862879697957784, + 
"grad_norm": 2.5836327999141115, + "learning_rate": 1.4565639242307381e-05, + "loss": 1.7015, + "step": 2148 + }, + { + "epoch": 0.3688004118757508, + "grad_norm": 2.316074404157186, + "learning_rate": 1.4560693304634956e-05, + "loss": 1.677, + "step": 2149 + }, + { + "epoch": 0.3689720267719238, + "grad_norm": 2.563453653505684, + "learning_rate": 1.4555745957912886e-05, + "loss": 1.7119, + "step": 2150 + }, + { + "epoch": 0.36914364166809677, + "grad_norm": 2.865707726633867, + "learning_rate": 1.4550797203669673e-05, + "loss": 1.7043, + "step": 2151 + }, + { + "epoch": 0.3693152565642698, + "grad_norm": 5.011378547660993, + "learning_rate": 1.4545847043434266e-05, + "loss": 1.5669, + "step": 2152 + }, + { + "epoch": 0.3694868714604428, + "grad_norm": 3.202085857937417, + "learning_rate": 1.4540895478736037e-05, + "loss": 1.5395, + "step": 2153 + }, + { + "epoch": 0.36965848635661575, + "grad_norm": 2.6501341424564138, + "learning_rate": 1.45359425111048e-05, + "loss": 1.6759, + "step": 2154 + }, + { + "epoch": 0.36983010125278876, + "grad_norm": 2.621896430240285, + "learning_rate": 1.4530988142070802e-05, + "loss": 1.7386, + "step": 2155 + }, + { + "epoch": 0.3700017161489617, + "grad_norm": 2.395999084791078, + "learning_rate": 1.4526032373164717e-05, + "loss": 1.5637, + "step": 2156 + }, + { + "epoch": 0.3701733310451347, + "grad_norm": 3.6772351195774786, + "learning_rate": 1.4521075205917656e-05, + "loss": 1.8141, + "step": 2157 + }, + { + "epoch": 0.3703449459413077, + "grad_norm": 9.020394448334791, + "learning_rate": 1.4516116641861164e-05, + "loss": 1.6614, + "step": 2158 + }, + { + "epoch": 0.3705165608374807, + "grad_norm": 2.8309602729085053, + "learning_rate": 1.4511156682527215e-05, + "loss": 1.6501, + "step": 2159 + }, + { + "epoch": 0.3706881757336537, + "grad_norm": 3.5653747504992825, + "learning_rate": 1.4506195329448205e-05, + "loss": 1.7632, + "step": 2160 + }, + { + "epoch": 0.37085979062982666, + "grad_norm": 3.3034069919113422, + "learning_rate": 1.4501232584156984e-05, + "loss": 1.6739, + "step": 2161 + }, + { + "epoch": 0.37103140552599967, + "grad_norm": 4.178189283199575, + "learning_rate": 1.4496268448186806e-05, + "loss": 1.7542, + "step": 2162 + }, + { + "epoch": 0.3712030204221726, + "grad_norm": 4.365822456310529, + "learning_rate": 1.449130292307137e-05, + "loss": 1.8823, + "step": 2163 + }, + { + "epoch": 0.37137463531834564, + "grad_norm": 2.899211322850207, + "learning_rate": 1.4486336010344804e-05, + "loss": 1.6283, + "step": 2164 + }, + { + "epoch": 0.37154625021451865, + "grad_norm": 3.9142505904887166, + "learning_rate": 1.4481367711541653e-05, + "loss": 1.5994, + "step": 2165 + }, + { + "epoch": 0.3717178651106916, + "grad_norm": 2.8814960731444406, + "learning_rate": 1.4476398028196909e-05, + "loss": 1.7413, + "step": 2166 + }, + { + "epoch": 0.3718894800068646, + "grad_norm": 2.487125036029448, + "learning_rate": 1.4471426961845976e-05, + "loss": 1.7801, + "step": 2167 + }, + { + "epoch": 0.37206109490303757, + "grad_norm": 2.7121970885323274, + "learning_rate": 1.4466454514024685e-05, + "loss": 1.6655, + "step": 2168 + }, + { + "epoch": 0.3722327097992106, + "grad_norm": 3.1423092690643526, + "learning_rate": 1.446148068626931e-05, + "loss": 1.7162, + "step": 2169 + }, + { + "epoch": 0.37240432469538354, + "grad_norm": 2.862760014745284, + "learning_rate": 1.4456505480116533e-05, + "loss": 1.8156, + "step": 2170 + }, + { + "epoch": 0.37257593959155655, + "grad_norm": 2.6915273388266487, + "learning_rate": 1.4451528897103476e-05, + "loss": 
1.6721, + "step": 2171 + }, + { + "epoch": 0.37274755448772956, + "grad_norm": 2.5239709085159605, + "learning_rate": 1.4446550938767669e-05, + "loss": 1.6769, + "step": 2172 + }, + { + "epoch": 0.3729191693839025, + "grad_norm": 2.67868547969852, + "learning_rate": 1.4441571606647089e-05, + "loss": 1.8929, + "step": 2173 + }, + { + "epoch": 0.3730907842800755, + "grad_norm": 3.404084445351775, + "learning_rate": 1.4436590902280122e-05, + "loss": 1.7555, + "step": 2174 + }, + { + "epoch": 0.3732623991762485, + "grad_norm": 2.736853970976809, + "learning_rate": 1.4431608827205579e-05, + "loss": 1.7508, + "step": 2175 + }, + { + "epoch": 0.3734340140724215, + "grad_norm": 3.0522568393109863, + "learning_rate": 1.4426625382962706e-05, + "loss": 1.838, + "step": 2176 + }, + { + "epoch": 0.37360562896859445, + "grad_norm": 3.2251956796296506, + "learning_rate": 1.4421640571091153e-05, + "loss": 1.7563, + "step": 2177 + }, + { + "epoch": 0.37377724386476746, + "grad_norm": 2.326256885234364, + "learning_rate": 1.4416654393131013e-05, + "loss": 1.6253, + "step": 2178 + }, + { + "epoch": 0.3739488587609405, + "grad_norm": 2.929950220726013, + "learning_rate": 1.4411666850622788e-05, + "loss": 1.8398, + "step": 2179 + }, + { + "epoch": 0.37412047365711343, + "grad_norm": 2.882205056595399, + "learning_rate": 1.4406677945107398e-05, + "loss": 1.7602, + "step": 2180 + }, + { + "epoch": 0.37429208855328644, + "grad_norm": 2.154015153842205, + "learning_rate": 1.4401687678126205e-05, + "loss": 1.5701, + "step": 2181 + }, + { + "epoch": 0.3744637034494594, + "grad_norm": 2.370836467763457, + "learning_rate": 1.4396696051220965e-05, + "loss": 1.6229, + "step": 2182 + }, + { + "epoch": 0.3746353183456324, + "grad_norm": 2.760721971689484, + "learning_rate": 1.4391703065933873e-05, + "loss": 1.7536, + "step": 2183 + }, + { + "epoch": 0.37480693324180536, + "grad_norm": 2.8574886068966108, + "learning_rate": 1.4386708723807532e-05, + "loss": 1.6223, + "step": 2184 + }, + { + "epoch": 0.3749785481379784, + "grad_norm": 2.371703482108517, + "learning_rate": 1.438171302638498e-05, + "loss": 1.7262, + "step": 2185 + }, + { + "epoch": 0.3751501630341514, + "grad_norm": 2.4793168684302165, + "learning_rate": 1.4376715975209654e-05, + "loss": 1.6545, + "step": 2186 + }, + { + "epoch": 0.37532177793032434, + "grad_norm": 2.532404862793482, + "learning_rate": 1.437171757182542e-05, + "loss": 1.5066, + "step": 2187 + }, + { + "epoch": 0.37549339282649735, + "grad_norm": 3.7874693942879265, + "learning_rate": 1.4366717817776567e-05, + "loss": 1.6264, + "step": 2188 + }, + { + "epoch": 0.3756650077226703, + "grad_norm": 3.116036982270046, + "learning_rate": 1.4361716714607785e-05, + "loss": 1.5684, + "step": 2189 + }, + { + "epoch": 0.3758366226188433, + "grad_norm": 2.5175876506273043, + "learning_rate": 1.4356714263864198e-05, + "loss": 1.6901, + "step": 2190 + }, + { + "epoch": 0.37600823751501633, + "grad_norm": 2.7037305648012486, + "learning_rate": 1.4351710467091337e-05, + "loss": 1.8673, + "step": 2191 + }, + { + "epoch": 0.3761798524111893, + "grad_norm": 2.511107908132675, + "learning_rate": 1.4346705325835148e-05, + "loss": 1.5539, + "step": 2192 + }, + { + "epoch": 0.3763514673073623, + "grad_norm": 2.489911802380476, + "learning_rate": 1.4341698841641997e-05, + "loss": 1.8653, + "step": 2193 + }, + { + "epoch": 0.37652308220353525, + "grad_norm": 2.4525580717293223, + "learning_rate": 1.4336691016058665e-05, + "loss": 1.4228, + "step": 2194 + }, + { + "epoch": 0.37669469709970826, + "grad_norm": 
2.367772194895001, + "learning_rate": 1.4331681850632344e-05, + "loss": 1.6974, + "step": 2195 + }, + { + "epoch": 0.3768663119958812, + "grad_norm": 3.47541828158448, + "learning_rate": 1.4326671346910641e-05, + "loss": 1.9933, + "step": 2196 + }, + { + "epoch": 0.37703792689205423, + "grad_norm": 2.7142388534405297, + "learning_rate": 1.4321659506441577e-05, + "loss": 1.6537, + "step": 2197 + }, + { + "epoch": 0.37720954178822724, + "grad_norm": 2.652141709157279, + "learning_rate": 1.4316646330773585e-05, + "loss": 1.7776, + "step": 2198 + }, + { + "epoch": 0.3773811566844002, + "grad_norm": 3.019675057855687, + "learning_rate": 1.431163182145551e-05, + "loss": 1.7079, + "step": 2199 + }, + { + "epoch": 0.3775527715805732, + "grad_norm": 2.8810525572386636, + "learning_rate": 1.4306615980036618e-05, + "loss": 1.5222, + "step": 2200 + }, + { + "epoch": 0.37772438647674617, + "grad_norm": 2.4813778721533546, + "learning_rate": 1.430159880806657e-05, + "loss": 1.4897, + "step": 2201 + }, + { + "epoch": 0.3778960013729192, + "grad_norm": 2.918744639476238, + "learning_rate": 1.429658030709545e-05, + "loss": 1.8064, + "step": 2202 + }, + { + "epoch": 0.37806761626909213, + "grad_norm": 2.486611141805446, + "learning_rate": 1.4291560478673756e-05, + "loss": 1.6809, + "step": 2203 + }, + { + "epoch": 0.37823923116526514, + "grad_norm": 2.525480350108589, + "learning_rate": 1.4286539324352378e-05, + "loss": 1.4924, + "step": 2204 + }, + { + "epoch": 0.37841084606143816, + "grad_norm": 2.7024109695281577, + "learning_rate": 1.4281516845682637e-05, + "loss": 1.7283, + "step": 2205 + }, + { + "epoch": 0.3785824609576111, + "grad_norm": 3.4908404464474008, + "learning_rate": 1.427649304421625e-05, + "loss": 1.7113, + "step": 2206 + }, + { + "epoch": 0.3787540758537841, + "grad_norm": 2.9712620329475303, + "learning_rate": 1.4271467921505348e-05, + "loss": 1.8099, + "step": 2207 + }, + { + "epoch": 0.3789256907499571, + "grad_norm": 2.777802338360159, + "learning_rate": 1.4266441479102464e-05, + "loss": 1.8899, + "step": 2208 + }, + { + "epoch": 0.3790973056461301, + "grad_norm": 2.5937609206430783, + "learning_rate": 1.4261413718560549e-05, + "loss": 1.6308, + "step": 2209 + }, + { + "epoch": 0.37926892054230305, + "grad_norm": 2.90205576377282, + "learning_rate": 1.4256384641432952e-05, + "loss": 1.7082, + "step": 2210 + }, + { + "epoch": 0.37944053543847606, + "grad_norm": 2.811984879702953, + "learning_rate": 1.4251354249273432e-05, + "loss": 1.6481, + "step": 2211 + }, + { + "epoch": 0.37961215033464907, + "grad_norm": 2.5821967392923155, + "learning_rate": 1.424632254363616e-05, + "loss": 1.9296, + "step": 2212 + }, + { + "epoch": 0.379783765230822, + "grad_norm": 2.434964606347429, + "learning_rate": 1.4241289526075697e-05, + "loss": 1.7321, + "step": 2213 + }, + { + "epoch": 0.37995538012699503, + "grad_norm": 2.288895494712603, + "learning_rate": 1.4236255198147029e-05, + "loss": 1.5558, + "step": 2214 + }, + { + "epoch": 0.380126995023168, + "grad_norm": 2.8732707429446687, + "learning_rate": 1.4231219561405533e-05, + "loss": 1.9385, + "step": 2215 + }, + { + "epoch": 0.380298609919341, + "grad_norm": 3.56963046537789, + "learning_rate": 1.4226182617406996e-05, + "loss": 1.9864, + "step": 2216 + }, + { + "epoch": 0.380470224815514, + "grad_norm": 3.302977319531517, + "learning_rate": 1.422114436770761e-05, + "loss": 1.5754, + "step": 2217 + }, + { + "epoch": 0.38064183971168697, + "grad_norm": 2.5965918821644167, + "learning_rate": 1.4216104813863962e-05, + "loss": 1.7756, + "step": 2218 + 
}, + { + "epoch": 0.38081345460786, + "grad_norm": 3.002135474152398, + "learning_rate": 1.4211063957433056e-05, + "loss": 1.8891, + "step": 2219 + }, + { + "epoch": 0.38098506950403294, + "grad_norm": 2.461394979703817, + "learning_rate": 1.4206021799972283e-05, + "loss": 1.7562, + "step": 2220 + }, + { + "epoch": 0.38115668440020595, + "grad_norm": 2.320356021719385, + "learning_rate": 1.4200978343039448e-05, + "loss": 1.6289, + "step": 2221 + }, + { + "epoch": 0.3813282992963789, + "grad_norm": 2.6768833977800015, + "learning_rate": 1.4195933588192751e-05, + "loss": 1.642, + "step": 2222 + }, + { + "epoch": 0.3814999141925519, + "grad_norm": 2.780108773902873, + "learning_rate": 1.4190887536990797e-05, + "loss": 1.7184, + "step": 2223 + }, + { + "epoch": 0.3816715290887249, + "grad_norm": 2.4604900837998565, + "learning_rate": 1.418584019099259e-05, + "loss": 1.5972, + "step": 2224 + }, + { + "epoch": 0.3818431439848979, + "grad_norm": 3.0313746678478672, + "learning_rate": 1.418079155175753e-05, + "loss": 1.6771, + "step": 2225 + }, + { + "epoch": 0.3820147588810709, + "grad_norm": 3.11334474632266, + "learning_rate": 1.4175741620845422e-05, + "loss": 1.7264, + "step": 2226 + }, + { + "epoch": 0.38218637377724385, + "grad_norm": 3.3460943489633057, + "learning_rate": 1.4170690399816469e-05, + "loss": 1.6929, + "step": 2227 + }, + { + "epoch": 0.38235798867341686, + "grad_norm": 2.6861366414202537, + "learning_rate": 1.4165637890231267e-05, + "loss": 1.7905, + "step": 2228 + }, + { + "epoch": 0.3825296035695898, + "grad_norm": 3.492722916148379, + "learning_rate": 1.4160584093650825e-05, + "loss": 1.743, + "step": 2229 + }, + { + "epoch": 0.3827012184657628, + "grad_norm": 4.023257395500161, + "learning_rate": 1.4155529011636528e-05, + "loss": 2.0173, + "step": 2230 + }, + { + "epoch": 0.38287283336193584, + "grad_norm": 2.0917941336297705, + "learning_rate": 1.4150472645750175e-05, + "loss": 1.3025, + "step": 2231 + }, + { + "epoch": 0.3830444482581088, + "grad_norm": 3.006190689162569, + "learning_rate": 1.4145414997553956e-05, + "loss": 1.8324, + "step": 2232 + }, + { + "epoch": 0.3832160631542818, + "grad_norm": 3.122814496435496, + "learning_rate": 1.4140356068610459e-05, + "loss": 1.7966, + "step": 2233 + }, + { + "epoch": 0.38338767805045476, + "grad_norm": 3.2009987043855195, + "learning_rate": 1.4135295860482663e-05, + "loss": 1.7276, + "step": 2234 + }, + { + "epoch": 0.3835592929466278, + "grad_norm": 2.554782866016197, + "learning_rate": 1.4130234374733944e-05, + "loss": 1.5416, + "step": 2235 + }, + { + "epoch": 0.38373090784280073, + "grad_norm": 2.5950255478358644, + "learning_rate": 1.4125171612928083e-05, + "loss": 1.5395, + "step": 2236 + }, + { + "epoch": 0.38390252273897374, + "grad_norm": 2.8831155695688375, + "learning_rate": 1.4120107576629235e-05, + "loss": 1.7688, + "step": 2237 + }, + { + "epoch": 0.38407413763514675, + "grad_norm": 2.5397518896498155, + "learning_rate": 1.4115042267401968e-05, + "loss": 1.6666, + "step": 2238 + }, + { + "epoch": 0.3842457525313197, + "grad_norm": 3.6718608295390442, + "learning_rate": 1.4109975686811231e-05, + "loss": 1.5902, + "step": 2239 + }, + { + "epoch": 0.3844173674274927, + "grad_norm": 2.363713596141586, + "learning_rate": 1.410490783642237e-05, + "loss": 1.5944, + "step": 2240 + }, + { + "epoch": 0.3845889823236657, + "grad_norm": 3.165948877022219, + "learning_rate": 1.4099838717801126e-05, + "loss": 1.6349, + "step": 2241 + }, + { + "epoch": 0.3847605972198387, + "grad_norm": 2.640233580352414, + "learning_rate": 
1.4094768332513629e-05, + "loss": 1.704, + "step": 2242 + }, + { + "epoch": 0.3849322121160117, + "grad_norm": 2.923584546226495, + "learning_rate": 1.4089696682126399e-05, + "loss": 1.7403, + "step": 2243 + }, + { + "epoch": 0.38510382701218465, + "grad_norm": 2.443877454341655, + "learning_rate": 1.4084623768206349e-05, + "loss": 1.6041, + "step": 2244 + }, + { + "epoch": 0.38527544190835766, + "grad_norm": 3.0326227011507334, + "learning_rate": 1.4079549592320782e-05, + "loss": 1.5833, + "step": 2245 + }, + { + "epoch": 0.3854470568045306, + "grad_norm": 3.1657560521997956, + "learning_rate": 1.4074474156037393e-05, + "loss": 1.537, + "step": 2246 + }, + { + "epoch": 0.38561867170070363, + "grad_norm": 2.5425969518956966, + "learning_rate": 1.4069397460924259e-05, + "loss": 1.7082, + "step": 2247 + }, + { + "epoch": 0.3857902865968766, + "grad_norm": 3.120102526551189, + "learning_rate": 1.4064319508549858e-05, + "loss": 1.8363, + "step": 2248 + }, + { + "epoch": 0.3859619014930496, + "grad_norm": 2.6598909836762177, + "learning_rate": 1.4059240300483041e-05, + "loss": 1.8236, + "step": 2249 + }, + { + "epoch": 0.3861335163892226, + "grad_norm": 2.6328046782494843, + "learning_rate": 1.4054159838293063e-05, + "loss": 1.6207, + "step": 2250 + }, + { + "epoch": 0.38630513128539556, + "grad_norm": 2.7213315919935273, + "learning_rate": 1.4049078123549556e-05, + "loss": 1.6323, + "step": 2251 + }, + { + "epoch": 0.3864767461815686, + "grad_norm": 2.9374698143724487, + "learning_rate": 1.4043995157822539e-05, + "loss": 1.7496, + "step": 2252 + }, + { + "epoch": 0.38664836107774153, + "grad_norm": 2.789279696962537, + "learning_rate": 1.4038910942682424e-05, + "loss": 1.7075, + "step": 2253 + }, + { + "epoch": 0.38681997597391454, + "grad_norm": 2.544224307618862, + "learning_rate": 1.4033825479700008e-05, + "loss": 1.7613, + "step": 2254 + }, + { + "epoch": 0.3869915908700875, + "grad_norm": 2.847108668594769, + "learning_rate": 1.4028738770446463e-05, + "loss": 1.4704, + "step": 2255 + }, + { + "epoch": 0.3871632057662605, + "grad_norm": 2.6030943868529692, + "learning_rate": 1.4023650816493362e-05, + "loss": 1.6116, + "step": 2256 + }, + { + "epoch": 0.3873348206624335, + "grad_norm": 2.34249074933021, + "learning_rate": 1.401856161941265e-05, + "loss": 1.3736, + "step": 2257 + }, + { + "epoch": 0.3875064355586065, + "grad_norm": 2.2890401834443255, + "learning_rate": 1.4013471180776661e-05, + "loss": 1.5081, + "step": 2258 + }, + { + "epoch": 0.3876780504547795, + "grad_norm": 3.156247168187958, + "learning_rate": 1.4008379502158111e-05, + "loss": 1.7655, + "step": 2259 + }, + { + "epoch": 0.38784966535095244, + "grad_norm": 3.1658938242976715, + "learning_rate": 1.4003286585130106e-05, + "loss": 1.6149, + "step": 2260 + }, + { + "epoch": 0.38802128024712546, + "grad_norm": 2.9296203967992644, + "learning_rate": 1.3998192431266123e-05, + "loss": 1.6677, + "step": 2261 + }, + { + "epoch": 0.3881928951432984, + "grad_norm": 4.16357736507998, + "learning_rate": 1.399309704214003e-05, + "loss": 1.572, + "step": 2262 + }, + { + "epoch": 0.3883645100394714, + "grad_norm": 2.515372102550573, + "learning_rate": 1.3988000419326073e-05, + "loss": 1.7159, + "step": 2263 + }, + { + "epoch": 0.38853612493564443, + "grad_norm": 3.0463829188010574, + "learning_rate": 1.3982902564398876e-05, + "loss": 1.7418, + "step": 2264 + }, + { + "epoch": 0.3887077398318174, + "grad_norm": 3.460164535727435, + "learning_rate": 1.3977803478933455e-05, + "loss": 1.6016, + "step": 2265 + }, + { + "epoch": 
0.3888793547279904, + "grad_norm": 2.6182099348643346, + "learning_rate": 1.3972703164505195e-05, + "loss": 1.5533, + "step": 2266 + }, + { + "epoch": 0.38905096962416336, + "grad_norm": 2.76380799104909, + "learning_rate": 1.3967601622689864e-05, + "loss": 1.6983, + "step": 2267 + }, + { + "epoch": 0.38922258452033637, + "grad_norm": 2.8298724202907732, + "learning_rate": 1.3962498855063606e-05, + "loss": 1.7373, + "step": 2268 + }, + { + "epoch": 0.3893941994165094, + "grad_norm": 2.7153669429483562, + "learning_rate": 1.3957394863202955e-05, + "loss": 1.6579, + "step": 2269 + }, + { + "epoch": 0.38956581431268233, + "grad_norm": 2.390543625370323, + "learning_rate": 1.395228964868481e-05, + "loss": 1.5644, + "step": 2270 + }, + { + "epoch": 0.38973742920885535, + "grad_norm": 3.0979753570571087, + "learning_rate": 1.3947183213086455e-05, + "loss": 1.5409, + "step": 2271 + }, + { + "epoch": 0.3899090441050283, + "grad_norm": 2.4886974863525704, + "learning_rate": 1.394207555798555e-05, + "loss": 1.528, + "step": 2272 + }, + { + "epoch": 0.3900806590012013, + "grad_norm": 3.028471839823337, + "learning_rate": 1.393696668496013e-05, + "loss": 1.7404, + "step": 2273 + }, + { + "epoch": 0.39025227389737427, + "grad_norm": 2.6314129268290167, + "learning_rate": 1.3931856595588609e-05, + "loss": 1.5899, + "step": 2274 + }, + { + "epoch": 0.3904238887935473, + "grad_norm": 2.7668023188081143, + "learning_rate": 1.3926745291449773e-05, + "loss": 1.8042, + "step": 2275 + }, + { + "epoch": 0.3905955036897203, + "grad_norm": 2.7059509113929123, + "learning_rate": 1.3921632774122787e-05, + "loss": 1.7132, + "step": 2276 + }, + { + "epoch": 0.39076711858589325, + "grad_norm": 3.3045309371531886, + "learning_rate": 1.3916519045187192e-05, + "loss": 1.9183, + "step": 2277 + }, + { + "epoch": 0.39093873348206626, + "grad_norm": 2.5797282027846453, + "learning_rate": 1.3911404106222896e-05, + "loss": 1.7507, + "step": 2278 + }, + { + "epoch": 0.3911103483782392, + "grad_norm": 2.7191289241677663, + "learning_rate": 1.3906287958810192e-05, + "loss": 1.6681, + "step": 2279 + }, + { + "epoch": 0.3912819632744122, + "grad_norm": 2.963087524542881, + "learning_rate": 1.3901170604529735e-05, + "loss": 1.907, + "step": 2280 + }, + { + "epoch": 0.3914535781705852, + "grad_norm": 2.6569891391232145, + "learning_rate": 1.3896052044962558e-05, + "loss": 1.6908, + "step": 2281 + }, + { + "epoch": 0.3916251930667582, + "grad_norm": 2.879023993696917, + "learning_rate": 1.3890932281690068e-05, + "loss": 1.5593, + "step": 2282 + }, + { + "epoch": 0.3917968079629312, + "grad_norm": 3.2212504368048727, + "learning_rate": 1.3885811316294042e-05, + "loss": 1.9828, + "step": 2283 + }, + { + "epoch": 0.39196842285910416, + "grad_norm": 3.254504131122837, + "learning_rate": 1.3880689150356633e-05, + "loss": 1.5324, + "step": 2284 + }, + { + "epoch": 0.39214003775527717, + "grad_norm": 2.849534157439421, + "learning_rate": 1.3875565785460348e-05, + "loss": 1.6472, + "step": 2285 + }, + { + "epoch": 0.3923116526514501, + "grad_norm": 3.093295169429665, + "learning_rate": 1.387044122318809e-05, + "loss": 1.6151, + "step": 2286 + }, + { + "epoch": 0.39248326754762314, + "grad_norm": 2.9988856293266926, + "learning_rate": 1.3865315465123112e-05, + "loss": 1.5012, + "step": 2287 + }, + { + "epoch": 0.3926548824437961, + "grad_norm": 3.3798210062064844, + "learning_rate": 1.3860188512849044e-05, + "loss": 1.8044, + "step": 2288 + }, + { + "epoch": 0.3928264973399691, + "grad_norm": 3.184967592285296, + "learning_rate": 
1.3855060367949889e-05, + "loss": 1.7279, + "step": 2289 + }, + { + "epoch": 0.3929981122361421, + "grad_norm": 4.083903163647933, + "learning_rate": 1.384993103201001e-05, + "loss": 1.7003, + "step": 2290 + }, + { + "epoch": 0.39316972713231507, + "grad_norm": 3.239030991580772, + "learning_rate": 1.3844800506614138e-05, + "loss": 1.8288, + "step": 2291 + }, + { + "epoch": 0.3933413420284881, + "grad_norm": 2.5920789014683554, + "learning_rate": 1.383966879334738e-05, + "loss": 1.5926, + "step": 2292 + }, + { + "epoch": 0.39351295692466104, + "grad_norm": 4.2451886486307, + "learning_rate": 1.3834535893795204e-05, + "loss": 1.9014, + "step": 2293 + }, + { + "epoch": 0.39368457182083405, + "grad_norm": 2.7500562719662014, + "learning_rate": 1.3829401809543448e-05, + "loss": 1.7669, + "step": 2294 + }, + { + "epoch": 0.39385618671700706, + "grad_norm": 3.538445334694933, + "learning_rate": 1.3824266542178308e-05, + "loss": 1.5683, + "step": 2295 + }, + { + "epoch": 0.39402780161318, + "grad_norm": 2.352844912970664, + "learning_rate": 1.3819130093286359e-05, + "loss": 1.3802, + "step": 2296 + }, + { + "epoch": 0.39419941650935303, + "grad_norm": 2.739344128865409, + "learning_rate": 1.3813992464454526e-05, + "loss": 1.5835, + "step": 2297 + }, + { + "epoch": 0.394371031405526, + "grad_norm": 2.849176805763815, + "learning_rate": 1.3808853657270112e-05, + "loss": 1.725, + "step": 2298 + }, + { + "epoch": 0.394542646301699, + "grad_norm": 2.888560274894531, + "learning_rate": 1.3803713673320773e-05, + "loss": 1.6273, + "step": 2299 + }, + { + "epoch": 0.39471426119787195, + "grad_norm": 2.9455165438999744, + "learning_rate": 1.3798572514194538e-05, + "loss": 1.851, + "step": 2300 + }, + { + "epoch": 0.39488587609404496, + "grad_norm": 3.119256667747611, + "learning_rate": 1.3793430181479788e-05, + "loss": 1.6184, + "step": 2301 + }, + { + "epoch": 0.395057490990218, + "grad_norm": 3.3863437314608413, + "learning_rate": 1.378828667676528e-05, + "loss": 1.6019, + "step": 2302 + }, + { + "epoch": 0.39522910588639093, + "grad_norm": 3.4413384787839294, + "learning_rate": 1.3783142001640122e-05, + "loss": 1.8528, + "step": 2303 + }, + { + "epoch": 0.39540072078256394, + "grad_norm": 2.6511737602765915, + "learning_rate": 1.3777996157693787e-05, + "loss": 1.7517, + "step": 2304 + }, + { + "epoch": 0.3955723356787369, + "grad_norm": 2.5101174885312374, + "learning_rate": 1.3772849146516114e-05, + "loss": 1.7305, + "step": 2305 + }, + { + "epoch": 0.3957439505749099, + "grad_norm": 3.5403962745653415, + "learning_rate": 1.37677009696973e-05, + "loss": 1.7702, + "step": 2306 + }, + { + "epoch": 0.39591556547108286, + "grad_norm": 3.083670404857814, + "learning_rate": 1.3762551628827892e-05, + "loss": 1.7577, + "step": 2307 + }, + { + "epoch": 0.3960871803672559, + "grad_norm": 2.7012615370850206, + "learning_rate": 1.3757401125498815e-05, + "loss": 1.6658, + "step": 2308 + }, + { + "epoch": 0.3962587952634289, + "grad_norm": 5.123512818791215, + "learning_rate": 1.3752249461301337e-05, + "loss": 1.8537, + "step": 2309 + }, + { + "epoch": 0.39643041015960184, + "grad_norm": 2.7960007122865425, + "learning_rate": 1.3747096637827092e-05, + "loss": 1.5898, + "step": 2310 + }, + { + "epoch": 0.39660202505577485, + "grad_norm": 3.1495010762429465, + "learning_rate": 1.3741942656668075e-05, + "loss": 1.8736, + "step": 2311 + }, + { + "epoch": 0.3967736399519478, + "grad_norm": 3.1866458287117445, + "learning_rate": 1.3736787519416632e-05, + "loss": 1.6445, + "step": 2312 + }, + { + "epoch": 
0.3969452548481208, + "grad_norm": 3.141208465191903, + "learning_rate": 1.3731631227665472e-05, + "loss": 1.8147, + "step": 2313 + }, + { + "epoch": 0.39711686974429383, + "grad_norm": 3.4641279223813055, + "learning_rate": 1.372647378300765e-05, + "loss": 1.7662, + "step": 2314 + }, + { + "epoch": 0.3972884846404668, + "grad_norm": 2.769606806978043, + "learning_rate": 1.3721315187036594e-05, + "loss": 1.5127, + "step": 2315 + }, + { + "epoch": 0.3974600995366398, + "grad_norm": 2.7590882144200206, + "learning_rate": 1.3716155441346078e-05, + "loss": 1.732, + "step": 2316 + }, + { + "epoch": 0.39763171443281276, + "grad_norm": 6.126264387812704, + "learning_rate": 1.3710994547530225e-05, + "loss": 1.7298, + "step": 2317 + }, + { + "epoch": 0.39780332932898577, + "grad_norm": 3.6753904077094206, + "learning_rate": 1.3705832507183528e-05, + "loss": 1.7542, + "step": 2318 + }, + { + "epoch": 0.3979749442251587, + "grad_norm": 3.9291222513709148, + "learning_rate": 1.3700669321900819e-05, + "loss": 1.7058, + "step": 2319 + }, + { + "epoch": 0.39814655912133173, + "grad_norm": 3.2643754407351784, + "learning_rate": 1.3695504993277296e-05, + "loss": 1.6015, + "step": 2320 + }, + { + "epoch": 0.39831817401750474, + "grad_norm": 2.6949351056020636, + "learning_rate": 1.36903395229085e-05, + "loss": 1.6077, + "step": 2321 + }, + { + "epoch": 0.3984897889136777, + "grad_norm": 3.225576783310746, + "learning_rate": 1.3685172912390332e-05, + "loss": 1.7984, + "step": 2322 + }, + { + "epoch": 0.3986614038098507, + "grad_norm": 2.567416719134394, + "learning_rate": 1.3680005163319045e-05, + "loss": 1.6596, + "step": 2323 + }, + { + "epoch": 0.39883301870602367, + "grad_norm": 2.7342326308550757, + "learning_rate": 1.367483627729124e-05, + "loss": 1.7329, + "step": 2324 + }, + { + "epoch": 0.3990046336021967, + "grad_norm": 3.606800295580907, + "learning_rate": 1.366966625590387e-05, + "loss": 1.6412, + "step": 2325 + }, + { + "epoch": 0.39917624849836963, + "grad_norm": 2.5606493236508103, + "learning_rate": 1.3664495100754238e-05, + "loss": 1.4087, + "step": 2326 + }, + { + "epoch": 0.39934786339454265, + "grad_norm": 3.0716962891561366, + "learning_rate": 1.3659322813440005e-05, + "loss": 1.5322, + "step": 2327 + }, + { + "epoch": 0.39951947829071566, + "grad_norm": 3.42781339334228, + "learning_rate": 1.3654149395559171e-05, + "loss": 1.9995, + "step": 2328 + }, + { + "epoch": 0.3996910931868886, + "grad_norm": 2.943030860004717, + "learning_rate": 1.3648974848710088e-05, + "loss": 1.4968, + "step": 2329 + }, + { + "epoch": 0.3998627080830616, + "grad_norm": 2.853168126549502, + "learning_rate": 1.3643799174491467e-05, + "loss": 1.6148, + "step": 2330 + }, + { + "epoch": 0.4000343229792346, + "grad_norm": 3.402481678784887, + "learning_rate": 1.363862237450235e-05, + "loss": 1.6967, + "step": 2331 + }, + { + "epoch": 0.4002059378754076, + "grad_norm": 5.980167444239729, + "learning_rate": 1.3633444450342142e-05, + "loss": 1.6832, + "step": 2332 + }, + { + "epoch": 0.40037755277158055, + "grad_norm": 3.202001229439437, + "learning_rate": 1.3628265403610588e-05, + "loss": 1.6174, + "step": 2333 + }, + { + "epoch": 0.40054916766775356, + "grad_norm": 2.619648985772445, + "learning_rate": 1.3623085235907779e-05, + "loss": 1.723, + "step": 2334 + }, + { + "epoch": 0.40072078256392657, + "grad_norm": 3.2512830265800767, + "learning_rate": 1.3617903948834155e-05, + "loss": 1.7123, + "step": 2335 + }, + { + "epoch": 0.4008923974600995, + "grad_norm": 2.7856136346218574, + "learning_rate": 
1.3612721543990505e-05, + "loss": 1.3425, + "step": 2336 + }, + { + "epoch": 0.40106401235627254, + "grad_norm": 4.225185232909143, + "learning_rate": 1.3607538022977954e-05, + "loss": 1.7757, + "step": 2337 + }, + { + "epoch": 0.4012356272524455, + "grad_norm": 2.344413381213637, + "learning_rate": 1.3602353387397983e-05, + "loss": 1.3488, + "step": 2338 + }, + { + "epoch": 0.4014072421486185, + "grad_norm": 2.39469252637547, + "learning_rate": 1.3597167638852407e-05, + "loss": 1.4525, + "step": 2339 + }, + { + "epoch": 0.4015788570447915, + "grad_norm": 2.6378953989172995, + "learning_rate": 1.3591980778943394e-05, + "loss": 1.8389, + "step": 2340 + }, + { + "epoch": 0.40175047194096447, + "grad_norm": 3.2946064861122837, + "learning_rate": 1.3586792809273447e-05, + "loss": 1.6678, + "step": 2341 + }, + { + "epoch": 0.4019220868371375, + "grad_norm": 2.977018322829481, + "learning_rate": 1.358160373144542e-05, + "loss": 1.6902, + "step": 2342 + }, + { + "epoch": 0.40209370173331044, + "grad_norm": 3.5461002481268245, + "learning_rate": 1.3576413547062502e-05, + "loss": 1.5975, + "step": 2343 + }, + { + "epoch": 0.40226531662948345, + "grad_norm": 3.0455946538432683, + "learning_rate": 1.3571222257728228e-05, + "loss": 1.6142, + "step": 2344 + }, + { + "epoch": 0.4024369315256564, + "grad_norm": 3.761022202203974, + "learning_rate": 1.3566029865046473e-05, + "loss": 1.6955, + "step": 2345 + }, + { + "epoch": 0.4026085464218294, + "grad_norm": 2.8239873542178158, + "learning_rate": 1.3560836370621452e-05, + "loss": 1.6632, + "step": 2346 + }, + { + "epoch": 0.4027801613180024, + "grad_norm": 2.7781326596790885, + "learning_rate": 1.3555641776057729e-05, + "loss": 1.7321, + "step": 2347 + }, + { + "epoch": 0.4029517762141754, + "grad_norm": 3.5853792791806525, + "learning_rate": 1.3550446082960193e-05, + "loss": 1.8062, + "step": 2348 + }, + { + "epoch": 0.4031233911103484, + "grad_norm": 3.178743085830482, + "learning_rate": 1.3545249292934084e-05, + "loss": 1.5969, + "step": 2349 + }, + { + "epoch": 0.40329500600652135, + "grad_norm": 2.594948001736792, + "learning_rate": 1.3540051407584973e-05, + "loss": 1.7455, + "step": 2350 + }, + { + "epoch": 0.40346662090269436, + "grad_norm": 3.302336978380856, + "learning_rate": 1.353485242851878e-05, + "loss": 1.8062, + "step": 2351 + }, + { + "epoch": 0.4036382357988673, + "grad_norm": 3.371819015339554, + "learning_rate": 1.352965235734175e-05, + "loss": 1.8473, + "step": 2352 + }, + { + "epoch": 0.40380985069504033, + "grad_norm": 2.9330305728459813, + "learning_rate": 1.3524451195660472e-05, + "loss": 1.6595, + "step": 2353 + }, + { + "epoch": 0.40398146559121334, + "grad_norm": 2.777512233639564, + "learning_rate": 1.351924894508188e-05, + "loss": 1.6931, + "step": 2354 + }, + { + "epoch": 0.4041530804873863, + "grad_norm": 2.9965345275741218, + "learning_rate": 1.3514045607213227e-05, + "loss": 1.6028, + "step": 2355 + }, + { + "epoch": 0.4043246953835593, + "grad_norm": 3.1946643934831753, + "learning_rate": 1.3508841183662114e-05, + "loss": 1.849, + "step": 2356 + }, + { + "epoch": 0.40449631027973226, + "grad_norm": 3.6069982189040086, + "learning_rate": 1.3503635676036475e-05, + "loss": 1.6741, + "step": 2357 + }, + { + "epoch": 0.4046679251759053, + "grad_norm": 3.8117258427189014, + "learning_rate": 1.3498429085944573e-05, + "loss": 1.6118, + "step": 2358 + }, + { + "epoch": 0.40483954007207823, + "grad_norm": 3.3113205168932427, + "learning_rate": 1.3493221414995021e-05, + "loss": 1.8446, + "step": 2359 + }, + { + "epoch": 
0.40501115496825124, + "grad_norm": 3.2308369849589953, + "learning_rate": 1.3488012664796748e-05, + "loss": 1.6205, + "step": 2360 + }, + { + "epoch": 0.40518276986442425, + "grad_norm": 2.2148697739498564, + "learning_rate": 1.3482802836959028e-05, + "loss": 1.8271, + "step": 2361 + }, + { + "epoch": 0.4053543847605972, + "grad_norm": 3.3635089818766457, + "learning_rate": 1.347759193309146e-05, + "loss": 1.8421, + "step": 2362 + }, + { + "epoch": 0.4055259996567702, + "grad_norm": 2.535274577703987, + "learning_rate": 1.3472379954803984e-05, + "loss": 1.5083, + "step": 2363 + }, + { + "epoch": 0.4056976145529432, + "grad_norm": 2.6862321049664497, + "learning_rate": 1.3467166903706865e-05, + "loss": 1.6044, + "step": 2364 + }, + { + "epoch": 0.4058692294491162, + "grad_norm": 3.899943993554853, + "learning_rate": 1.34619527814107e-05, + "loss": 1.5995, + "step": 2365 + }, + { + "epoch": 0.4060408443452892, + "grad_norm": 2.979986999450808, + "learning_rate": 1.3456737589526428e-05, + "loss": 1.6707, + "step": 2366 + }, + { + "epoch": 0.40621245924146215, + "grad_norm": 2.9725980275188215, + "learning_rate": 1.3451521329665297e-05, + "loss": 1.7393, + "step": 2367 + }, + { + "epoch": 0.40638407413763517, + "grad_norm": 2.6612301630184656, + "learning_rate": 1.3446304003438904e-05, + "loss": 1.5188, + "step": 2368 + }, + { + "epoch": 0.4065556890338081, + "grad_norm": 3.584309254377374, + "learning_rate": 1.3441085612459168e-05, + "loss": 1.6723, + "step": 2369 + }, + { + "epoch": 0.40672730392998113, + "grad_norm": 2.9770255494421045, + "learning_rate": 1.3435866158338335e-05, + "loss": 1.6215, + "step": 2370 + }, + { + "epoch": 0.4068989188261541, + "grad_norm": 3.719197503530315, + "learning_rate": 1.343064564268899e-05, + "loss": 1.8583, + "step": 2371 + }, + { + "epoch": 0.4070705337223271, + "grad_norm": 2.930683693078028, + "learning_rate": 1.3425424067124028e-05, + "loss": 1.7005, + "step": 2372 + }, + { + "epoch": 0.4072421486185001, + "grad_norm": 2.643044662897078, + "learning_rate": 1.342020143325669e-05, + "loss": 1.7285, + "step": 2373 + }, + { + "epoch": 0.40741376351467307, + "grad_norm": 3.266690906591515, + "learning_rate": 1.3414977742700528e-05, + "loss": 1.6536, + "step": 2374 + }, + { + "epoch": 0.4075853784108461, + "grad_norm": 3.272319531950153, + "learning_rate": 1.3409752997069437e-05, + "loss": 1.6254, + "step": 2375 + }, + { + "epoch": 0.40775699330701903, + "grad_norm": 3.6428979251322273, + "learning_rate": 1.340452719797762e-05, + "loss": 1.6311, + "step": 2376 + }, + { + "epoch": 0.40792860820319204, + "grad_norm": 3.1756703884844137, + "learning_rate": 1.339930034703962e-05, + "loss": 1.6189, + "step": 2377 + }, + { + "epoch": 0.408100223099365, + "grad_norm": 2.8586468184662275, + "learning_rate": 1.3394072445870301e-05, + "loss": 1.7896, + "step": 2378 + }, + { + "epoch": 0.408271837995538, + "grad_norm": 4.178899943719079, + "learning_rate": 1.3388843496084843e-05, + "loss": 1.7515, + "step": 2379 + }, + { + "epoch": 0.408443452891711, + "grad_norm": 3.882722390523321, + "learning_rate": 1.3383613499298762e-05, + "loss": 1.708, + "step": 2380 + }, + { + "epoch": 0.408615067787884, + "grad_norm": 3.6748393682110043, + "learning_rate": 1.3378382457127893e-05, + "loss": 1.7525, + "step": 2381 + }, + { + "epoch": 0.408786682684057, + "grad_norm": 3.3915391598273312, + "learning_rate": 1.3373150371188388e-05, + "loss": 1.4559, + "step": 2382 + }, + { + "epoch": 0.40895829758022995, + "grad_norm": 3.133401380831929, + "learning_rate": 1.3367917243096736e-05, 
+ "loss": 1.5121, + "step": 2383 + }, + { + "epoch": 0.40912991247640296, + "grad_norm": 3.0944717255055734, + "learning_rate": 1.3362683074469729e-05, + "loss": 1.6274, + "step": 2384 + }, + { + "epoch": 0.4093015273725759, + "grad_norm": 2.8739402739034885, + "learning_rate": 1.3357447866924498e-05, + "loss": 1.5392, + "step": 2385 + }, + { + "epoch": 0.4094731422687489, + "grad_norm": 4.222459155722447, + "learning_rate": 1.335221162207848e-05, + "loss": 1.7225, + "step": 2386 + }, + { + "epoch": 0.40964475716492194, + "grad_norm": 2.813910407334618, + "learning_rate": 1.3346974341549448e-05, + "loss": 1.6782, + "step": 2387 + }, + { + "epoch": 0.4098163720610949, + "grad_norm": 3.0641748631449603, + "learning_rate": 1.334173602695548e-05, + "loss": 1.6292, + "step": 2388 + }, + { + "epoch": 0.4099879869572679, + "grad_norm": 2.6603098511112746, + "learning_rate": 1.3336496679914982e-05, + "loss": 1.735, + "step": 2389 + }, + { + "epoch": 0.41015960185344086, + "grad_norm": 2.8265725469990577, + "learning_rate": 1.3331256302046682e-05, + "loss": 1.7555, + "step": 2390 + }, + { + "epoch": 0.41033121674961387, + "grad_norm": 2.4889522278307656, + "learning_rate": 1.3326014894969614e-05, + "loss": 1.4641, + "step": 2391 + }, + { + "epoch": 0.4105028316457869, + "grad_norm": 2.31789604208341, + "learning_rate": 1.3320772460303145e-05, + "loss": 1.7797, + "step": 2392 + }, + { + "epoch": 0.41067444654195984, + "grad_norm": 3.938043751594953, + "learning_rate": 1.3315528999666944e-05, + "loss": 1.7933, + "step": 2393 + }, + { + "epoch": 0.41084606143813285, + "grad_norm": 3.038438219113065, + "learning_rate": 1.3310284514681012e-05, + "loss": 1.5516, + "step": 2394 + }, + { + "epoch": 0.4110176763343058, + "grad_norm": 2.6736953526838154, + "learning_rate": 1.3305039006965657e-05, + "loss": 1.7417, + "step": 2395 + }, + { + "epoch": 0.4111892912304788, + "grad_norm": 3.063000070423636, + "learning_rate": 1.3299792478141507e-05, + "loss": 1.8241, + "step": 2396 + }, + { + "epoch": 0.41136090612665177, + "grad_norm": 2.9226457280070544, + "learning_rate": 1.3294544929829503e-05, + "loss": 1.5404, + "step": 2397 + }, + { + "epoch": 0.4115325210228248, + "grad_norm": 2.703950525665708, + "learning_rate": 1.32892963636509e-05, + "loss": 1.7731, + "step": 2398 + }, + { + "epoch": 0.4117041359189978, + "grad_norm": 3.382890800600639, + "learning_rate": 1.328404678122727e-05, + "loss": 1.552, + "step": 2399 + }, + { + "epoch": 0.41187575081517075, + "grad_norm": 2.640777331411481, + "learning_rate": 1.3278796184180504e-05, + "loss": 1.7507, + "step": 2400 + }, + { + "epoch": 0.41204736571134376, + "grad_norm": 2.7852046489690165, + "learning_rate": 1.3273544574132792e-05, + "loss": 1.4589, + "step": 2401 + }, + { + "epoch": 0.4122189806075167, + "grad_norm": 3.028048895113507, + "learning_rate": 1.3268291952706659e-05, + "loss": 1.7759, + "step": 2402 + }, + { + "epoch": 0.4123905955036897, + "grad_norm": 2.3876080773025636, + "learning_rate": 1.3263038321524912e-05, + "loss": 1.6372, + "step": 2403 + }, + { + "epoch": 0.4125622103998627, + "grad_norm": 3.017411027837272, + "learning_rate": 1.3257783682210704e-05, + "loss": 1.7451, + "step": 2404 + }, + { + "epoch": 0.4127338252960357, + "grad_norm": 3.094744460017128, + "learning_rate": 1.325252803638747e-05, + "loss": 1.56, + "step": 2405 + }, + { + "epoch": 0.4129054401922087, + "grad_norm": 2.7251727995724853, + "learning_rate": 1.3247271385678975e-05, + "loss": 1.6569, + "step": 2406 + }, + { + "epoch": 0.41307705508838166, + "grad_norm": 
2.841943556975302, + "learning_rate": 1.324201373170929e-05, + "loss": 1.6427, + "step": 2407 + }, + { + "epoch": 0.4132486699845547, + "grad_norm": 2.6731575017083515, + "learning_rate": 1.323675507610279e-05, + "loss": 1.487, + "step": 2408 + }, + { + "epoch": 0.41342028488072763, + "grad_norm": 2.607854231517078, + "learning_rate": 1.3231495420484167e-05, + "loss": 1.7433, + "step": 2409 + }, + { + "epoch": 0.41359189977690064, + "grad_norm": 3.010281649227339, + "learning_rate": 1.3226234766478413e-05, + "loss": 1.7266, + "step": 2410 + }, + { + "epoch": 0.4137635146730736, + "grad_norm": 2.4791949736503627, + "learning_rate": 1.3220973115710842e-05, + "loss": 1.5271, + "step": 2411 + }, + { + "epoch": 0.4139351295692466, + "grad_norm": 2.8976582872021557, + "learning_rate": 1.3215710469807063e-05, + "loss": 1.6556, + "step": 2412 + }, + { + "epoch": 0.4141067444654196, + "grad_norm": 2.9294474805422532, + "learning_rate": 1.3210446830392997e-05, + "loss": 1.6142, + "step": 2413 + }, + { + "epoch": 0.4142783593615926, + "grad_norm": 2.6853147595404505, + "learning_rate": 1.3205182199094878e-05, + "loss": 1.5329, + "step": 2414 + }, + { + "epoch": 0.4144499742577656, + "grad_norm": 3.7958933499250382, + "learning_rate": 1.3199916577539233e-05, + "loss": 1.6015, + "step": 2415 + }, + { + "epoch": 0.41462158915393854, + "grad_norm": 3.1940831806276875, + "learning_rate": 1.3194649967352913e-05, + "loss": 1.7348, + "step": 2416 + }, + { + "epoch": 0.41479320405011155, + "grad_norm": 3.1472176380998236, + "learning_rate": 1.3189382370163053e-05, + "loss": 1.7635, + "step": 2417 + }, + { + "epoch": 0.41496481894628456, + "grad_norm": 3.8697757346519266, + "learning_rate": 1.318411378759711e-05, + "loss": 1.6577, + "step": 2418 + }, + { + "epoch": 0.4151364338424575, + "grad_norm": 2.925311450489173, + "learning_rate": 1.317884422128284e-05, + "loss": 1.4937, + "step": 2419 + }, + { + "epoch": 0.41530804873863053, + "grad_norm": 3.953168321266579, + "learning_rate": 1.31735736728483e-05, + "loss": 1.7816, + "step": 2420 + }, + { + "epoch": 0.4154796636348035, + "grad_norm": 2.7129619524129147, + "learning_rate": 1.3168302143921858e-05, + "loss": 1.6684, + "step": 2421 + }, + { + "epoch": 0.4156512785309765, + "grad_norm": 3.218811371511017, + "learning_rate": 1.3163029636132176e-05, + "loss": 1.7717, + "step": 2422 + }, + { + "epoch": 0.41582289342714945, + "grad_norm": 2.904854076316205, + "learning_rate": 1.3157756151108222e-05, + "loss": 1.7628, + "step": 2423 + }, + { + "epoch": 0.41599450832332246, + "grad_norm": 3.3180758454482455, + "learning_rate": 1.315248169047927e-05, + "loss": 1.9778, + "step": 2424 + }, + { + "epoch": 0.4161661232194955, + "grad_norm": 2.504613827973436, + "learning_rate": 1.3147206255874886e-05, + "loss": 1.4349, + "step": 2425 + }, + { + "epoch": 0.41633773811566843, + "grad_norm": 2.861541770015547, + "learning_rate": 1.3141929848924947e-05, + "loss": 1.7279, + "step": 2426 + }, + { + "epoch": 0.41650935301184144, + "grad_norm": 3.3695662367857526, + "learning_rate": 1.3136652471259624e-05, + "loss": 1.7407, + "step": 2427 + }, + { + "epoch": 0.4166809679080144, + "grad_norm": 3.136116894075004, + "learning_rate": 1.3131374124509393e-05, + "loss": 1.5172, + "step": 2428 + }, + { + "epoch": 0.4168525828041874, + "grad_norm": 3.025610091269555, + "learning_rate": 1.3126094810305024e-05, + "loss": 1.7815, + "step": 2429 + }, + { + "epoch": 0.41702419770036037, + "grad_norm": 2.8365900127076413, + "learning_rate": 1.3120814530277585e-05, + "loss": 1.6956, + 
"step": 2430 + }, + { + "epoch": 0.4171958125965334, + "grad_norm": 2.813125905370307, + "learning_rate": 1.3115533286058453e-05, + "loss": 1.5698, + "step": 2431 + }, + { + "epoch": 0.4173674274927064, + "grad_norm": 3.4584539128938063, + "learning_rate": 1.3110251079279292e-05, + "loss": 1.4555, + "step": 2432 + }, + { + "epoch": 0.41753904238887934, + "grad_norm": 2.5870102182830137, + "learning_rate": 1.3104967911572066e-05, + "loss": 1.4355, + "step": 2433 + }, + { + "epoch": 0.41771065728505236, + "grad_norm": 3.009675308180714, + "learning_rate": 1.3099683784569036e-05, + "loss": 1.5287, + "step": 2434 + }, + { + "epoch": 0.4178822721812253, + "grad_norm": 3.428458138210264, + "learning_rate": 1.3094398699902761e-05, + "loss": 1.4897, + "step": 2435 + }, + { + "epoch": 0.4180538870773983, + "grad_norm": 3.363686747175313, + "learning_rate": 1.3089112659206098e-05, + "loss": 1.783, + "step": 2436 + }, + { + "epoch": 0.4182255019735713, + "grad_norm": 2.99965081566929, + "learning_rate": 1.3083825664112193e-05, + "loss": 1.7388, + "step": 2437 + }, + { + "epoch": 0.4183971168697443, + "grad_norm": 2.7174261587803015, + "learning_rate": 1.3078537716254496e-05, + "loss": 1.7099, + "step": 2438 + }, + { + "epoch": 0.4185687317659173, + "grad_norm": 2.537515826114268, + "learning_rate": 1.3073248817266734e-05, + "loss": 1.5045, + "step": 2439 + }, + { + "epoch": 0.41874034666209026, + "grad_norm": 2.8839849866664653, + "learning_rate": 1.3067958968782952e-05, + "loss": 1.8548, + "step": 2440 + }, + { + "epoch": 0.41891196155826327, + "grad_norm": 2.6449038262998634, + "learning_rate": 1.306266817243747e-05, + "loss": 1.6012, + "step": 2441 + }, + { + "epoch": 0.4190835764544362, + "grad_norm": 2.9347101693935147, + "learning_rate": 1.3057376429864905e-05, + "loss": 1.6123, + "step": 2442 + }, + { + "epoch": 0.41925519135060924, + "grad_norm": 3.1284678526488965, + "learning_rate": 1.3052083742700172e-05, + "loss": 1.6253, + "step": 2443 + }, + { + "epoch": 0.41942680624678225, + "grad_norm": 3.311166284082919, + "learning_rate": 1.3046790112578474e-05, + "loss": 1.751, + "step": 2444 + }, + { + "epoch": 0.4195984211429552, + "grad_norm": 2.8633745386941145, + "learning_rate": 1.3041495541135303e-05, + "loss": 1.7057, + "step": 2445 + }, + { + "epoch": 0.4197700360391282, + "grad_norm": 3.6268574744930064, + "learning_rate": 1.3036200030006443e-05, + "loss": 1.7908, + "step": 2446 + }, + { + "epoch": 0.41994165093530117, + "grad_norm": 3.1110996297433777, + "learning_rate": 1.3030903580827974e-05, + "loss": 1.8263, + "step": 2447 + }, + { + "epoch": 0.4201132658314742, + "grad_norm": 3.2418486119301906, + "learning_rate": 1.3025606195236255e-05, + "loss": 1.8223, + "step": 2448 + }, + { + "epoch": 0.42028488072764714, + "grad_norm": 2.3828221789929533, + "learning_rate": 1.3020307874867946e-05, + "loss": 1.3161, + "step": 2449 + }, + { + "epoch": 0.42045649562382015, + "grad_norm": 2.656760492514176, + "learning_rate": 1.301500862135999e-05, + "loss": 1.6232, + "step": 2450 + }, + { + "epoch": 0.42062811051999316, + "grad_norm": 3.9695577341569015, + "learning_rate": 1.3009708436349613e-05, + "loss": 1.8714, + "step": 2451 + }, + { + "epoch": 0.4207997254161661, + "grad_norm": 3.011636285342456, + "learning_rate": 1.300440732147434e-05, + "loss": 1.6104, + "step": 2452 + }, + { + "epoch": 0.4209713403123391, + "grad_norm": 3.1213909645924116, + "learning_rate": 1.2999105278371977e-05, + "loss": 1.8676, + "step": 2453 + }, + { + "epoch": 0.4211429552085121, + "grad_norm": 2.581359865085188, 
+ "learning_rate": 1.2993802308680615e-05, + "loss": 1.4625, + "step": 2454 + }, + { + "epoch": 0.4213145701046851, + "grad_norm": 3.5425208790720073, + "learning_rate": 1.2988498414038635e-05, + "loss": 1.6825, + "step": 2455 + }, + { + "epoch": 0.42148618500085805, + "grad_norm": 3.146873128662197, + "learning_rate": 1.2983193596084704e-05, + "loss": 1.6977, + "step": 2456 + }, + { + "epoch": 0.42165779989703106, + "grad_norm": 2.768754597582966, + "learning_rate": 1.2977887856457772e-05, + "loss": 1.4894, + "step": 2457 + }, + { + "epoch": 0.42182941479320407, + "grad_norm": 3.582524608584026, + "learning_rate": 1.297258119679707e-05, + "loss": 1.6369, + "step": 2458 + }, + { + "epoch": 0.422001029689377, + "grad_norm": 2.4417208540393163, + "learning_rate": 1.2967273618742125e-05, + "loss": 1.466, + "step": 2459 + }, + { + "epoch": 0.42217264458555004, + "grad_norm": 3.510206071642065, + "learning_rate": 1.2961965123932738e-05, + "loss": 1.701, + "step": 2460 + }, + { + "epoch": 0.422344259481723, + "grad_norm": 2.659599926797607, + "learning_rate": 1.2956655714008992e-05, + "loss": 1.7297, + "step": 2461 + }, + { + "epoch": 0.422515874377896, + "grad_norm": 3.259899806471716, + "learning_rate": 1.2951345390611265e-05, + "loss": 1.5041, + "step": 2462 + }, + { + "epoch": 0.42268748927406896, + "grad_norm": 5.281876660516446, + "learning_rate": 1.2946034155380196e-05, + "loss": 1.7176, + "step": 2463 + }, + { + "epoch": 0.422859104170242, + "grad_norm": 2.889970708269304, + "learning_rate": 1.2940722009956731e-05, + "loss": 1.6845, + "step": 2464 + }, + { + "epoch": 0.423030719066415, + "grad_norm": 3.0943982460867576, + "learning_rate": 1.2935408955982079e-05, + "loss": 1.6487, + "step": 2465 + }, + { + "epoch": 0.42320233396258794, + "grad_norm": 3.192130937293403, + "learning_rate": 1.2930094995097732e-05, + "loss": 1.7028, + "step": 2466 + }, + { + "epoch": 0.42337394885876095, + "grad_norm": 4.018392818013617, + "learning_rate": 1.2924780128945473e-05, + "loss": 1.9719, + "step": 2467 + }, + { + "epoch": 0.4235455637549339, + "grad_norm": 3.023979770776539, + "learning_rate": 1.2919464359167356e-05, + "loss": 1.5915, + "step": 2468 + }, + { + "epoch": 0.4237171786511069, + "grad_norm": 2.9867689907831028, + "learning_rate": 1.291414768740571e-05, + "loss": 1.623, + "step": 2469 + }, + { + "epoch": 0.42388879354727993, + "grad_norm": 3.792612530145307, + "learning_rate": 1.2908830115303149e-05, + "loss": 1.5804, + "step": 2470 + }, + { + "epoch": 0.4240604084434529, + "grad_norm": 2.6266491565189667, + "learning_rate": 1.2903511644502568e-05, + "loss": 1.7264, + "step": 2471 + }, + { + "epoch": 0.4242320233396259, + "grad_norm": 5.1754152604025725, + "learning_rate": 1.2898192276647135e-05, + "loss": 1.5763, + "step": 2472 + }, + { + "epoch": 0.42440363823579885, + "grad_norm": 3.440325934940613, + "learning_rate": 1.2892872013380293e-05, + "loss": 1.6736, + "step": 2473 + }, + { + "epoch": 0.42457525313197186, + "grad_norm": 2.7819449336433157, + "learning_rate": 1.288755085634577e-05, + "loss": 1.694, + "step": 2474 + }, + { + "epoch": 0.4247468680281448, + "grad_norm": 3.376997882586126, + "learning_rate": 1.2882228807187559e-05, + "loss": 1.6484, + "step": 2475 + }, + { + "epoch": 0.42491848292431783, + "grad_norm": 3.114681558827002, + "learning_rate": 1.2876905867549938e-05, + "loss": 1.7088, + "step": 2476 + }, + { + "epoch": 0.42509009782049084, + "grad_norm": 3.334036704464508, + "learning_rate": 1.2871582039077456e-05, + "loss": 1.6168, + "step": 2477 + }, + { + "epoch": 
0.4252617127166638, + "grad_norm": 3.369345663161789, + "learning_rate": 1.2866257323414936e-05, + "loss": 1.7059, + "step": 2478 + }, + { + "epoch": 0.4254333276128368, + "grad_norm": 2.650222544411065, + "learning_rate": 1.286093172220748e-05, + "loss": 1.5365, + "step": 2479 + }, + { + "epoch": 0.42560494250900976, + "grad_norm": 3.276973658428143, + "learning_rate": 1.2855605237100454e-05, + "loss": 1.7649, + "step": 2480 + }, + { + "epoch": 0.4257765574051828, + "grad_norm": 2.7366032614387468, + "learning_rate": 1.2850277869739509e-05, + "loss": 1.7724, + "step": 2481 + }, + { + "epoch": 0.42594817230135573, + "grad_norm": 3.4422640524324453, + "learning_rate": 1.2844949621770558e-05, + "loss": 1.7285, + "step": 2482 + }, + { + "epoch": 0.42611978719752874, + "grad_norm": 3.3374359649605543, + "learning_rate": 1.283962049483979e-05, + "loss": 1.6632, + "step": 2483 + }, + { + "epoch": 0.42629140209370175, + "grad_norm": 3.032369116566574, + "learning_rate": 1.2834290490593673e-05, + "loss": 1.5318, + "step": 2484 + }, + { + "epoch": 0.4264630169898747, + "grad_norm": 3.600448353448306, + "learning_rate": 1.282895961067893e-05, + "loss": 1.5226, + "step": 2485 + }, + { + "epoch": 0.4266346318860477, + "grad_norm": 5.2462530037375075, + "learning_rate": 1.2823627856742573e-05, + "loss": 1.8112, + "step": 2486 + }, + { + "epoch": 0.4268062467822207, + "grad_norm": 3.370526626330154, + "learning_rate": 1.2818295230431868e-05, + "loss": 1.8661, + "step": 2487 + }, + { + "epoch": 0.4269778616783937, + "grad_norm": 3.4255163512798243, + "learning_rate": 1.2812961733394357e-05, + "loss": 1.8682, + "step": 2488 + }, + { + "epoch": 0.4271494765745667, + "grad_norm": 3.971550612724134, + "learning_rate": 1.2807627367277858e-05, + "loss": 1.6008, + "step": 2489 + }, + { + "epoch": 0.42732109147073966, + "grad_norm": 3.026515880260373, + "learning_rate": 1.2802292133730443e-05, + "loss": 1.5783, + "step": 2490 + }, + { + "epoch": 0.42749270636691267, + "grad_norm": 3.0694245871201504, + "learning_rate": 1.2796956034400467e-05, + "loss": 1.5052, + "step": 2491 + }, + { + "epoch": 0.4276643212630856, + "grad_norm": 3.6119330081681067, + "learning_rate": 1.2791619070936538e-05, + "loss": 1.6925, + "step": 2492 + }, + { + "epoch": 0.42783593615925863, + "grad_norm": 3.0229506148327543, + "learning_rate": 1.2786281244987546e-05, + "loss": 1.7646, + "step": 2493 + }, + { + "epoch": 0.4280075510554316, + "grad_norm": 3.453197359685171, + "learning_rate": 1.2780942558202633e-05, + "loss": 1.5679, + "step": 2494 + }, + { + "epoch": 0.4281791659516046, + "grad_norm": 2.982202091343988, + "learning_rate": 1.2775603012231218e-05, + "loss": 1.6655, + "step": 2495 + }, + { + "epoch": 0.4283507808477776, + "grad_norm": 3.2921049966667373, + "learning_rate": 1.277026260872298e-05, + "loss": 1.8113, + "step": 2496 + }, + { + "epoch": 0.42852239574395057, + "grad_norm": 3.0090259415730416, + "learning_rate": 1.2764921349327864e-05, + "loss": 1.6267, + "step": 2497 + }, + { + "epoch": 0.4286940106401236, + "grad_norm": 3.0477994828356643, + "learning_rate": 1.275957923569608e-05, + "loss": 1.7909, + "step": 2498 + }, + { + "epoch": 0.42886562553629654, + "grad_norm": 3.275654908227982, + "learning_rate": 1.27542362694781e-05, + "loss": 1.7368, + "step": 2499 + }, + { + "epoch": 0.42903724043246955, + "grad_norm": 3.3348718752774444, + "learning_rate": 1.2748892452324661e-05, + "loss": 1.7296, + "step": 2500 + }, + { + "epoch": 0.4292088553286425, + "grad_norm": 3.391286033750643, + "learning_rate": 
1.2743547785886765e-05, + "loss": 1.4789, + "step": 2501 + }, + { + "epoch": 0.4293804702248155, + "grad_norm": 4.007492121640316, + "learning_rate": 1.2738202271815671e-05, + "loss": 1.6838, + "step": 2502 + }, + { + "epoch": 0.4295520851209885, + "grad_norm": 3.098277320150405, + "learning_rate": 1.2732855911762907e-05, + "loss": 1.8444, + "step": 2503 + }, + { + "epoch": 0.4297237000171615, + "grad_norm": 3.1982147931971388, + "learning_rate": 1.2727508707380257e-05, + "loss": 1.7262, + "step": 2504 + }, + { + "epoch": 0.4298953149133345, + "grad_norm": 2.746371600249128, + "learning_rate": 1.2722160660319766e-05, + "loss": 1.5578, + "step": 2505 + }, + { + "epoch": 0.43006692980950745, + "grad_norm": 2.964089110595723, + "learning_rate": 1.271681177223374e-05, + "loss": 1.4712, + "step": 2506 + }, + { + "epoch": 0.43023854470568046, + "grad_norm": 2.9879752219020377, + "learning_rate": 1.2711462044774747e-05, + "loss": 1.8062, + "step": 2507 + }, + { + "epoch": 0.4304101596018534, + "grad_norm": 2.6547171499983238, + "learning_rate": 1.2706111479595613e-05, + "loss": 1.563, + "step": 2508 + }, + { + "epoch": 0.4305817744980264, + "grad_norm": 3.289466648602883, + "learning_rate": 1.270076007834942e-05, + "loss": 1.8348, + "step": 2509 + }, + { + "epoch": 0.43075338939419944, + "grad_norm": 2.70348806902513, + "learning_rate": 1.2695407842689518e-05, + "loss": 1.5201, + "step": 2510 + }, + { + "epoch": 0.4309250042903724, + "grad_norm": 3.371299777829686, + "learning_rate": 1.26900547742695e-05, + "loss": 2.019, + "step": 2511 + }, + { + "epoch": 0.4310966191865454, + "grad_norm": 2.6533607468246148, + "learning_rate": 1.2684700874743228e-05, + "loss": 1.2952, + "step": 2512 + }, + { + "epoch": 0.43126823408271836, + "grad_norm": 2.913953722768904, + "learning_rate": 1.2679346145764812e-05, + "loss": 1.8159, + "step": 2513 + }, + { + "epoch": 0.43143984897889137, + "grad_norm": 3.1425985200207114, + "learning_rate": 1.267399058898863e-05, + "loss": 1.7227, + "step": 2514 + }, + { + "epoch": 0.4316114638750644, + "grad_norm": 3.1812145975071457, + "learning_rate": 1.2668634206069305e-05, + "loss": 1.8711, + "step": 2515 + }, + { + "epoch": 0.43178307877123734, + "grad_norm": 2.778731491270658, + "learning_rate": 1.266327699866172e-05, + "loss": 1.5984, + "step": 2516 + }, + { + "epoch": 0.43195469366741035, + "grad_norm": 2.6114293162120568, + "learning_rate": 1.265791896842101e-05, + "loss": 1.6444, + "step": 2517 + }, + { + "epoch": 0.4321263085635833, + "grad_norm": 3.159336600099177, + "learning_rate": 1.2652560117002567e-05, + "loss": 1.7862, + "step": 2518 + }, + { + "epoch": 0.4322979234597563, + "grad_norm": 2.6823753096696907, + "learning_rate": 1.2647200446062037e-05, + "loss": 1.4726, + "step": 2519 + }, + { + "epoch": 0.4324695383559293, + "grad_norm": 2.6394155243791904, + "learning_rate": 1.2641839957255317e-05, + "loss": 1.6948, + "step": 2520 + }, + { + "epoch": 0.4326411532521023, + "grad_norm": 3.379387051767385, + "learning_rate": 1.2636478652238556e-05, + "loss": 1.5138, + "step": 2521 + }, + { + "epoch": 0.4328127681482753, + "grad_norm": 2.90660532210399, + "learning_rate": 1.2631116532668162e-05, + "loss": 1.7663, + "step": 2522 + }, + { + "epoch": 0.43298438304444825, + "grad_norm": 2.5621277355929717, + "learning_rate": 1.2625753600200784e-05, + "loss": 1.5962, + "step": 2523 + }, + { + "epoch": 0.43315599794062126, + "grad_norm": 2.6803743014738775, + "learning_rate": 1.2620389856493326e-05, + "loss": 1.505, + "step": 2524 + }, + { + "epoch": 0.4333276128367942, 
+ "grad_norm": 2.7100021043004774, + "learning_rate": 1.2615025303202952e-05, + "loss": 1.5001, + "step": 2525 + }, + { + "epoch": 0.43349922773296723, + "grad_norm": 3.2384546908452214, + "learning_rate": 1.260965994198706e-05, + "loss": 1.5471, + "step": 2526 + }, + { + "epoch": 0.4336708426291402, + "grad_norm": 2.504570366147136, + "learning_rate": 1.2604293774503311e-05, + "loss": 1.3906, + "step": 2527 + }, + { + "epoch": 0.4338424575253132, + "grad_norm": 2.6517604418568257, + "learning_rate": 1.259892680240961e-05, + "loss": 1.754, + "step": 2528 + }, + { + "epoch": 0.4340140724214862, + "grad_norm": 3.5500915066469014, + "learning_rate": 1.2593559027364108e-05, + "loss": 1.8265, + "step": 2529 + }, + { + "epoch": 0.43418568731765916, + "grad_norm": 2.6060685890118305, + "learning_rate": 1.2588190451025209e-05, + "loss": 1.8128, + "step": 2530 + }, + { + "epoch": 0.4343573022138322, + "grad_norm": 3.0960523832519042, + "learning_rate": 1.2582821075051561e-05, + "loss": 1.5468, + "step": 2531 + }, + { + "epoch": 0.43452891711000513, + "grad_norm": 3.2563715708076546, + "learning_rate": 1.257745090110206e-05, + "loss": 1.5619, + "step": 2532 + }, + { + "epoch": 0.43470053200617814, + "grad_norm": 2.618860505704093, + "learning_rate": 1.257207993083585e-05, + "loss": 1.8292, + "step": 2533 + }, + { + "epoch": 0.4348721469023511, + "grad_norm": 3.2065405593672485, + "learning_rate": 1.2566708165912322e-05, + "loss": 1.711, + "step": 2534 + }, + { + "epoch": 0.4350437617985241, + "grad_norm": 3.081878371852855, + "learning_rate": 1.2561335607991104e-05, + "loss": 1.5996, + "step": 2535 + }, + { + "epoch": 0.4352153766946971, + "grad_norm": 3.4555479753699974, + "learning_rate": 1.255596225873208e-05, + "loss": 1.9782, + "step": 2536 + }, + { + "epoch": 0.4353869915908701, + "grad_norm": 2.781853752290773, + "learning_rate": 1.2550588119795372e-05, + "loss": 1.5864, + "step": 2537 + }, + { + "epoch": 0.4355586064870431, + "grad_norm": 3.623781301884262, + "learning_rate": 1.254521319284135e-05, + "loss": 1.5382, + "step": 2538 + }, + { + "epoch": 0.43573022138321604, + "grad_norm": 3.4257640805569958, + "learning_rate": 1.2539837479530626e-05, + "loss": 1.6121, + "step": 2539 + }, + { + "epoch": 0.43590183627938905, + "grad_norm": 3.0553389652452947, + "learning_rate": 1.253446098152405e-05, + "loss": 1.8595, + "step": 2540 + }, + { + "epoch": 0.43607345117556207, + "grad_norm": 2.687459921468701, + "learning_rate": 1.2529083700482722e-05, + "loss": 1.6432, + "step": 2541 + }, + { + "epoch": 0.436245066071735, + "grad_norm": 3.1545775205254984, + "learning_rate": 1.252370563806798e-05, + "loss": 1.5666, + "step": 2542 + }, + { + "epoch": 0.43641668096790803, + "grad_norm": 2.8986822006365127, + "learning_rate": 1.2518326795941405e-05, + "loss": 1.6598, + "step": 2543 + }, + { + "epoch": 0.436588295864081, + "grad_norm": 4.054391636555573, + "learning_rate": 1.2512947175764815e-05, + "loss": 1.7002, + "step": 2544 + }, + { + "epoch": 0.436759910760254, + "grad_norm": 3.578548290273076, + "learning_rate": 1.2507566779200273e-05, + "loss": 1.5659, + "step": 2545 + }, + { + "epoch": 0.43693152565642696, + "grad_norm": 3.2339857848140503, + "learning_rate": 1.2502185607910082e-05, + "loss": 1.6849, + "step": 2546 + }, + { + "epoch": 0.43710314055259997, + "grad_norm": 2.678436382132492, + "learning_rate": 1.249680366355678e-05, + "loss": 1.8115, + "step": 2547 + }, + { + "epoch": 0.437274755448773, + "grad_norm": 3.488463153827224, + "learning_rate": 1.2491420947803148e-05, + "loss": 1.5979, 
+ "step": 2548 + }, + { + "epoch": 0.43744637034494593, + "grad_norm": 3.0841260106537947, + "learning_rate": 1.2486037462312202e-05, + "loss": 1.8077, + "step": 2549 + }, + { + "epoch": 0.43761798524111895, + "grad_norm": 3.329999027823043, + "learning_rate": 1.2480653208747198e-05, + "loss": 1.6684, + "step": 2550 + }, + { + "epoch": 0.4377896001372919, + "grad_norm": 3.355867133233169, + "learning_rate": 1.2475268188771628e-05, + "loss": 1.8114, + "step": 2551 + }, + { + "epoch": 0.4379612150334649, + "grad_norm": 2.9116058326019285, + "learning_rate": 1.2469882404049227e-05, + "loss": 1.6893, + "step": 2552 + }, + { + "epoch": 0.43813282992963787, + "grad_norm": 3.819052716207462, + "learning_rate": 1.2464495856243955e-05, + "loss": 1.7297, + "step": 2553 + }, + { + "epoch": 0.4383044448258109, + "grad_norm": 6.273556976261411, + "learning_rate": 1.2459108547020014e-05, + "loss": 1.8098, + "step": 2554 + }, + { + "epoch": 0.4384760597219839, + "grad_norm": 6.812357201085777, + "learning_rate": 1.2453720478041842e-05, + "loss": 1.5496, + "step": 2555 + }, + { + "epoch": 0.43864767461815685, + "grad_norm": 3.101693527083838, + "learning_rate": 1.2448331650974113e-05, + "loss": 1.6682, + "step": 2556 + }, + { + "epoch": 0.43881928951432986, + "grad_norm": 3.7034135251626763, + "learning_rate": 1.244294206748173e-05, + "loss": 1.5942, + "step": 2557 + }, + { + "epoch": 0.4389909044105028, + "grad_norm": 3.5477499843622877, + "learning_rate": 1.2437551729229835e-05, + "loss": 1.7179, + "step": 2558 + }, + { + "epoch": 0.4391625193066758, + "grad_norm": 3.0806240986978275, + "learning_rate": 1.2432160637883794e-05, + "loss": 1.7759, + "step": 2559 + }, + { + "epoch": 0.4393341342028488, + "grad_norm": 3.552008839972291, + "learning_rate": 1.2426768795109223e-05, + "loss": 1.7965, + "step": 2560 + }, + { + "epoch": 0.4395057490990218, + "grad_norm": 3.1483070739792582, + "learning_rate": 1.2421376202571951e-05, + "loss": 1.5987, + "step": 2561 + }, + { + "epoch": 0.4396773639951948, + "grad_norm": 2.7763357843333165, + "learning_rate": 1.2415982861938049e-05, + "loss": 1.5757, + "step": 2562 + }, + { + "epoch": 0.43984897889136776, + "grad_norm": 2.922576816265704, + "learning_rate": 1.2410588774873822e-05, + "loss": 1.8297, + "step": 2563 + }, + { + "epoch": 0.44002059378754077, + "grad_norm": 2.641247574186035, + "learning_rate": 1.2405193943045797e-05, + "loss": 1.382, + "step": 2564 + }, + { + "epoch": 0.4401922086837137, + "grad_norm": 3.122360740773097, + "learning_rate": 1.2399798368120735e-05, + "loss": 1.4621, + "step": 2565 + }, + { + "epoch": 0.44036382357988674, + "grad_norm": 3.6300781946347467, + "learning_rate": 1.2394402051765624e-05, + "loss": 1.732, + "step": 2566 + }, + { + "epoch": 0.44053543847605975, + "grad_norm": 3.6406195478337113, + "learning_rate": 1.2389004995647689e-05, + "loss": 2.0176, + "step": 2567 + }, + { + "epoch": 0.4407070533722327, + "grad_norm": 3.270127926494256, + "learning_rate": 1.2383607201434378e-05, + "loss": 1.618, + "step": 2568 + }, + { + "epoch": 0.4408786682684057, + "grad_norm": 2.622356348934535, + "learning_rate": 1.2378208670793361e-05, + "loss": 1.7736, + "step": 2569 + }, + { + "epoch": 0.44105028316457867, + "grad_norm": 2.9566685792430785, + "learning_rate": 1.2372809405392551e-05, + "loss": 1.6427, + "step": 2570 + }, + { + "epoch": 0.4412218980607517, + "grad_norm": 2.5076323684799164, + "learning_rate": 1.236740940690007e-05, + "loss": 1.5429, + "step": 2571 + }, + { + "epoch": 0.44139351295692464, + "grad_norm": 
2.6831721794783587, + "learning_rate": 1.2362008676984281e-05, + "loss": 1.5456, + "step": 2572 + }, + { + "epoch": 0.44156512785309765, + "grad_norm": 2.823296283918164, + "learning_rate": 1.2356607217313765e-05, + "loss": 1.7139, + "step": 2573 + }, + { + "epoch": 0.44173674274927066, + "grad_norm": 4.120667728216354, + "learning_rate": 1.235120502955733e-05, + "loss": 1.9691, + "step": 2574 + }, + { + "epoch": 0.4419083576454436, + "grad_norm": 2.8717970757022693, + "learning_rate": 1.2345802115384014e-05, + "loss": 1.674, + "step": 2575 + }, + { + "epoch": 0.44207997254161663, + "grad_norm": 2.53361818564715, + "learning_rate": 1.2340398476463069e-05, + "loss": 1.5349, + "step": 2576 + }, + { + "epoch": 0.4422515874377896, + "grad_norm": 2.4607102792534556, + "learning_rate": 1.233499411446398e-05, + "loss": 1.6835, + "step": 2577 + }, + { + "epoch": 0.4424232023339626, + "grad_norm": 8.685491447194668, + "learning_rate": 1.2329589031056454e-05, + "loss": 1.5918, + "step": 2578 + }, + { + "epoch": 0.44259481723013555, + "grad_norm": 3.0363771532608754, + "learning_rate": 1.2324183227910418e-05, + "loss": 1.6851, + "step": 2579 + }, + { + "epoch": 0.44276643212630856, + "grad_norm": 2.5378929876143603, + "learning_rate": 1.2318776706696021e-05, + "loss": 1.9006, + "step": 2580 + }, + { + "epoch": 0.4429380470224816, + "grad_norm": 2.374330167558309, + "learning_rate": 1.2313369469083636e-05, + "loss": 1.423, + "step": 2581 + }, + { + "epoch": 0.44310966191865453, + "grad_norm": 3.053470888666547, + "learning_rate": 1.2307961516743862e-05, + "loss": 1.3583, + "step": 2582 + }, + { + "epoch": 0.44328127681482754, + "grad_norm": 3.2647427085424625, + "learning_rate": 1.2302552851347506e-05, + "loss": 1.5868, + "step": 2583 + }, + { + "epoch": 0.4434528917110005, + "grad_norm": 3.886072547187632, + "learning_rate": 1.229714347456561e-05, + "loss": 1.7482, + "step": 2584 + }, + { + "epoch": 0.4436245066071735, + "grad_norm": 3.0849422437361693, + "learning_rate": 1.2291733388069422e-05, + "loss": 1.5925, + "step": 2585 + }, + { + "epoch": 0.44379612150334646, + "grad_norm": 2.966983680827161, + "learning_rate": 1.228632259353042e-05, + "loss": 1.6899, + "step": 2586 + }, + { + "epoch": 0.4439677363995195, + "grad_norm": 2.546985271873544, + "learning_rate": 1.2280911092620298e-05, + "loss": 1.7253, + "step": 2587 + }, + { + "epoch": 0.4441393512956925, + "grad_norm": 3.1319908880495766, + "learning_rate": 1.2275498887010963e-05, + "loss": 1.7757, + "step": 2588 + }, + { + "epoch": 0.44431096619186544, + "grad_norm": 4.507214578446561, + "learning_rate": 1.2270085978374545e-05, + "loss": 1.6496, + "step": 2589 + }, + { + "epoch": 0.44448258108803845, + "grad_norm": 3.400663752161522, + "learning_rate": 1.2264672368383392e-05, + "loss": 1.7275, + "step": 2590 + }, + { + "epoch": 0.4446541959842114, + "grad_norm": 4.2543266896916005, + "learning_rate": 1.225925805871007e-05, + "loss": 1.6991, + "step": 2591 + }, + { + "epoch": 0.4448258108803844, + "grad_norm": 3.014388705452953, + "learning_rate": 1.2253843051027347e-05, + "loss": 1.7271, + "step": 2592 + }, + { + "epoch": 0.44499742577655743, + "grad_norm": 2.7174643747910965, + "learning_rate": 1.2248427347008224e-05, + "loss": 1.8008, + "step": 2593 + }, + { + "epoch": 0.4451690406727304, + "grad_norm": 3.1056334300432025, + "learning_rate": 1.2243010948325918e-05, + "loss": 1.8224, + "step": 2594 + }, + { + "epoch": 0.4453406555689034, + "grad_norm": 3.0515225248042803, + "learning_rate": 1.2237593856653838e-05, + "loss": 1.7033, + "step": 
2595 + }, + { + "epoch": 0.44551227046507635, + "grad_norm": 3.784391122633235, + "learning_rate": 1.2232176073665631e-05, + "loss": 1.5208, + "step": 2596 + }, + { + "epoch": 0.44568388536124937, + "grad_norm": 2.8289385952920276, + "learning_rate": 1.222675760103515e-05, + "loss": 1.5897, + "step": 2597 + }, + { + "epoch": 0.4458555002574223, + "grad_norm": 2.4983918490154715, + "learning_rate": 1.2221338440436456e-05, + "loss": 1.7346, + "step": 2598 + }, + { + "epoch": 0.44602711515359533, + "grad_norm": 3.269913607176651, + "learning_rate": 1.221591859354383e-05, + "loss": 1.6967, + "step": 2599 + }, + { + "epoch": 0.44619873004976834, + "grad_norm": 3.2950159496429117, + "learning_rate": 1.2210498062031759e-05, + "loss": 1.6056, + "step": 2600 + }, + { + "epoch": 0.4463703449459413, + "grad_norm": 2.9395221388436417, + "learning_rate": 1.2205076847574945e-05, + "loss": 1.5197, + "step": 2601 + }, + { + "epoch": 0.4465419598421143, + "grad_norm": 3.1248826064471156, + "learning_rate": 1.2199654951848301e-05, + "loss": 1.9075, + "step": 2602 + }, + { + "epoch": 0.44671357473828727, + "grad_norm": 3.304105023360127, + "learning_rate": 1.2194232376526948e-05, + "loss": 1.8778, + "step": 2603 + }, + { + "epoch": 0.4468851896344603, + "grad_norm": 2.982924041085048, + "learning_rate": 1.218880912328622e-05, + "loss": 1.7241, + "step": 2604 + }, + { + "epoch": 0.44705680453063323, + "grad_norm": 2.437170032811617, + "learning_rate": 1.2183385193801655e-05, + "loss": 1.5812, + "step": 2605 + }, + { + "epoch": 0.44722841942680625, + "grad_norm": 3.027329515915909, + "learning_rate": 1.2177960589749014e-05, + "loss": 1.9349, + "step": 2606 + }, + { + "epoch": 0.44740003432297926, + "grad_norm": 3.851117223695634, + "learning_rate": 1.2172535312804243e-05, + "loss": 1.8446, + "step": 2607 + }, + { + "epoch": 0.4475716492191522, + "grad_norm": 3.193010343206526, + "learning_rate": 1.2167109364643518e-05, + "loss": 1.6222, + "step": 2608 + }, + { + "epoch": 0.4477432641153252, + "grad_norm": 3.537264674635834, + "learning_rate": 1.216168274694321e-05, + "loss": 1.8511, + "step": 2609 + }, + { + "epoch": 0.4479148790114982, + "grad_norm": 2.751241739903351, + "learning_rate": 1.2156255461379903e-05, + "loss": 1.7101, + "step": 2610 + }, + { + "epoch": 0.4480864939076712, + "grad_norm": 2.603927554587191, + "learning_rate": 1.2150827509630383e-05, + "loss": 1.6773, + "step": 2611 + }, + { + "epoch": 0.44825810880384415, + "grad_norm": 3.0629529017505117, + "learning_rate": 1.2145398893371645e-05, + "loss": 1.7964, + "step": 2612 + }, + { + "epoch": 0.44842972370001716, + "grad_norm": 2.9105349444727318, + "learning_rate": 1.2139969614280886e-05, + "loss": 1.4692, + "step": 2613 + }, + { + "epoch": 0.44860133859619017, + "grad_norm": 3.2146378611423043, + "learning_rate": 1.2134539674035511e-05, + "loss": 1.967, + "step": 2614 + }, + { + "epoch": 0.4487729534923631, + "grad_norm": 4.810921855735679, + "learning_rate": 1.2129109074313126e-05, + "loss": 1.7202, + "step": 2615 + }, + { + "epoch": 0.44894456838853614, + "grad_norm": 3.859967797231811, + "learning_rate": 1.2123677816791546e-05, + "loss": 1.7944, + "step": 2616 + }, + { + "epoch": 0.4491161832847091, + "grad_norm": 2.6340297117677736, + "learning_rate": 1.2118245903148782e-05, + "loss": 1.519, + "step": 2617 + }, + { + "epoch": 0.4492877981808821, + "grad_norm": 3.1083475224696118, + "learning_rate": 1.2112813335063058e-05, + "loss": 1.7049, + "step": 2618 + }, + { + "epoch": 0.4494594130770551, + "grad_norm": 2.857870209351344, + 
"learning_rate": 1.2107380114212788e-05, + "loss": 1.6021, + "step": 2619 + }, + { + "epoch": 0.44963102797322807, + "grad_norm": 2.7377907307525353, + "learning_rate": 1.2101946242276594e-05, + "loss": 1.5989, + "step": 2620 + }, + { + "epoch": 0.4498026428694011, + "grad_norm": 6.086643739019491, + "learning_rate": 1.2096511720933301e-05, + "loss": 1.8588, + "step": 2621 + }, + { + "epoch": 0.44997425776557404, + "grad_norm": 2.8857916443680582, + "learning_rate": 1.2091076551861931e-05, + "loss": 1.6858, + "step": 2622 + }, + { + "epoch": 0.45014587266174705, + "grad_norm": 3.31440482177447, + "learning_rate": 1.2085640736741708e-05, + "loss": 1.6271, + "step": 2623 + }, + { + "epoch": 0.45031748755792, + "grad_norm": 3.7446601859902917, + "learning_rate": 1.2080204277252055e-05, + "loss": 1.8249, + "step": 2624 + }, + { + "epoch": 0.450489102454093, + "grad_norm": 2.708340819102802, + "learning_rate": 1.2074767175072592e-05, + "loss": 1.6096, + "step": 2625 + }, + { + "epoch": 0.450660717350266, + "grad_norm": 2.604841225539455, + "learning_rate": 1.2069329431883144e-05, + "loss": 1.6638, + "step": 2626 + }, + { + "epoch": 0.450832332246439, + "grad_norm": 4.256384406631859, + "learning_rate": 1.2063891049363725e-05, + "loss": 1.4591, + "step": 2627 + }, + { + "epoch": 0.451003947142612, + "grad_norm": 2.8697090254577313, + "learning_rate": 1.2058452029194556e-05, + "loss": 1.7297, + "step": 2628 + }, + { + "epoch": 0.45117556203878495, + "grad_norm": 3.262288916173502, + "learning_rate": 1.2053012373056043e-05, + "loss": 1.5255, + "step": 2629 + }, + { + "epoch": 0.45134717693495796, + "grad_norm": 3.032200308239193, + "learning_rate": 1.2047572082628807e-05, + "loss": 1.8503, + "step": 2630 + }, + { + "epoch": 0.4515187918311309, + "grad_norm": 3.1295044735038404, + "learning_rate": 1.2042131159593643e-05, + "loss": 1.6148, + "step": 2631 + }, + { + "epoch": 0.45169040672730393, + "grad_norm": 2.883092782789536, + "learning_rate": 1.2036689605631557e-05, + "loss": 1.8782, + "step": 2632 + }, + { + "epoch": 0.45186202162347694, + "grad_norm": 3.4864386866907044, + "learning_rate": 1.2031247422423746e-05, + "loss": 1.6105, + "step": 2633 + }, + { + "epoch": 0.4520336365196499, + "grad_norm": 3.420573358637991, + "learning_rate": 1.2025804611651595e-05, + "loss": 1.6429, + "step": 2634 + }, + { + "epoch": 0.4522052514158229, + "grad_norm": 2.9253390573924696, + "learning_rate": 1.2020361174996694e-05, + "loss": 1.4733, + "step": 2635 + }, + { + "epoch": 0.45237686631199586, + "grad_norm": 3.2411935585420366, + "learning_rate": 1.2014917114140823e-05, + "loss": 1.665, + "step": 2636 + }, + { + "epoch": 0.4525484812081689, + "grad_norm": 3.558327855457531, + "learning_rate": 1.2009472430765945e-05, + "loss": 1.7214, + "step": 2637 + }, + { + "epoch": 0.45272009610434183, + "grad_norm": 3.7710362239244937, + "learning_rate": 1.2004027126554225e-05, + "loss": 1.5841, + "step": 2638 + }, + { + "epoch": 0.45289171100051484, + "grad_norm": 3.9690226416401333, + "learning_rate": 1.1998581203188022e-05, + "loss": 1.6546, + "step": 2639 + }, + { + "epoch": 0.45306332589668785, + "grad_norm": 2.7141630145160724, + "learning_rate": 1.199313466234988e-05, + "loss": 1.4723, + "step": 2640 + }, + { + "epoch": 0.4532349407928608, + "grad_norm": 3.1166574525253345, + "learning_rate": 1.1987687505722532e-05, + "loss": 1.4558, + "step": 2641 + }, + { + "epoch": 0.4534065556890338, + "grad_norm": 3.4493588934008863, + "learning_rate": 1.1982239734988912e-05, + "loss": 1.6947, + "step": 2642 + }, + { + 
"epoch": 0.4535781705852068, + "grad_norm": 2.8326284353603284, + "learning_rate": 1.1976791351832128e-05, + "loss": 1.731, + "step": 2643 + }, + { + "epoch": 0.4537497854813798, + "grad_norm": 3.561561852429182, + "learning_rate": 1.1971342357935491e-05, + "loss": 1.6676, + "step": 2644 + }, + { + "epoch": 0.4539214003775528, + "grad_norm": 3.462669755214249, + "learning_rate": 1.1965892754982502e-05, + "loss": 1.747, + "step": 2645 + }, + { + "epoch": 0.45409301527372575, + "grad_norm": 2.9375413525713037, + "learning_rate": 1.1960442544656832e-05, + "loss": 1.6186, + "step": 2646 + }, + { + "epoch": 0.45426463016989876, + "grad_norm": 2.7526503418153494, + "learning_rate": 1.1954991728642358e-05, + "loss": 1.6154, + "step": 2647 + }, + { + "epoch": 0.4544362450660717, + "grad_norm": 3.684326053700525, + "learning_rate": 1.1949540308623138e-05, + "loss": 1.7381, + "step": 2648 + }, + { + "epoch": 0.45460785996224473, + "grad_norm": 3.3167738095803356, + "learning_rate": 1.1944088286283414e-05, + "loss": 1.6568, + "step": 2649 + }, + { + "epoch": 0.4547794748584177, + "grad_norm": 3.0301356368030583, + "learning_rate": 1.1938635663307617e-05, + "loss": 1.7196, + "step": 2650 + }, + { + "epoch": 0.4549510897545907, + "grad_norm": 2.3088339456853477, + "learning_rate": 1.1933182441380362e-05, + "loss": 1.5877, + "step": 2651 + }, + { + "epoch": 0.4551227046507637, + "grad_norm": 2.7664559466035787, + "learning_rate": 1.1927728622186455e-05, + "loss": 1.7043, + "step": 2652 + }, + { + "epoch": 0.45529431954693667, + "grad_norm": 2.8404715224080244, + "learning_rate": 1.1922274207410874e-05, + "loss": 1.598, + "step": 2653 + }, + { + "epoch": 0.4554659344431097, + "grad_norm": 3.6439976766696143, + "learning_rate": 1.1916819198738794e-05, + "loss": 1.6171, + "step": 2654 + }, + { + "epoch": 0.45563754933928263, + "grad_norm": 3.7464085059845873, + "learning_rate": 1.1911363597855563e-05, + "loss": 1.5376, + "step": 2655 + }, + { + "epoch": 0.45580916423545564, + "grad_norm": 2.753182859305328, + "learning_rate": 1.190590740644672e-05, + "loss": 1.6349, + "step": 2656 + }, + { + "epoch": 0.4559807791316286, + "grad_norm": 3.0041484309612083, + "learning_rate": 1.1900450626197987e-05, + "loss": 1.7227, + "step": 2657 + }, + { + "epoch": 0.4561523940278016, + "grad_norm": 2.7284218874273454, + "learning_rate": 1.1894993258795258e-05, + "loss": 1.7016, + "step": 2658 + }, + { + "epoch": 0.4563240089239746, + "grad_norm": 3.317737226332948, + "learning_rate": 1.1889535305924619e-05, + "loss": 1.821, + "step": 2659 + }, + { + "epoch": 0.4564956238201476, + "grad_norm": 3.025170055300962, + "learning_rate": 1.188407676927233e-05, + "loss": 1.5479, + "step": 2660 + }, + { + "epoch": 0.4566672387163206, + "grad_norm": 2.777247628260142, + "learning_rate": 1.1878617650524832e-05, + "loss": 1.7292, + "step": 2661 + }, + { + "epoch": 0.45683885361249355, + "grad_norm": 3.3885661872740664, + "learning_rate": 1.1873157951368757e-05, + "loss": 1.7317, + "step": 2662 + }, + { + "epoch": 0.45701046850866656, + "grad_norm": 2.6438284387144746, + "learning_rate": 1.1867697673490897e-05, + "loss": 1.7308, + "step": 2663 + }, + { + "epoch": 0.45718208340483957, + "grad_norm": 3.507458287421913, + "learning_rate": 1.1862236818578239e-05, + "loss": 1.598, + "step": 2664 + }, + { + "epoch": 0.4573536983010125, + "grad_norm": 2.910386059235125, + "learning_rate": 1.1856775388317936e-05, + "loss": 1.7031, + "step": 2665 + }, + { + "epoch": 0.45752531319718553, + "grad_norm": 2.9085010167200527, + "learning_rate": 
1.1851313384397334e-05, + "loss": 1.6195, + "step": 2666 + }, + { + "epoch": 0.4576969280933585, + "grad_norm": 3.1498527021788005, + "learning_rate": 1.1845850808503939e-05, + "loss": 1.7393, + "step": 2667 + }, + { + "epoch": 0.4578685429895315, + "grad_norm": 3.2923162633083547, + "learning_rate": 1.1840387662325443e-05, + "loss": 1.7012, + "step": 2668 + }, + { + "epoch": 0.45804015788570446, + "grad_norm": 2.9412764936047098, + "learning_rate": 1.183492394754972e-05, + "loss": 1.6338, + "step": 2669 + }, + { + "epoch": 0.45821177278187747, + "grad_norm": 3.1298385869917182, + "learning_rate": 1.1829459665864806e-05, + "loss": 1.7253, + "step": 2670 + }, + { + "epoch": 0.4583833876780505, + "grad_norm": 3.351910431765254, + "learning_rate": 1.1823994818958922e-05, + "loss": 1.6607, + "step": 2671 + }, + { + "epoch": 0.45855500257422344, + "grad_norm": 2.8272532550704863, + "learning_rate": 1.1818529408520458e-05, + "loss": 1.691, + "step": 2672 + }, + { + "epoch": 0.45872661747039645, + "grad_norm": 2.7253444714846187, + "learning_rate": 1.1813063436237981e-05, + "loss": 1.5327, + "step": 2673 + }, + { + "epoch": 0.4588982323665694, + "grad_norm": 2.669128107949888, + "learning_rate": 1.1807596903800234e-05, + "loss": 1.6484, + "step": 2674 + }, + { + "epoch": 0.4590698472627424, + "grad_norm": 2.777250932196786, + "learning_rate": 1.1802129812896128e-05, + "loss": 1.5595, + "step": 2675 + }, + { + "epoch": 0.45924146215891537, + "grad_norm": 2.827236054270125, + "learning_rate": 1.1796662165214752e-05, + "loss": 1.5394, + "step": 2676 + }, + { + "epoch": 0.4594130770550884, + "grad_norm": 4.2681465656367195, + "learning_rate": 1.1791193962445358e-05, + "loss": 1.7712, + "step": 2677 + }, + { + "epoch": 0.4595846919512614, + "grad_norm": 2.963496890023793, + "learning_rate": 1.178572520627738e-05, + "loss": 1.851, + "step": 2678 + }, + { + "epoch": 0.45975630684743435, + "grad_norm": 3.074764507272029, + "learning_rate": 1.1780255898400417e-05, + "loss": 1.696, + "step": 2679 + }, + { + "epoch": 0.45992792174360736, + "grad_norm": 3.3183106790862897, + "learning_rate": 1.1774786040504238e-05, + "loss": 1.7456, + "step": 2680 + }, + { + "epoch": 0.4600995366397803, + "grad_norm": 2.7424655175457757, + "learning_rate": 1.1769315634278791e-05, + "loss": 1.4808, + "step": 2681 + }, + { + "epoch": 0.4602711515359533, + "grad_norm": 2.823533839590037, + "learning_rate": 1.1763844681414177e-05, + "loss": 1.7836, + "step": 2682 + }, + { + "epoch": 0.4604427664321263, + "grad_norm": 3.590904953923604, + "learning_rate": 1.1758373183600678e-05, + "loss": 1.7765, + "step": 2683 + }, + { + "epoch": 0.4606143813282993, + "grad_norm": 7.192544980367173, + "learning_rate": 1.1752901142528743e-05, + "loss": 1.7518, + "step": 2684 + }, + { + "epoch": 0.4607859962244723, + "grad_norm": 7.8031651625040785, + "learning_rate": 1.1747428559888985e-05, + "loss": 1.7665, + "step": 2685 + }, + { + "epoch": 0.46095761112064526, + "grad_norm": 3.340410247520449, + "learning_rate": 1.174195543737219e-05, + "loss": 1.7036, + "step": 2686 + }, + { + "epoch": 0.46112922601681827, + "grad_norm": 3.3075338998799113, + "learning_rate": 1.1736481776669307e-05, + "loss": 1.6083, + "step": 2687 + }, + { + "epoch": 0.46130084091299123, + "grad_norm": 3.8067125939464765, + "learning_rate": 1.1731007579471445e-05, + "loss": 1.8282, + "step": 2688 + }, + { + "epoch": 0.46147245580916424, + "grad_norm": 5.399574711714645, + "learning_rate": 1.1725532847469892e-05, + "loss": 1.4582, + "step": 2689 + }, + { + "epoch": 
0.46164407070533725, + "grad_norm": 2.9299575208365343, + "learning_rate": 1.1720057582356096e-05, + "loss": 1.782, + "step": 2690 + }, + { + "epoch": 0.4618156856015102, + "grad_norm": 4.5794181157037785, + "learning_rate": 1.1714581785821662e-05, + "loss": 1.7492, + "step": 2691 + }, + { + "epoch": 0.4619873004976832, + "grad_norm": 2.8746188121776295, + "learning_rate": 1.1709105459558367e-05, + "loss": 1.6018, + "step": 2692 + }, + { + "epoch": 0.4621589153938562, + "grad_norm": 2.5910494670148787, + "learning_rate": 1.1703628605258157e-05, + "loss": 1.5519, + "step": 2693 + }, + { + "epoch": 0.4623305302900292, + "grad_norm": 2.765781281656966, + "learning_rate": 1.1698151224613126e-05, + "loss": 1.6565, + "step": 2694 + }, + { + "epoch": 0.46250214518620214, + "grad_norm": 3.258243752870418, + "learning_rate": 1.1692673319315541e-05, + "loss": 1.7346, + "step": 2695 + }, + { + "epoch": 0.46267376008237515, + "grad_norm": 2.659443629422332, + "learning_rate": 1.1687194891057834e-05, + "loss": 1.6452, + "step": 2696 + }, + { + "epoch": 0.46284537497854816, + "grad_norm": 2.829519925969302, + "learning_rate": 1.1681715941532583e-05, + "loss": 1.7583, + "step": 2697 + }, + { + "epoch": 0.4630169898747211, + "grad_norm": 2.926030715759295, + "learning_rate": 1.1676236472432549e-05, + "loss": 1.5648, + "step": 2698 + }, + { + "epoch": 0.46318860477089413, + "grad_norm": 3.6639245475998012, + "learning_rate": 1.1670756485450638e-05, + "loss": 1.6513, + "step": 2699 + }, + { + "epoch": 0.4633602196670671, + "grad_norm": 2.408512108128582, + "learning_rate": 1.1665275982279918e-05, + "loss": 1.403, + "step": 2700 + }, + { + "epoch": 0.4635318345632401, + "grad_norm": 2.763291247900218, + "learning_rate": 1.1659794964613619e-05, + "loss": 1.4791, + "step": 2701 + }, + { + "epoch": 0.46370344945941305, + "grad_norm": 2.682938418598613, + "learning_rate": 1.1654313434145133e-05, + "loss": 1.7929, + "step": 2702 + }, + { + "epoch": 0.46387506435558606, + "grad_norm": 3.0475204545208685, + "learning_rate": 1.1648831392568005e-05, + "loss": 1.5719, + "step": 2703 + }, + { + "epoch": 0.4640466792517591, + "grad_norm": 3.3354148124435863, + "learning_rate": 1.1643348841575936e-05, + "loss": 1.7212, + "step": 2704 + }, + { + "epoch": 0.46421829414793203, + "grad_norm": 2.757606244850724, + "learning_rate": 1.1637865782862799e-05, + "loss": 1.6638, + "step": 2705 + }, + { + "epoch": 0.46438990904410504, + "grad_norm": 3.342908392786111, + "learning_rate": 1.16323822181226e-05, + "loss": 1.7582, + "step": 2706 + }, + { + "epoch": 0.464561523940278, + "grad_norm": 2.443113216571998, + "learning_rate": 1.1626898149049523e-05, + "loss": 1.5732, + "step": 2707 + }, + { + "epoch": 0.464733138836451, + "grad_norm": 3.0684047175990234, + "learning_rate": 1.1621413577337897e-05, + "loss": 1.5908, + "step": 2708 + }, + { + "epoch": 0.46490475373262397, + "grad_norm": 3.1924423834773026, + "learning_rate": 1.1615928504682208e-05, + "loss": 1.8335, + "step": 2709 + }, + { + "epoch": 0.465076368628797, + "grad_norm": 3.8904689495166744, + "learning_rate": 1.1610442932777098e-05, + "loss": 1.9934, + "step": 2710 + }, + { + "epoch": 0.46524798352497, + "grad_norm": 3.089336450071173, + "learning_rate": 1.1604956863317365e-05, + "loss": 1.6054, + "step": 2711 + }, + { + "epoch": 0.46541959842114294, + "grad_norm": 3.2967036052136587, + "learning_rate": 1.1599470297997955e-05, + "loss": 1.5544, + "step": 2712 + }, + { + "epoch": 0.46559121331731596, + "grad_norm": 2.5759711359977615, + "learning_rate": 
1.1593983238513971e-05, + "loss": 1.4312, + "step": 2713 + }, + { + "epoch": 0.4657628282134889, + "grad_norm": 2.822059112800912, + "learning_rate": 1.1588495686560667e-05, + "loss": 1.5536, + "step": 2714 + }, + { + "epoch": 0.4659344431096619, + "grad_norm": 2.6844670507249067, + "learning_rate": 1.1583007643833456e-05, + "loss": 1.6419, + "step": 2715 + }, + { + "epoch": 0.46610605800583493, + "grad_norm": 3.257506718681694, + "learning_rate": 1.157751911202789e-05, + "loss": 1.7382, + "step": 2716 + }, + { + "epoch": 0.4662776729020079, + "grad_norm": 3.2333817892731873, + "learning_rate": 1.157203009283969e-05, + "loss": 1.6799, + "step": 2717 + }, + { + "epoch": 0.4664492877981809, + "grad_norm": 2.974785354171492, + "learning_rate": 1.1566540587964701e-05, + "loss": 1.7748, + "step": 2718 + }, + { + "epoch": 0.46662090269435386, + "grad_norm": 3.3615610331804047, + "learning_rate": 1.1561050599098946e-05, + "loss": 1.585, + "step": 2719 + }, + { + "epoch": 0.46679251759052687, + "grad_norm": 3.4648143164697305, + "learning_rate": 1.1555560127938581e-05, + "loss": 1.6071, + "step": 2720 + }, + { + "epoch": 0.4669641324866998, + "grad_norm": 3.0709026539349082, + "learning_rate": 1.1550069176179912e-05, + "loss": 2.0446, + "step": 2721 + }, + { + "epoch": 0.46713574738287283, + "grad_norm": 2.924799060020064, + "learning_rate": 1.1544577745519404e-05, + "loss": 1.5837, + "step": 2722 + }, + { + "epoch": 0.46730736227904585, + "grad_norm": 2.8733459227228977, + "learning_rate": 1.1539085837653658e-05, + "loss": 1.5945, + "step": 2723 + }, + { + "epoch": 0.4674789771752188, + "grad_norm": 2.957541030174589, + "learning_rate": 1.153359345427943e-05, + "loss": 1.6443, + "step": 2724 + }, + { + "epoch": 0.4676505920713918, + "grad_norm": 4.200287007029637, + "learning_rate": 1.1528100597093617e-05, + "loss": 1.7809, + "step": 2725 + }, + { + "epoch": 0.46782220696756477, + "grad_norm": 3.0665873185535135, + "learning_rate": 1.1522607267793267e-05, + "loss": 1.8688, + "step": 2726 + }, + { + "epoch": 0.4679938218637378, + "grad_norm": 2.649840227315986, + "learning_rate": 1.1517113468075571e-05, + "loss": 1.5216, + "step": 2727 + }, + { + "epoch": 0.46816543675991074, + "grad_norm": 3.1182687055183287, + "learning_rate": 1.1511619199637867e-05, + "loss": 1.801, + "step": 2728 + }, + { + "epoch": 0.46833705165608375, + "grad_norm": 3.722326583440218, + "learning_rate": 1.1506124464177644e-05, + "loss": 1.7242, + "step": 2729 + }, + { + "epoch": 0.46850866655225676, + "grad_norm": 3.418809906180343, + "learning_rate": 1.1500629263392515e-05, + "loss": 1.5194, + "step": 2730 + }, + { + "epoch": 0.4686802814484297, + "grad_norm": 2.8030386222127626, + "learning_rate": 1.1495133598980263e-05, + "loss": 1.669, + "step": 2731 + }, + { + "epoch": 0.4688518963446027, + "grad_norm": 2.8254178012646873, + "learning_rate": 1.14896374726388e-05, + "loss": 1.5963, + "step": 2732 + }, + { + "epoch": 0.4690235112407757, + "grad_norm": 3.268378773235964, + "learning_rate": 1.1484140886066175e-05, + "loss": 1.7468, + "step": 2733 + }, + { + "epoch": 0.4691951261369487, + "grad_norm": 3.2343858464434643, + "learning_rate": 1.1478643840960593e-05, + "loss": 1.5206, + "step": 2734 + }, + { + "epoch": 0.46936674103312165, + "grad_norm": 3.101633034688997, + "learning_rate": 1.1473146339020395e-05, + "loss": 1.7645, + "step": 2735 + }, + { + "epoch": 0.46953835592929466, + "grad_norm": 3.261941115428923, + "learning_rate": 1.1467648381944059e-05, + "loss": 1.5458, + "step": 2736 + }, + { + "epoch": 
0.46970997082546767, + "grad_norm": 3.2000324926959536, + "learning_rate": 1.1462149971430207e-05, + "loss": 1.6502, + "step": 2737 + }, + { + "epoch": 0.4698815857216406, + "grad_norm": 2.683811909228767, + "learning_rate": 1.1456651109177606e-05, + "loss": 1.785, + "step": 2738 + }, + { + "epoch": 0.47005320061781364, + "grad_norm": 3.473137594881375, + "learning_rate": 1.1451151796885152e-05, + "loss": 1.9541, + "step": 2739 + }, + { + "epoch": 0.4702248155139866, + "grad_norm": 3.89951515926513, + "learning_rate": 1.1445652036251889e-05, + "loss": 1.7742, + "step": 2740 + }, + { + "epoch": 0.4703964304101596, + "grad_norm": 2.8914918340040403, + "learning_rate": 1.1440151828976997e-05, + "loss": 1.6492, + "step": 2741 + }, + { + "epoch": 0.4705680453063326, + "grad_norm": 3.8691061908734024, + "learning_rate": 1.1434651176759788e-05, + "loss": 1.5376, + "step": 2742 + }, + { + "epoch": 0.47073966020250557, + "grad_norm": 2.554098204939939, + "learning_rate": 1.1429150081299725e-05, + "loss": 1.4747, + "step": 2743 + }, + { + "epoch": 0.4709112750986786, + "grad_norm": 2.9892517455549465, + "learning_rate": 1.1423648544296396e-05, + "loss": 1.6242, + "step": 2744 + }, + { + "epoch": 0.47108288999485154, + "grad_norm": 4.264472527148404, + "learning_rate": 1.1418146567449524e-05, + "loss": 1.5582, + "step": 2745 + }, + { + "epoch": 0.47125450489102455, + "grad_norm": 2.5160757051279337, + "learning_rate": 1.141264415245898e-05, + "loss": 1.4321, + "step": 2746 + }, + { + "epoch": 0.4714261197871975, + "grad_norm": 3.6144603903357413, + "learning_rate": 1.1407141301024762e-05, + "loss": 1.8127, + "step": 2747 + }, + { + "epoch": 0.4715977346833705, + "grad_norm": 3.50259899934638, + "learning_rate": 1.1401638014847004e-05, + "loss": 1.6851, + "step": 2748 + }, + { + "epoch": 0.47176934957954353, + "grad_norm": 3.268806407081182, + "learning_rate": 1.1396134295625971e-05, + "loss": 1.7497, + "step": 2749 + }, + { + "epoch": 0.4719409644757165, + "grad_norm": 2.6039966305613738, + "learning_rate": 1.1390630145062069e-05, + "loss": 1.4863, + "step": 2750 + }, + { + "epoch": 0.4721125793718895, + "grad_norm": 3.18163953916345, + "learning_rate": 1.1385125564855834e-05, + "loss": 1.6147, + "step": 2751 + }, + { + "epoch": 0.47228419426806245, + "grad_norm": 2.5101807829812333, + "learning_rate": 1.1379620556707934e-05, + "loss": 1.4585, + "step": 2752 + }, + { + "epoch": 0.47245580916423546, + "grad_norm": 3.116412550758941, + "learning_rate": 1.1374115122319168e-05, + "loss": 1.837, + "step": 2753 + }, + { + "epoch": 0.4726274240604084, + "grad_norm": 2.6387781732206705, + "learning_rate": 1.136860926339047e-05, + "loss": 1.4391, + "step": 2754 + }, + { + "epoch": 0.47279903895658143, + "grad_norm": 3.271791696632264, + "learning_rate": 1.13631029816229e-05, + "loss": 1.7422, + "step": 2755 + }, + { + "epoch": 0.47297065385275444, + "grad_norm": 3.0034510366163345, + "learning_rate": 1.1357596278717655e-05, + "loss": 1.7334, + "step": 2756 + }, + { + "epoch": 0.4731422687489274, + "grad_norm": 6.813218808858523, + "learning_rate": 1.1352089156376059e-05, + "loss": 1.7685, + "step": 2757 + }, + { + "epoch": 0.4733138836451004, + "grad_norm": 3.3620016308789182, + "learning_rate": 1.1346581616299565e-05, + "loss": 1.648, + "step": 2758 + }, + { + "epoch": 0.47348549854127336, + "grad_norm": 3.2204626114586055, + "learning_rate": 1.1341073660189761e-05, + "loss": 1.5946, + "step": 2759 + }, + { + "epoch": 0.4736571134374464, + "grad_norm": 3.23109584682806, + "learning_rate": 
1.133556528974835e-05, + "loss": 1.4936, + "step": 2760 + }, + { + "epoch": 0.47382872833361933, + "grad_norm": 3.04568311245728, + "learning_rate": 1.1330056506677174e-05, + "loss": 1.605, + "step": 2761 + }, + { + "epoch": 0.47400034322979234, + "grad_norm": 3.4639504586409586, + "learning_rate": 1.1324547312678203e-05, + "loss": 1.9035, + "step": 2762 + }, + { + "epoch": 0.47417195812596535, + "grad_norm": 4.350620942665327, + "learning_rate": 1.1319037709453527e-05, + "loss": 1.523, + "step": 2763 + }, + { + "epoch": 0.4743435730221383, + "grad_norm": 3.2043148680157008, + "learning_rate": 1.1313527698705367e-05, + "loss": 1.4964, + "step": 2764 + }, + { + "epoch": 0.4745151879183113, + "grad_norm": 3.3011732397718747, + "learning_rate": 1.1308017282136074e-05, + "loss": 1.7793, + "step": 2765 + }, + { + "epoch": 0.4746868028144843, + "grad_norm": 2.823107797702438, + "learning_rate": 1.1302506461448112e-05, + "loss": 1.5238, + "step": 2766 + }, + { + "epoch": 0.4748584177106573, + "grad_norm": 3.1758629735880897, + "learning_rate": 1.1296995238344084e-05, + "loss": 1.6741, + "step": 2767 + }, + { + "epoch": 0.4750300326068303, + "grad_norm": 3.4470923615266544, + "learning_rate": 1.1291483614526707e-05, + "loss": 1.7997, + "step": 2768 + }, + { + "epoch": 0.47520164750300325, + "grad_norm": 4.848256642634677, + "learning_rate": 1.1285971591698825e-05, + "loss": 1.6794, + "step": 2769 + }, + { + "epoch": 0.47537326239917627, + "grad_norm": 3.325807318614395, + "learning_rate": 1.1280459171563409e-05, + "loss": 1.6517, + "step": 2770 + }, + { + "epoch": 0.4755448772953492, + "grad_norm": 2.4559976190038086, + "learning_rate": 1.1274946355823544e-05, + "loss": 1.519, + "step": 2771 + }, + { + "epoch": 0.47571649219152223, + "grad_norm": 3.0289664975297685, + "learning_rate": 1.1269433146182452e-05, + "loss": 1.6459, + "step": 2772 + }, + { + "epoch": 0.4758881070876952, + "grad_norm": 2.744270545965551, + "learning_rate": 1.1263919544343458e-05, + "loss": 1.4744, + "step": 2773 + }, + { + "epoch": 0.4760597219838682, + "grad_norm": 3.400099484689994, + "learning_rate": 1.1258405552010024e-05, + "loss": 1.6912, + "step": 2774 + }, + { + "epoch": 0.4762313368800412, + "grad_norm": 2.626946394496932, + "learning_rate": 1.125289117088572e-05, + "loss": 1.5595, + "step": 2775 + }, + { + "epoch": 0.47640295177621417, + "grad_norm": 2.5841259694104064, + "learning_rate": 1.1247376402674248e-05, + "loss": 1.7375, + "step": 2776 + }, + { + "epoch": 0.4765745666723872, + "grad_norm": 3.1479848933931387, + "learning_rate": 1.1241861249079421e-05, + "loss": 1.6634, + "step": 2777 + }, + { + "epoch": 0.47674618156856013, + "grad_norm": 2.898102679925583, + "learning_rate": 1.1236345711805178e-05, + "loss": 1.553, + "step": 2778 + }, + { + "epoch": 0.47691779646473315, + "grad_norm": 3.0310873410122596, + "learning_rate": 1.1230829792555571e-05, + "loss": 1.92, + "step": 2779 + }, + { + "epoch": 0.4770894113609061, + "grad_norm": 3.407614338605185, + "learning_rate": 1.1225313493034766e-05, + "loss": 1.9593, + "step": 2780 + }, + { + "epoch": 0.4772610262570791, + "grad_norm": 3.1261188715414754, + "learning_rate": 1.121979681494706e-05, + "loss": 1.7933, + "step": 2781 + }, + { + "epoch": 0.4774326411532521, + "grad_norm": 3.4222389957318073, + "learning_rate": 1.1214279759996856e-05, + "loss": 1.5977, + "step": 2782 + }, + { + "epoch": 0.4776042560494251, + "grad_norm": 2.918431014030688, + "learning_rate": 1.1208762329888672e-05, + "loss": 1.676, + "step": 2783 + }, + { + "epoch": 
0.4777758709455981, + "grad_norm": 3.661972980629568, + "learning_rate": 1.1203244526327154e-05, + "loss": 1.8453, + "step": 2784 + }, + { + "epoch": 0.47794748584177105, + "grad_norm": 2.916482627882997, + "learning_rate": 1.1197726351017052e-05, + "loss": 1.5532, + "step": 2785 + }, + { + "epoch": 0.47811910073794406, + "grad_norm": 2.925320552586996, + "learning_rate": 1.1192207805663235e-05, + "loss": 1.6373, + "step": 2786 + }, + { + "epoch": 0.478290715634117, + "grad_norm": 4.004958318213895, + "learning_rate": 1.1186688891970686e-05, + "loss": 1.8329, + "step": 2787 + }, + { + "epoch": 0.47846233053029, + "grad_norm": 3.3370594836402296, + "learning_rate": 1.11811696116445e-05, + "loss": 1.7443, + "step": 2788 + }, + { + "epoch": 0.47863394542646304, + "grad_norm": 4.24103521437125, + "learning_rate": 1.117564996638989e-05, + "loss": 1.6517, + "step": 2789 + }, + { + "epoch": 0.478805560322636, + "grad_norm": 3.502691871151845, + "learning_rate": 1.1170129957912178e-05, + "loss": 1.748, + "step": 2790 + }, + { + "epoch": 0.478977175218809, + "grad_norm": 2.8419903616400917, + "learning_rate": 1.11646095879168e-05, + "loss": 1.2875, + "step": 2791 + }, + { + "epoch": 0.47914879011498196, + "grad_norm": 3.1084119414899987, + "learning_rate": 1.1159088858109297e-05, + "loss": 1.5118, + "step": 2792 + }, + { + "epoch": 0.47932040501115497, + "grad_norm": 3.2895469202610963, + "learning_rate": 1.1153567770195333e-05, + "loss": 1.5436, + "step": 2793 + }, + { + "epoch": 0.479492019907328, + "grad_norm": 2.9641494357348446, + "learning_rate": 1.1148046325880675e-05, + "loss": 1.568, + "step": 2794 + }, + { + "epoch": 0.47966363480350094, + "grad_norm": 3.1339999930629623, + "learning_rate": 1.1142524526871201e-05, + "loss": 1.6977, + "step": 2795 + }, + { + "epoch": 0.47983524969967395, + "grad_norm": 3.1014251288204995, + "learning_rate": 1.11370023748729e-05, + "loss": 1.4696, + "step": 2796 + }, + { + "epoch": 0.4800068645958469, + "grad_norm": 2.839720240833112, + "learning_rate": 1.1131479871591869e-05, + "loss": 1.6802, + "step": 2797 + }, + { + "epoch": 0.4801784794920199, + "grad_norm": 3.521269095352219, + "learning_rate": 1.1125957018734313e-05, + "loss": 1.637, + "step": 2798 + }, + { + "epoch": 0.48035009438819287, + "grad_norm": 2.81676392013561, + "learning_rate": 1.1120433818006546e-05, + "loss": 1.7803, + "step": 2799 + }, + { + "epoch": 0.4805217092843659, + "grad_norm": 3.301384589818475, + "learning_rate": 1.1114910271114987e-05, + "loss": 1.6878, + "step": 2800 + }, + { + "epoch": 0.4806933241805389, + "grad_norm": 2.9090433705044947, + "learning_rate": 1.1109386379766169e-05, + "loss": 1.5992, + "step": 2801 + }, + { + "epoch": 0.48086493907671185, + "grad_norm": 2.616402299100735, + "learning_rate": 1.1103862145666726e-05, + "loss": 1.4713, + "step": 2802 + }, + { + "epoch": 0.48103655397288486, + "grad_norm": 3.5448041579770435, + "learning_rate": 1.1098337570523397e-05, + "loss": 1.7569, + "step": 2803 + }, + { + "epoch": 0.4812081688690578, + "grad_norm": 4.259451430634504, + "learning_rate": 1.1092812656043024e-05, + "loss": 1.8535, + "step": 2804 + }, + { + "epoch": 0.48137978376523083, + "grad_norm": 2.9532808526512744, + "learning_rate": 1.1087287403932563e-05, + "loss": 1.4457, + "step": 2805 + }, + { + "epoch": 0.4815513986614038, + "grad_norm": 2.9157893704847395, + "learning_rate": 1.108176181589907e-05, + "loss": 1.7901, + "step": 2806 + }, + { + "epoch": 0.4817230135575768, + "grad_norm": 2.894146662843656, + "learning_rate": 1.1076235893649697e-05, + 
"loss": 1.6124, + "step": 2807 + }, + { + "epoch": 0.4818946284537498, + "grad_norm": 3.7018813569207785, + "learning_rate": 1.1070709638891718e-05, + "loss": 1.5624, + "step": 2808 + }, + { + "epoch": 0.48206624334992276, + "grad_norm": 4.5730868751411915, + "learning_rate": 1.1065183053332481e-05, + "loss": 1.6043, + "step": 2809 + }, + { + "epoch": 0.4822378582460958, + "grad_norm": 2.80262190571527, + "learning_rate": 1.1059656138679468e-05, + "loss": 1.664, + "step": 2810 + }, + { + "epoch": 0.48240947314226873, + "grad_norm": 2.873055551575474, + "learning_rate": 1.1054128896640241e-05, + "loss": 1.7196, + "step": 2811 + }, + { + "epoch": 0.48258108803844174, + "grad_norm": 2.7503699672206543, + "learning_rate": 1.104860132892247e-05, + "loss": 1.4872, + "step": 2812 + }, + { + "epoch": 0.4827527029346147, + "grad_norm": 3.0155279658603624, + "learning_rate": 1.1043073437233927e-05, + "loss": 1.8158, + "step": 2813 + }, + { + "epoch": 0.4829243178307877, + "grad_norm": 2.7717221391124522, + "learning_rate": 1.1037545223282481e-05, + "loss": 1.518, + "step": 2814 + }, + { + "epoch": 0.4830959327269607, + "grad_norm": 4.218176163062874, + "learning_rate": 1.1032016688776106e-05, + "loss": 1.6093, + "step": 2815 + }, + { + "epoch": 0.4832675476231337, + "grad_norm": 3.482201585177803, + "learning_rate": 1.1026487835422866e-05, + "loss": 1.7539, + "step": 2816 + }, + { + "epoch": 0.4834391625193067, + "grad_norm": 2.989619157050514, + "learning_rate": 1.1020958664930932e-05, + "loss": 1.9388, + "step": 2817 + }, + { + "epoch": 0.48361077741547964, + "grad_norm": 2.669275749442373, + "learning_rate": 1.1015429179008567e-05, + "loss": 1.6032, + "step": 2818 + }, + { + "epoch": 0.48378239231165265, + "grad_norm": 3.621288304107407, + "learning_rate": 1.1009899379364136e-05, + "loss": 1.7854, + "step": 2819 + }, + { + "epoch": 0.48395400720782566, + "grad_norm": 3.704668257147288, + "learning_rate": 1.10043692677061e-05, + "loss": 1.6491, + "step": 2820 + }, + { + "epoch": 0.4841256221039986, + "grad_norm": 2.9470841067166584, + "learning_rate": 1.0998838845743012e-05, + "loss": 1.5437, + "step": 2821 + }, + { + "epoch": 0.48429723700017163, + "grad_norm": 3.2612092655267655, + "learning_rate": 1.099330811518353e-05, + "loss": 1.6303, + "step": 2822 + }, + { + "epoch": 0.4844688518963446, + "grad_norm": 2.9523898895713243, + "learning_rate": 1.0987777077736395e-05, + "loss": 1.7984, + "step": 2823 + }, + { + "epoch": 0.4846404667925176, + "grad_norm": 3.3671597516416747, + "learning_rate": 1.0982245735110453e-05, + "loss": 1.6399, + "step": 2824 + }, + { + "epoch": 0.48481208168869055, + "grad_norm": 3.5269472296949957, + "learning_rate": 1.097671408901464e-05, + "loss": 1.725, + "step": 2825 + }, + { + "epoch": 0.48498369658486357, + "grad_norm": 3.2127318967725658, + "learning_rate": 1.0971182141157988e-05, + "loss": 1.4497, + "step": 2826 + }, + { + "epoch": 0.4851553114810366, + "grad_norm": 2.920910921567526, + "learning_rate": 1.0965649893249619e-05, + "loss": 1.6653, + "step": 2827 + }, + { + "epoch": 0.48532692637720953, + "grad_norm": 2.6731906276150097, + "learning_rate": 1.0960117346998748e-05, + "loss": 1.6553, + "step": 2828 + }, + { + "epoch": 0.48549854127338254, + "grad_norm": 3.689974284079219, + "learning_rate": 1.0954584504114689e-05, + "loss": 1.7421, + "step": 2829 + }, + { + "epoch": 0.4856701561695555, + "grad_norm": 2.924402660432866, + "learning_rate": 1.0949051366306838e-05, + "loss": 1.7086, + "step": 2830 + }, + { + "epoch": 0.4858417710657285, + "grad_norm": 
3.734333067273493, + "learning_rate": 1.0943517935284684e-05, + "loss": 1.7158, + "step": 2831 + }, + { + "epoch": 0.48601338596190147, + "grad_norm": 2.8206577575119285, + "learning_rate": 1.093798421275782e-05, + "loss": 1.5716, + "step": 2832 + }, + { + "epoch": 0.4861850008580745, + "grad_norm": 3.027368031065995, + "learning_rate": 1.0932450200435902e-05, + "loss": 1.5311, + "step": 2833 + }, + { + "epoch": 0.4863566157542475, + "grad_norm": 3.086671055753802, + "learning_rate": 1.0926915900028703e-05, + "loss": 1.6866, + "step": 2834 + }, + { + "epoch": 0.48652823065042045, + "grad_norm": 4.204208530057646, + "learning_rate": 1.092138131324607e-05, + "loss": 1.9312, + "step": 2835 + }, + { + "epoch": 0.48669984554659346, + "grad_norm": 2.63895075432221, + "learning_rate": 1.0915846441797942e-05, + "loss": 1.5832, + "step": 2836 + }, + { + "epoch": 0.4868714604427664, + "grad_norm": 2.9889238006434073, + "learning_rate": 1.0910311287394348e-05, + "loss": 1.634, + "step": 2837 + }, + { + "epoch": 0.4870430753389394, + "grad_norm": 4.811711443546964, + "learning_rate": 1.0904775851745399e-05, + "loss": 1.5856, + "step": 2838 + }, + { + "epoch": 0.48721469023511244, + "grad_norm": 5.350197211889695, + "learning_rate": 1.08992401365613e-05, + "loss": 1.6645, + "step": 2839 + }, + { + "epoch": 0.4873863051312854, + "grad_norm": 3.4733803399677106, + "learning_rate": 1.0893704143552335e-05, + "loss": 1.7122, + "step": 2840 + }, + { + "epoch": 0.4875579200274584, + "grad_norm": 3.633986656472618, + "learning_rate": 1.0888167874428881e-05, + "loss": 1.7649, + "step": 2841 + }, + { + "epoch": 0.48772953492363136, + "grad_norm": 3.4331663012659224, + "learning_rate": 1.0882631330901394e-05, + "loss": 1.7527, + "step": 2842 + }, + { + "epoch": 0.48790114981980437, + "grad_norm": 3.2625999967690054, + "learning_rate": 1.0877094514680417e-05, + "loss": 1.4977, + "step": 2843 + }, + { + "epoch": 0.4880727647159773, + "grad_norm": 5.037079012270086, + "learning_rate": 1.0871557427476585e-05, + "loss": 1.4589, + "step": 2844 + }, + { + "epoch": 0.48824437961215034, + "grad_norm": 2.858837902994473, + "learning_rate": 1.0866020071000597e-05, + "loss": 1.7217, + "step": 2845 + }, + { + "epoch": 0.48841599450832335, + "grad_norm": 2.685082105907214, + "learning_rate": 1.0860482446963262e-05, + "loss": 1.6504, + "step": 2846 + }, + { + "epoch": 0.4885876094044963, + "grad_norm": 3.2473865101242474, + "learning_rate": 1.0854944557075447e-05, + "loss": 1.6097, + "step": 2847 + }, + { + "epoch": 0.4887592243006693, + "grad_norm": 2.7403191919040615, + "learning_rate": 1.084940640304811e-05, + "loss": 1.8809, + "step": 2848 + }, + { + "epoch": 0.48893083919684227, + "grad_norm": 2.465539444705478, + "learning_rate": 1.0843867986592302e-05, + "loss": 1.4832, + "step": 2849 + }, + { + "epoch": 0.4891024540930153, + "grad_norm": 2.815987875335105, + "learning_rate": 1.0838329309419142e-05, + "loss": 1.3072, + "step": 2850 + }, + { + "epoch": 0.48927406898918824, + "grad_norm": 3.031167819980559, + "learning_rate": 1.083279037323983e-05, + "loss": 1.5897, + "step": 2851 + }, + { + "epoch": 0.48944568388536125, + "grad_norm": 2.892950854750241, + "learning_rate": 1.0827251179765645e-05, + "loss": 1.6187, + "step": 2852 + }, + { + "epoch": 0.48961729878153426, + "grad_norm": 3.1684669236398952, + "learning_rate": 1.082171173070796e-05, + "loss": 1.9, + "step": 2853 + }, + { + "epoch": 0.4897889136777072, + "grad_norm": 2.508664755082155, + "learning_rate": 1.0816172027778208e-05, + "loss": 1.4261, + "step": 2854 + 
}, + { + "epoch": 0.4899605285738802, + "grad_norm": 2.957792136495784, + "learning_rate": 1.0810632072687909e-05, + "loss": 1.8966, + "step": 2855 + }, + { + "epoch": 0.4901321434700532, + "grad_norm": 2.4862657280334433, + "learning_rate": 1.080509186714867e-05, + "loss": 1.5494, + "step": 2856 + }, + { + "epoch": 0.4903037583662262, + "grad_norm": 2.8836217406058227, + "learning_rate": 1.0799551412872151e-05, + "loss": 1.9226, + "step": 2857 + }, + { + "epoch": 0.49047537326239915, + "grad_norm": 7.2204784614255155, + "learning_rate": 1.0794010711570115e-05, + "loss": 1.624, + "step": 2858 + }, + { + "epoch": 0.49064698815857216, + "grad_norm": 3.3855069852818134, + "learning_rate": 1.0788469764954388e-05, + "loss": 1.5452, + "step": 2859 + }, + { + "epoch": 0.4908186030547452, + "grad_norm": 3.2140446072472373, + "learning_rate": 1.0782928574736869e-05, + "loss": 1.7694, + "step": 2860 + }, + { + "epoch": 0.49099021795091813, + "grad_norm": 2.5770444220208186, + "learning_rate": 1.0777387142629543e-05, + "loss": 1.4273, + "step": 2861 + }, + { + "epoch": 0.49116183284709114, + "grad_norm": 2.63682652341496, + "learning_rate": 1.0771845470344462e-05, + "loss": 1.6189, + "step": 2862 + }, + { + "epoch": 0.4913334477432641, + "grad_norm": 2.7074590769967743, + "learning_rate": 1.0766303559593754e-05, + "loss": 1.6293, + "step": 2863 + }, + { + "epoch": 0.4915050626394371, + "grad_norm": 3.4775218455695356, + "learning_rate": 1.076076141208962e-05, + "loss": 1.7053, + "step": 2864 + }, + { + "epoch": 0.4916766775356101, + "grad_norm": 3.1410804742670995, + "learning_rate": 1.0755219029544337e-05, + "loss": 1.8371, + "step": 2865 + }, + { + "epoch": 0.4918482924317831, + "grad_norm": 2.726681343008733, + "learning_rate": 1.0749676413670252e-05, + "loss": 1.6201, + "step": 2866 + }, + { + "epoch": 0.4920199073279561, + "grad_norm": 3.0176802644912035, + "learning_rate": 1.074413356617978e-05, + "loss": 1.8257, + "step": 2867 + }, + { + "epoch": 0.49219152222412904, + "grad_norm": 2.6827995439150203, + "learning_rate": 1.073859048878542e-05, + "loss": 1.5066, + "step": 2868 + }, + { + "epoch": 0.49236313712030205, + "grad_norm": 3.2900236891010572, + "learning_rate": 1.0733047183199729e-05, + "loss": 1.7236, + "step": 2869 + }, + { + "epoch": 0.492534752016475, + "grad_norm": 3.0106033897329616, + "learning_rate": 1.0727503651135343e-05, + "loss": 1.669, + "step": 2870 + }, + { + "epoch": 0.492706366912648, + "grad_norm": 3.1550179081101475, + "learning_rate": 1.0721959894304963e-05, + "loss": 1.5022, + "step": 2871 + }, + { + "epoch": 0.49287798180882103, + "grad_norm": 2.81830665913476, + "learning_rate": 1.071641591442136e-05, + "loss": 1.6757, + "step": 2872 + }, + { + "epoch": 0.493049596704994, + "grad_norm": 6.1658539954755405, + "learning_rate": 1.0710871713197374e-05, + "loss": 1.8812, + "step": 2873 + }, + { + "epoch": 0.493221211601167, + "grad_norm": 2.8799999685540225, + "learning_rate": 1.0705327292345922e-05, + "loss": 1.8775, + "step": 2874 + }, + { + "epoch": 0.49339282649733995, + "grad_norm": 2.9679981499183783, + "learning_rate": 1.0699782653579973e-05, + "loss": 1.8642, + "step": 2875 + }, + { + "epoch": 0.49356444139351296, + "grad_norm": 3.2672909629974956, + "learning_rate": 1.0694237798612574e-05, + "loss": 1.3912, + "step": 2876 + }, + { + "epoch": 0.4937360562896859, + "grad_norm": 3.11212245892745, + "learning_rate": 1.068869272915684e-05, + "loss": 1.6626, + "step": 2877 + }, + { + "epoch": 0.49390767118585893, + "grad_norm": 3.3996329143275315, + "learning_rate": 
1.0683147446925943e-05, + "loss": 1.8161, + "step": 2878 + }, + { + "epoch": 0.49407928608203194, + "grad_norm": 2.8148004124344173, + "learning_rate": 1.067760195363313e-05, + "loss": 1.3539, + "step": 2879 + }, + { + "epoch": 0.4942509009782049, + "grad_norm": 3.3638402236750844, + "learning_rate": 1.0672056250991714e-05, + "loss": 1.7545, + "step": 2880 + }, + { + "epoch": 0.4944225158743779, + "grad_norm": 3.221325042639084, + "learning_rate": 1.0666510340715057e-05, + "loss": 1.7475, + "step": 2881 + }, + { + "epoch": 0.49459413077055087, + "grad_norm": 3.763264646160915, + "learning_rate": 1.0660964224516606e-05, + "loss": 1.7146, + "step": 2882 + }, + { + "epoch": 0.4947657456667239, + "grad_norm": 2.9305646212777456, + "learning_rate": 1.0655417904109859e-05, + "loss": 1.7116, + "step": 2883 + }, + { + "epoch": 0.49493736056289683, + "grad_norm": 3.9408742167039073, + "learning_rate": 1.0649871381208379e-05, + "loss": 1.5674, + "step": 2884 + }, + { + "epoch": 0.49510897545906984, + "grad_norm": 4.100534497247508, + "learning_rate": 1.0644324657525795e-05, + "loss": 1.9224, + "step": 2885 + }, + { + "epoch": 0.49528059035524286, + "grad_norm": 2.787121561972098, + "learning_rate": 1.0638777734775795e-05, + "loss": 1.7637, + "step": 2886 + }, + { + "epoch": 0.4954522052514158, + "grad_norm": 3.6700902910857347, + "learning_rate": 1.063323061467213e-05, + "loss": 1.6383, + "step": 2887 + }, + { + "epoch": 0.4956238201475888, + "grad_norm": 2.963964690375311, + "learning_rate": 1.0627683298928607e-05, + "loss": 1.6797, + "step": 2888 + }, + { + "epoch": 0.4957954350437618, + "grad_norm": 3.545970293150631, + "learning_rate": 1.0622135789259105e-05, + "loss": 1.6165, + "step": 2889 + }, + { + "epoch": 0.4959670499399348, + "grad_norm": 2.7483820087286284, + "learning_rate": 1.061658808737755e-05, + "loss": 1.6622, + "step": 2890 + }, + { + "epoch": 0.4961386648361078, + "grad_norm": 3.1393123758316546, + "learning_rate": 1.0611040194997933e-05, + "loss": 1.6765, + "step": 2891 + }, + { + "epoch": 0.49631027973228076, + "grad_norm": 3.0805616736243433, + "learning_rate": 1.060549211383431e-05, + "loss": 1.4133, + "step": 2892 + }, + { + "epoch": 0.49648189462845377, + "grad_norm": 4.19817350974281, + "learning_rate": 1.0599943845600781e-05, + "loss": 1.8748, + "step": 2893 + }, + { + "epoch": 0.4966535095246267, + "grad_norm": 5.320784312248385, + "learning_rate": 1.0594395392011518e-05, + "loss": 1.7341, + "step": 2894 + }, + { + "epoch": 0.49682512442079974, + "grad_norm": 3.4793049245072902, + "learning_rate": 1.0588846754780744e-05, + "loss": 1.5339, + "step": 2895 + }, + { + "epoch": 0.4969967393169727, + "grad_norm": 3.4858430134900047, + "learning_rate": 1.0583297935622732e-05, + "loss": 1.5074, + "step": 2896 + }, + { + "epoch": 0.4971683542131457, + "grad_norm": 2.619043149279237, + "learning_rate": 1.0577748936251829e-05, + "loss": 1.6884, + "step": 2897 + }, + { + "epoch": 0.4973399691093187, + "grad_norm": 3.562472813773365, + "learning_rate": 1.0572199758382417e-05, + "loss": 1.5751, + "step": 2898 + }, + { + "epoch": 0.49751158400549167, + "grad_norm": 3.264365661674337, + "learning_rate": 1.056665040372895e-05, + "loss": 1.6157, + "step": 2899 + }, + { + "epoch": 0.4976831989016647, + "grad_norm": 3.2884989911815716, + "learning_rate": 1.0561100874005925e-05, + "loss": 1.5677, + "step": 2900 + }, + { + "epoch": 0.49785481379783764, + "grad_norm": 2.9259898359026795, + "learning_rate": 1.05555511709279e-05, + "loss": 1.644, + "step": 2901 + }, + { + "epoch": 
0.49802642869401065, + "grad_norm": 3.2469976450023847, + "learning_rate": 1.0550001296209487e-05, + "loss": 1.6406, + "step": 2902 + }, + { + "epoch": 0.4981980435901836, + "grad_norm": 3.056174535833804, + "learning_rate": 1.054445125156534e-05, + "loss": 1.619, + "step": 2903 + }, + { + "epoch": 0.4983696584863566, + "grad_norm": 2.984069357007831, + "learning_rate": 1.0538901038710186e-05, + "loss": 1.4725, + "step": 2904 + }, + { + "epoch": 0.4985412733825296, + "grad_norm": 3.053950465963755, + "learning_rate": 1.0533350659358779e-05, + "loss": 1.5265, + "step": 2905 + }, + { + "epoch": 0.4987128882787026, + "grad_norm": 3.4651192292977586, + "learning_rate": 1.0527800115225945e-05, + "loss": 1.6516, + "step": 2906 + }, + { + "epoch": 0.4988845031748756, + "grad_norm": 3.057599146523825, + "learning_rate": 1.0522249408026553e-05, + "loss": 1.6218, + "step": 2907 + }, + { + "epoch": 0.49905611807104855, + "grad_norm": 3.129506370889882, + "learning_rate": 1.0516698539475518e-05, + "loss": 1.6617, + "step": 2908 + }, + { + "epoch": 0.49922773296722156, + "grad_norm": 6.273617084517068, + "learning_rate": 1.0511147511287813e-05, + "loss": 1.7429, + "step": 2909 + }, + { + "epoch": 0.4993993478633945, + "grad_norm": 14.76701678271327, + "learning_rate": 1.0505596325178458e-05, + "loss": 1.8852, + "step": 2910 + }, + { + "epoch": 0.4995709627595675, + "grad_norm": 3.211466359060046, + "learning_rate": 1.0500044982862519e-05, + "loss": 1.7959, + "step": 2911 + }, + { + "epoch": 0.49974257765574054, + "grad_norm": 3.223250055081025, + "learning_rate": 1.0494493486055108e-05, + "loss": 1.5882, + "step": 2912 + }, + { + "epoch": 0.4999141925519135, + "grad_norm": 3.2228820051245752, + "learning_rate": 1.0488941836471393e-05, + "loss": 1.4302, + "step": 2913 + }, + { + "epoch": 0.5000858074480865, + "grad_norm": 3.7748502855531703, + "learning_rate": 1.0483390035826586e-05, + "loss": 1.6462, + "step": 2914 + }, + { + "epoch": 0.5002574223442595, + "grad_norm": 2.662677804872833, + "learning_rate": 1.0477838085835936e-05, + "loss": 1.5748, + "step": 2915 + }, + { + "epoch": 0.5004290372404324, + "grad_norm": 3.3593050995196876, + "learning_rate": 1.047228598821476e-05, + "loss": 1.8182, + "step": 2916 + }, + { + "epoch": 0.5006006521366054, + "grad_norm": 2.762102391538692, + "learning_rate": 1.0466733744678392e-05, + "loss": 1.6905, + "step": 2917 + }, + { + "epoch": 0.5007722670327784, + "grad_norm": 3.354254556898484, + "learning_rate": 1.0461181356942239e-05, + "loss": 1.5207, + "step": 2918 + }, + { + "epoch": 0.5009438819289515, + "grad_norm": 3.9023037760207897, + "learning_rate": 1.0455628826721733e-05, + "loss": 1.6755, + "step": 2919 + }, + { + "epoch": 0.5011154968251245, + "grad_norm": 2.763682270620225, + "learning_rate": 1.0450076155732357e-05, + "loss": 1.6945, + "step": 2920 + }, + { + "epoch": 0.5012871117212974, + "grad_norm": 3.2913572941270357, + "learning_rate": 1.0444523345689637e-05, + "loss": 1.5213, + "step": 2921 + }, + { + "epoch": 0.5014587266174704, + "grad_norm": 4.571939576018699, + "learning_rate": 1.0438970398309145e-05, + "loss": 1.6143, + "step": 2922 + }, + { + "epoch": 0.5016303415136434, + "grad_norm": 3.246076186091857, + "learning_rate": 1.0433417315306494e-05, + "loss": 1.7915, + "step": 2923 + }, + { + "epoch": 0.5018019564098164, + "grad_norm": 4.1703491706708, + "learning_rate": 1.042786409839733e-05, + "loss": 1.8054, + "step": 2924 + }, + { + "epoch": 0.5019735713059894, + "grad_norm": 2.7798726698606573, + "learning_rate": 1.0422310749297355e-05, + 
"loss": 1.657, + "step": 2925 + }, + { + "epoch": 0.5021451862021623, + "grad_norm": 3.277794573925155, + "learning_rate": 1.0416757269722301e-05, + "loss": 1.7684, + "step": 2926 + }, + { + "epoch": 0.5023168010983353, + "grad_norm": 3.198920525110606, + "learning_rate": 1.0411203661387944e-05, + "loss": 1.796, + "step": 2927 + }, + { + "epoch": 0.5024884159945083, + "grad_norm": 3.258831816950959, + "learning_rate": 1.0405649926010108e-05, + "loss": 1.4681, + "step": 2928 + }, + { + "epoch": 0.5026600308906813, + "grad_norm": 3.440171045797029, + "learning_rate": 1.0400096065304637e-05, + "loss": 1.5857, + "step": 2929 + }, + { + "epoch": 0.5028316457868544, + "grad_norm": 3.4265772411020223, + "learning_rate": 1.0394542080987429e-05, + "loss": 1.6542, + "step": 2930 + }, + { + "epoch": 0.5030032606830273, + "grad_norm": 2.975751179522291, + "learning_rate": 1.0388987974774417e-05, + "loss": 1.6956, + "step": 2931 + }, + { + "epoch": 0.5031748755792003, + "grad_norm": 3.117335313176168, + "learning_rate": 1.0383433748381571e-05, + "loss": 1.6947, + "step": 2932 + }, + { + "epoch": 0.5033464904753733, + "grad_norm": 2.5805978855208815, + "learning_rate": 1.0377879403524895e-05, + "loss": 1.1251, + "step": 2933 + }, + { + "epoch": 0.5035181053715463, + "grad_norm": 3.2249294499925374, + "learning_rate": 1.0372324941920435e-05, + "loss": 1.6929, + "step": 2934 + }, + { + "epoch": 0.5036897202677192, + "grad_norm": 3.0081875451924986, + "learning_rate": 1.0366770365284271e-05, + "loss": 1.7803, + "step": 2935 + }, + { + "epoch": 0.5038613351638922, + "grad_norm": 3.249011312384148, + "learning_rate": 1.0361215675332512e-05, + "loss": 1.4356, + "step": 2936 + }, + { + "epoch": 0.5040329500600652, + "grad_norm": 3.585741225233707, + "learning_rate": 1.0355660873781316e-05, + "loss": 1.4232, + "step": 2937 + }, + { + "epoch": 0.5042045649562382, + "grad_norm": 3.4699371079678776, + "learning_rate": 1.0350105962346866e-05, + "loss": 1.9963, + "step": 2938 + }, + { + "epoch": 0.5043761798524112, + "grad_norm": 3.086066657091744, + "learning_rate": 1.0344550942745376e-05, + "loss": 1.6971, + "step": 2939 + }, + { + "epoch": 0.5045477947485841, + "grad_norm": 3.962657677326349, + "learning_rate": 1.0338995816693103e-05, + "loss": 1.7095, + "step": 2940 + }, + { + "epoch": 0.5047194096447571, + "grad_norm": 3.572988293252918, + "learning_rate": 1.0333440585906326e-05, + "loss": 1.6938, + "step": 2941 + }, + { + "epoch": 0.5048910245409302, + "grad_norm": 3.2501704646702843, + "learning_rate": 1.0327885252101368e-05, + "loss": 1.6038, + "step": 2942 + }, + { + "epoch": 0.5050626394371032, + "grad_norm": 3.0890353322432333, + "learning_rate": 1.0322329816994574e-05, + "loss": 1.564, + "step": 2943 + }, + { + "epoch": 0.5052342543332762, + "grad_norm": 2.9619208484997035, + "learning_rate": 1.0316774282302324e-05, + "loss": 1.5862, + "step": 2944 + }, + { + "epoch": 0.5054058692294491, + "grad_norm": 3.72995209511458, + "learning_rate": 1.0311218649741033e-05, + "loss": 1.5326, + "step": 2945 + }, + { + "epoch": 0.5055774841256221, + "grad_norm": 2.9784824088073676, + "learning_rate": 1.030566292102714e-05, + "loss": 1.7438, + "step": 2946 + }, + { + "epoch": 0.5057490990217951, + "grad_norm": 4.106715634550961, + "learning_rate": 1.0300107097877114e-05, + "loss": 1.7303, + "step": 2947 + }, + { + "epoch": 0.5059207139179681, + "grad_norm": 3.1130712411699815, + "learning_rate": 1.0294551182007457e-05, + "loss": 1.6983, + "step": 2948 + }, + { + "epoch": 0.506092328814141, + "grad_norm": 
3.218880328414794, + "learning_rate": 1.02889951751347e-05, + "loss": 1.746, + "step": 2949 + }, + { + "epoch": 0.506263943710314, + "grad_norm": 3.6755946075999635, + "learning_rate": 1.0283439078975398e-05, + "loss": 1.4895, + "step": 2950 + }, + { + "epoch": 0.506435558606487, + "grad_norm": 3.750564088714565, + "learning_rate": 1.027788289524613e-05, + "loss": 1.6799, + "step": 2951 + }, + { + "epoch": 0.50660717350266, + "grad_norm": 3.1956894531704774, + "learning_rate": 1.0272326625663518e-05, + "loss": 1.5424, + "step": 2952 + }, + { + "epoch": 0.5067787883988331, + "grad_norm": 3.2637943492249546, + "learning_rate": 1.0266770271944191e-05, + "loss": 1.6941, + "step": 2953 + }, + { + "epoch": 0.506950403295006, + "grad_norm": 3.354373593989549, + "learning_rate": 1.0261213835804819e-05, + "loss": 1.8451, + "step": 2954 + }, + { + "epoch": 0.507122018191179, + "grad_norm": 3.6255017169228205, + "learning_rate": 1.0255657318962089e-05, + "loss": 1.5691, + "step": 2955 + }, + { + "epoch": 0.507293633087352, + "grad_norm": 3.075331238295357, + "learning_rate": 1.0250100723132714e-05, + "loss": 1.7295, + "step": 2956 + }, + { + "epoch": 0.507465247983525, + "grad_norm": 3.4729559836605866, + "learning_rate": 1.0244544050033435e-05, + "loss": 1.6283, + "step": 2957 + }, + { + "epoch": 0.507636862879698, + "grad_norm": 3.3293556911998152, + "learning_rate": 1.0238987301381017e-05, + "loss": 1.8535, + "step": 2958 + }, + { + "epoch": 0.5078084777758709, + "grad_norm": 2.894293906237257, + "learning_rate": 1.0233430478892242e-05, + "loss": 1.4294, + "step": 2959 + }, + { + "epoch": 0.5079800926720439, + "grad_norm": 2.9224548092868163, + "learning_rate": 1.0227873584283918e-05, + "loss": 1.692, + "step": 2960 + }, + { + "epoch": 0.5081517075682169, + "grad_norm": 3.6853315435030893, + "learning_rate": 1.022231661927288e-05, + "loss": 1.5805, + "step": 2961 + }, + { + "epoch": 0.5083233224643899, + "grad_norm": 3.402319340749703, + "learning_rate": 1.0216759585575979e-05, + "loss": 1.8483, + "step": 2962 + }, + { + "epoch": 0.508494937360563, + "grad_norm": 3.3972757193184138, + "learning_rate": 1.0211202484910088e-05, + "loss": 1.6312, + "step": 2963 + }, + { + "epoch": 0.5086665522567358, + "grad_norm": 2.9433187632711126, + "learning_rate": 1.0205645318992107e-05, + "loss": 1.6051, + "step": 2964 + }, + { + "epoch": 0.5088381671529089, + "grad_norm": 3.828306753602516, + "learning_rate": 1.0200088089538944e-05, + "loss": 1.7072, + "step": 2965 + }, + { + "epoch": 0.5090097820490819, + "grad_norm": 3.7850638048350733, + "learning_rate": 1.0194530798267538e-05, + "loss": 1.7303, + "step": 2966 + }, + { + "epoch": 0.5091813969452549, + "grad_norm": 2.862372877817793, + "learning_rate": 1.0188973446894844e-05, + "loss": 1.4762, + "step": 2967 + }, + { + "epoch": 0.5093530118414278, + "grad_norm": 3.0941524300321763, + "learning_rate": 1.0183416037137828e-05, + "loss": 1.59, + "step": 2968 + }, + { + "epoch": 0.5095246267376008, + "grad_norm": 3.265643471826439, + "learning_rate": 1.0177858570713486e-05, + "loss": 1.8967, + "step": 2969 + }, + { + "epoch": 0.5096962416337738, + "grad_norm": 3.2477090029599367, + "learning_rate": 1.0172301049338822e-05, + "loss": 1.6869, + "step": 2970 + }, + { + "epoch": 0.5098678565299468, + "grad_norm": 4.110351359031347, + "learning_rate": 1.0166743474730865e-05, + "loss": 1.6598, + "step": 2971 + }, + { + "epoch": 0.5100394714261198, + "grad_norm": 3.6985490106066257, + "learning_rate": 1.0161185848606652e-05, + "loss": 1.8339, + "step": 2972 + }, + { + 
"epoch": 0.5102110863222927, + "grad_norm": 3.1256289600190095, + "learning_rate": 1.0155628172683245e-05, + "loss": 1.5702, + "step": 2973 + }, + { + "epoch": 0.5103827012184657, + "grad_norm": 3.1956141812260808, + "learning_rate": 1.0150070448677714e-05, + "loss": 1.6593, + "step": 2974 + }, + { + "epoch": 0.5105543161146388, + "grad_norm": 3.9260590753888787, + "learning_rate": 1.014451267830714e-05, + "loss": 1.5328, + "step": 2975 + }, + { + "epoch": 0.5107259310108118, + "grad_norm": 6.240302313851187, + "learning_rate": 1.013895486328864e-05, + "loss": 1.6802, + "step": 2976 + }, + { + "epoch": 0.5108975459069848, + "grad_norm": 2.679494721565575, + "learning_rate": 1.0133397005339313e-05, + "loss": 1.5386, + "step": 2977 + }, + { + "epoch": 0.5110691608031577, + "grad_norm": 3.030778005303316, + "learning_rate": 1.01278391061763e-05, + "loss": 1.578, + "step": 2978 + }, + { + "epoch": 0.5112407756993307, + "grad_norm": 4.072516500511933, + "learning_rate": 1.0122281167516736e-05, + "loss": 1.5942, + "step": 2979 + }, + { + "epoch": 0.5114123905955037, + "grad_norm": 3.3160383021214788, + "learning_rate": 1.0116723191077775e-05, + "loss": 1.6524, + "step": 2980 + }, + { + "epoch": 0.5115840054916767, + "grad_norm": 3.348323569115134, + "learning_rate": 1.0111165178576587e-05, + "loss": 1.7913, + "step": 2981 + }, + { + "epoch": 0.5117556203878497, + "grad_norm": 3.213964036579384, + "learning_rate": 1.0105607131730343e-05, + "loss": 1.6197, + "step": 2982 + }, + { + "epoch": 0.5119272352840226, + "grad_norm": 2.8553108231672235, + "learning_rate": 1.0100049052256236e-05, + "loss": 1.489, + "step": 2983 + }, + { + "epoch": 0.5120988501801956, + "grad_norm": 3.15221880149023, + "learning_rate": 1.0094490941871456e-05, + "loss": 1.7178, + "step": 2984 + }, + { + "epoch": 0.5122704650763686, + "grad_norm": 2.877827086883412, + "learning_rate": 1.0088932802293214e-05, + "loss": 1.566, + "step": 2985 + }, + { + "epoch": 0.5124420799725417, + "grad_norm": 4.031015391832658, + "learning_rate": 1.0083374635238728e-05, + "loss": 1.6965, + "step": 2986 + }, + { + "epoch": 0.5126136948687146, + "grad_norm": 2.7359894328298635, + "learning_rate": 1.0077816442425216e-05, + "loss": 1.313, + "step": 2987 + }, + { + "epoch": 0.5127853097648876, + "grad_norm": 3.151592104093204, + "learning_rate": 1.0072258225569919e-05, + "loss": 1.8535, + "step": 2988 + }, + { + "epoch": 0.5129569246610606, + "grad_norm": 2.379391053213078, + "learning_rate": 1.0066699986390067e-05, + "loss": 1.3617, + "step": 2989 + }, + { + "epoch": 0.5131285395572336, + "grad_norm": 4.436334611424934, + "learning_rate": 1.0061141726602912e-05, + "loss": 1.7935, + "step": 2990 + }, + { + "epoch": 0.5133001544534066, + "grad_norm": 3.712016639280695, + "learning_rate": 1.005558344792571e-05, + "loss": 1.7661, + "step": 2991 + }, + { + "epoch": 0.5134717693495795, + "grad_norm": 3.1979438906163433, + "learning_rate": 1.0050025152075711e-05, + "loss": 1.5041, + "step": 2992 + }, + { + "epoch": 0.5136433842457525, + "grad_norm": 3.5715845810873246, + "learning_rate": 1.0044466840770186e-05, + "loss": 1.6445, + "step": 2993 + }, + { + "epoch": 0.5138149991419255, + "grad_norm": 3.3884576562126116, + "learning_rate": 1.0038908515726403e-05, + "loss": 1.4902, + "step": 2994 + }, + { + "epoch": 0.5139866140380985, + "grad_norm": 3.544862197998934, + "learning_rate": 1.0033350178661633e-05, + "loss": 1.6072, + "step": 2995 + }, + { + "epoch": 0.5141582289342715, + "grad_norm": 2.714391497465356, + "learning_rate": 1.0027791831293152e-05, 
+ "loss": 1.5935, + "step": 2996 + }, + { + "epoch": 0.5143298438304444, + "grad_norm": 2.626893230202131, + "learning_rate": 1.0022233475338245e-05, + "loss": 1.681, + "step": 2997 + }, + { + "epoch": 0.5145014587266175, + "grad_norm": 3.107941559695089, + "learning_rate": 1.0016675112514192e-05, + "loss": 1.6247, + "step": 2998 + }, + { + "epoch": 0.5146730736227905, + "grad_norm": 3.212739318253138, + "learning_rate": 1.0011116744538271e-05, + "loss": 1.5936, + "step": 2999 + }, + { + "epoch": 0.5148446885189635, + "grad_norm": 2.8151035143881327, + "learning_rate": 1.0005558373127784e-05, + "loss": 1.4084, + "step": 3000 + }, + { + "epoch": 0.5150163034151364, + "grad_norm": 3.386392327409711, + "learning_rate": 1e-05, + "loss": 1.5833, + "step": 3001 + }, + { + "epoch": 0.5151879183113094, + "grad_norm": 2.9561337237382714, + "learning_rate": 9.99444162687222e-06, + "loss": 1.5548, + "step": 3002 + }, + { + "epoch": 0.5153595332074824, + "grad_norm": 3.2153208004661926, + "learning_rate": 9.988883255461729e-06, + "loss": 1.4379, + "step": 3003 + }, + { + "epoch": 0.5155311481036554, + "grad_norm": 2.898719742125427, + "learning_rate": 9.98332488748581e-06, + "loss": 1.6423, + "step": 3004 + }, + { + "epoch": 0.5157027629998284, + "grad_norm": 3.9387096897170597, + "learning_rate": 9.97776652466176e-06, + "loss": 1.7711, + "step": 3005 + }, + { + "epoch": 0.5158743778960013, + "grad_norm": 4.105848117113761, + "learning_rate": 9.97220816870685e-06, + "loss": 1.7484, + "step": 3006 + }, + { + "epoch": 0.5160459927921743, + "grad_norm": 2.834630096278617, + "learning_rate": 9.966649821338368e-06, + "loss": 1.7465, + "step": 3007 + }, + { + "epoch": 0.5162176076883473, + "grad_norm": 3.064264649089282, + "learning_rate": 9.961091484273599e-06, + "loss": 1.8399, + "step": 3008 + }, + { + "epoch": 0.5163892225845204, + "grad_norm": 3.693562209048199, + "learning_rate": 9.955533159229814e-06, + "loss": 1.5476, + "step": 3009 + }, + { + "epoch": 0.5165608374806934, + "grad_norm": 3.4280264854946267, + "learning_rate": 9.94997484792429e-06, + "loss": 1.5403, + "step": 3010 + }, + { + "epoch": 0.5167324523768663, + "grad_norm": 4.573582908609637, + "learning_rate": 9.944416552074296e-06, + "loss": 1.6736, + "step": 3011 + }, + { + "epoch": 0.5169040672730393, + "grad_norm": 3.5798959139392132, + "learning_rate": 9.938858273397091e-06, + "loss": 1.5538, + "step": 3012 + }, + { + "epoch": 0.5170756821692123, + "grad_norm": 3.030795910708309, + "learning_rate": 9.933300013609936e-06, + "loss": 1.6508, + "step": 3013 + }, + { + "epoch": 0.5172472970653853, + "grad_norm": 2.880406233014958, + "learning_rate": 9.927741774430084e-06, + "loss": 1.6224, + "step": 3014 + }, + { + "epoch": 0.5174189119615583, + "grad_norm": 3.113151783626446, + "learning_rate": 9.922183557574786e-06, + "loss": 1.688, + "step": 3015 + }, + { + "epoch": 0.5175905268577312, + "grad_norm": 3.3465255811606074, + "learning_rate": 9.916625364761274e-06, + "loss": 1.5959, + "step": 3016 + }, + { + "epoch": 0.5177621417539042, + "grad_norm": 2.9595810525444146, + "learning_rate": 9.91106719770679e-06, + "loss": 1.4398, + "step": 3017 + }, + { + "epoch": 0.5179337566500772, + "grad_norm": 2.6469513817153705, + "learning_rate": 9.905509058128547e-06, + "loss": 1.2602, + "step": 3018 + }, + { + "epoch": 0.5181053715462502, + "grad_norm": 2.8632946620707704, + "learning_rate": 9.899950947743767e-06, + "loss": 1.6541, + "step": 3019 + }, + { + "epoch": 0.5182769864424231, + "grad_norm": 3.882779895762631, + "learning_rate": 
9.894392868269658e-06, + "loss": 1.7289, + "step": 3020 + }, + { + "epoch": 0.5184486013385962, + "grad_norm": 3.223046716096657, + "learning_rate": 9.888834821423415e-06, + "loss": 1.7866, + "step": 3021 + }, + { + "epoch": 0.5186202162347692, + "grad_norm": 2.920297766415385, + "learning_rate": 9.883276808922225e-06, + "loss": 1.597, + "step": 3022 + }, + { + "epoch": 0.5187918311309422, + "grad_norm": 3.4869738402894552, + "learning_rate": 9.877718832483269e-06, + "loss": 1.8228, + "step": 3023 + }, + { + "epoch": 0.5189634460271152, + "grad_norm": 3.0568271775035143, + "learning_rate": 9.872160893823702e-06, + "loss": 1.8056, + "step": 3024 + }, + { + "epoch": 0.5191350609232881, + "grad_norm": 2.66876404811824, + "learning_rate": 9.866602994660688e-06, + "loss": 1.6332, + "step": 3025 + }, + { + "epoch": 0.5193066758194611, + "grad_norm": 3.203141537484068, + "learning_rate": 9.861045136711362e-06, + "loss": 1.7426, + "step": 3026 + }, + { + "epoch": 0.5194782907156341, + "grad_norm": 3.0591674814064147, + "learning_rate": 9.85548732169286e-06, + "loss": 1.3971, + "step": 3027 + }, + { + "epoch": 0.5196499056118071, + "grad_norm": 3.2259742354114223, + "learning_rate": 9.849929551322288e-06, + "loss": 1.511, + "step": 3028 + }, + { + "epoch": 0.5198215205079801, + "grad_norm": 2.9362744614163336, + "learning_rate": 9.84437182731676e-06, + "loss": 1.5222, + "step": 3029 + }, + { + "epoch": 0.519993135404153, + "grad_norm": 3.486300445761983, + "learning_rate": 9.83881415139335e-06, + "loss": 1.7279, + "step": 3030 + }, + { + "epoch": 0.520164750300326, + "grad_norm": 2.807954713021253, + "learning_rate": 9.833256525269136e-06, + "loss": 1.4414, + "step": 3031 + }, + { + "epoch": 0.5203363651964991, + "grad_norm": 3.3137180490243545, + "learning_rate": 9.82769895066118e-06, + "loss": 1.5196, + "step": 3032 + }, + { + "epoch": 0.5205079800926721, + "grad_norm": 2.645204340432645, + "learning_rate": 9.822141429286515e-06, + "loss": 1.6183, + "step": 3033 + }, + { + "epoch": 0.5206795949888451, + "grad_norm": 3.62514299874206, + "learning_rate": 9.816583962862174e-06, + "loss": 1.7005, + "step": 3034 + }, + { + "epoch": 0.520851209885018, + "grad_norm": 3.586347181395042, + "learning_rate": 9.811026553105162e-06, + "loss": 1.6668, + "step": 3035 + }, + { + "epoch": 0.521022824781191, + "grad_norm": 2.8056090701722347, + "learning_rate": 9.805469201732463e-06, + "loss": 1.4765, + "step": 3036 + }, + { + "epoch": 0.521194439677364, + "grad_norm": 2.7152421285079256, + "learning_rate": 9.799911910461059e-06, + "loss": 1.5281, + "step": 3037 + }, + { + "epoch": 0.521366054573537, + "grad_norm": 2.8691051669555776, + "learning_rate": 9.794354681007894e-06, + "loss": 1.5387, + "step": 3038 + }, + { + "epoch": 0.5215376694697099, + "grad_norm": 2.9418932340943575, + "learning_rate": 9.788797515089912e-06, + "loss": 1.1927, + "step": 3039 + }, + { + "epoch": 0.5217092843658829, + "grad_norm": 3.2460695971899836, + "learning_rate": 9.783240414424021e-06, + "loss": 1.7296, + "step": 3040 + }, + { + "epoch": 0.5218808992620559, + "grad_norm": 3.3727553693915806, + "learning_rate": 9.777683380727123e-06, + "loss": 1.8422, + "step": 3041 + }, + { + "epoch": 0.522052514158229, + "grad_norm": 3.846914794687572, + "learning_rate": 9.772126415716085e-06, + "loss": 1.4607, + "step": 3042 + }, + { + "epoch": 0.522224129054402, + "grad_norm": 3.209946771691414, + "learning_rate": 9.766569521107762e-06, + "loss": 1.5448, + "step": 3043 + }, + { + "epoch": 0.5223957439505749, + "grad_norm": 3.486569670983471, + 
"learning_rate": 9.761012698618985e-06, + "loss": 1.672, + "step": 3044 + }, + { + "epoch": 0.5225673588467479, + "grad_norm": 3.2763119287848013, + "learning_rate": 9.755455949966565e-06, + "loss": 1.6314, + "step": 3045 + }, + { + "epoch": 0.5227389737429209, + "grad_norm": 4.3359332117927245, + "learning_rate": 9.749899276867286e-06, + "loss": 1.8492, + "step": 3046 + }, + { + "epoch": 0.5229105886390939, + "grad_norm": 3.058358055627463, + "learning_rate": 9.744342681037916e-06, + "loss": 1.5919, + "step": 3047 + }, + { + "epoch": 0.5230822035352669, + "grad_norm": 2.8332424891756327, + "learning_rate": 9.738786164195183e-06, + "loss": 1.7999, + "step": 3048 + }, + { + "epoch": 0.5232538184314398, + "grad_norm": 3.2866534189140126, + "learning_rate": 9.73322972805581e-06, + "loss": 1.6018, + "step": 3049 + }, + { + "epoch": 0.5234254333276128, + "grad_norm": 3.195891653244995, + "learning_rate": 9.727673374336483e-06, + "loss": 1.7746, + "step": 3050 + }, + { + "epoch": 0.5235970482237858, + "grad_norm": 3.6342109755862966, + "learning_rate": 9.72211710475387e-06, + "loss": 1.578, + "step": 3051 + }, + { + "epoch": 0.5237686631199588, + "grad_norm": 3.003703295219066, + "learning_rate": 9.716560921024604e-06, + "loss": 1.7425, + "step": 3052 + }, + { + "epoch": 0.5239402780161319, + "grad_norm": 3.2432828461897687, + "learning_rate": 9.711004824865304e-06, + "loss": 1.5037, + "step": 3053 + }, + { + "epoch": 0.5241118929123048, + "grad_norm": 2.692755791278498, + "learning_rate": 9.705448817992546e-06, + "loss": 1.3273, + "step": 3054 + }, + { + "epoch": 0.5242835078084778, + "grad_norm": 3.609256015040475, + "learning_rate": 9.699892902122887e-06, + "loss": 1.714, + "step": 3055 + }, + { + "epoch": 0.5244551227046508, + "grad_norm": 3.1870651312966904, + "learning_rate": 9.694337078972864e-06, + "loss": 1.6096, + "step": 3056 + }, + { + "epoch": 0.5246267376008238, + "grad_norm": 3.235420185447154, + "learning_rate": 9.688781350258968e-06, + "loss": 1.6898, + "step": 3057 + }, + { + "epoch": 0.5247983524969967, + "grad_norm": 3.6810319709805546, + "learning_rate": 9.683225717697676e-06, + "loss": 1.4189, + "step": 3058 + }, + { + "epoch": 0.5249699673931697, + "grad_norm": 3.25925483502963, + "learning_rate": 9.67767018300543e-06, + "loss": 1.8755, + "step": 3059 + }, + { + "epoch": 0.5251415822893427, + "grad_norm": 3.8320906280394906, + "learning_rate": 9.672114747898635e-06, + "loss": 1.6695, + "step": 3060 + }, + { + "epoch": 0.5253131971855157, + "grad_norm": 2.8587355719532934, + "learning_rate": 9.666559414093677e-06, + "loss": 1.6293, + "step": 3061 + }, + { + "epoch": 0.5254848120816887, + "grad_norm": 2.6877686928836897, + "learning_rate": 9.6610041833069e-06, + "loss": 1.6911, + "step": 3062 + }, + { + "epoch": 0.5256564269778616, + "grad_norm": 3.871341156583771, + "learning_rate": 9.655449057254626e-06, + "loss": 1.6207, + "step": 3063 + }, + { + "epoch": 0.5258280418740346, + "grad_norm": 2.9626490480956504, + "learning_rate": 9.649894037653134e-06, + "loss": 1.5107, + "step": 3064 + }, + { + "epoch": 0.5259996567702077, + "grad_norm": 2.9580495649736647, + "learning_rate": 9.644339126218685e-06, + "loss": 1.629, + "step": 3065 + }, + { + "epoch": 0.5261712716663807, + "grad_norm": 3.0515758048142048, + "learning_rate": 9.63878432466749e-06, + "loss": 1.4563, + "step": 3066 + }, + { + "epoch": 0.5263428865625537, + "grad_norm": 2.8631908037679143, + "learning_rate": 9.633229634715734e-06, + "loss": 1.6682, + "step": 3067 + }, + { + "epoch": 0.5265145014587266, + 
"grad_norm": 3.8304617879037144, + "learning_rate": 9.627675058079568e-06, + "loss": 1.7055, + "step": 3068 + }, + { + "epoch": 0.5266861163548996, + "grad_norm": 3.94461020436735, + "learning_rate": 9.622120596475106e-06, + "loss": 1.778, + "step": 3069 + }, + { + "epoch": 0.5268577312510726, + "grad_norm": 3.1485992130098026, + "learning_rate": 9.61656625161843e-06, + "loss": 1.5378, + "step": 3070 + }, + { + "epoch": 0.5270293461472456, + "grad_norm": 3.4000883307667227, + "learning_rate": 9.611012025225586e-06, + "loss": 1.782, + "step": 3071 + }, + { + "epoch": 0.5272009610434185, + "grad_norm": 3.4845712419648374, + "learning_rate": 9.605457919012573e-06, + "loss": 1.5906, + "step": 3072 + }, + { + "epoch": 0.5273725759395915, + "grad_norm": 2.997439696912847, + "learning_rate": 9.599903934695366e-06, + "loss": 1.5391, + "step": 3073 + }, + { + "epoch": 0.5275441908357645, + "grad_norm": 3.2332163603639534, + "learning_rate": 9.594350073989894e-06, + "loss": 1.724, + "step": 3074 + }, + { + "epoch": 0.5277158057319375, + "grad_norm": 3.0070930550206905, + "learning_rate": 9.588796338612054e-06, + "loss": 1.5411, + "step": 3075 + }, + { + "epoch": 0.5278874206281106, + "grad_norm": 2.8369221225138164, + "learning_rate": 9.583242730277699e-06, + "loss": 1.4523, + "step": 3076 + }, + { + "epoch": 0.5280590355242835, + "grad_norm": 3.0637510898392435, + "learning_rate": 9.577689250702648e-06, + "loss": 1.4609, + "step": 3077 + }, + { + "epoch": 0.5282306504204565, + "grad_norm": 3.1841349008713564, + "learning_rate": 9.572135901602674e-06, + "loss": 1.5989, + "step": 3078 + }, + { + "epoch": 0.5284022653166295, + "grad_norm": 2.4662320141750036, + "learning_rate": 9.566582684693511e-06, + "loss": 1.555, + "step": 3079 + }, + { + "epoch": 0.5285738802128025, + "grad_norm": 4.4480395594401285, + "learning_rate": 9.561029601690857e-06, + "loss": 1.5729, + "step": 3080 + }, + { + "epoch": 0.5287454951089755, + "grad_norm": 3.348426964930606, + "learning_rate": 9.555476654310364e-06, + "loss": 1.6402, + "step": 3081 + }, + { + "epoch": 0.5289171100051484, + "grad_norm": 2.646244640858748, + "learning_rate": 9.549923844267645e-06, + "loss": 1.5573, + "step": 3082 + }, + { + "epoch": 0.5290887249013214, + "grad_norm": 3.807154859508447, + "learning_rate": 9.544371173278272e-06, + "loss": 1.6559, + "step": 3083 + }, + { + "epoch": 0.5292603397974944, + "grad_norm": 2.5699535991811344, + "learning_rate": 9.538818643057764e-06, + "loss": 1.4321, + "step": 3084 + }, + { + "epoch": 0.5294319546936674, + "grad_norm": 2.7079988731211966, + "learning_rate": 9.53326625532161e-06, + "loss": 1.5292, + "step": 3085 + }, + { + "epoch": 0.5296035695898405, + "grad_norm": 3.079291556509928, + "learning_rate": 9.527714011785242e-06, + "loss": 1.5964, + "step": 3086 + }, + { + "epoch": 0.5297751844860134, + "grad_norm": 3.5112551476220415, + "learning_rate": 9.522161914164062e-06, + "loss": 1.6439, + "step": 3087 + }, + { + "epoch": 0.5299467993821864, + "grad_norm": 2.6316422699071955, + "learning_rate": 9.516609964173415e-06, + "loss": 1.5045, + "step": 3088 + }, + { + "epoch": 0.5301184142783594, + "grad_norm": 2.8356694587918763, + "learning_rate": 9.511058163528609e-06, + "loss": 1.8322, + "step": 3089 + }, + { + "epoch": 0.5302900291745324, + "grad_norm": 3.0095093652520353, + "learning_rate": 9.505506513944895e-06, + "loss": 1.7701, + "step": 3090 + }, + { + "epoch": 0.5304616440707053, + "grad_norm": 3.7078079876066483, + "learning_rate": 9.499955017137485e-06, + "loss": 1.8798, + "step": 3091 + }, + { 
+ "epoch": 0.5306332589668783, + "grad_norm": 4.049965696696388, + "learning_rate": 9.494403674821544e-06, + "loss": 1.8037, + "step": 3092 + }, + { + "epoch": 0.5308048738630513, + "grad_norm": 3.3324218702154194, + "learning_rate": 9.488852488712186e-06, + "loss": 1.6893, + "step": 3093 + }, + { + "epoch": 0.5309764887592243, + "grad_norm": 2.8581293827071406, + "learning_rate": 9.483301460524484e-06, + "loss": 1.8557, + "step": 3094 + }, + { + "epoch": 0.5311481036553973, + "grad_norm": 3.473225541807752, + "learning_rate": 9.477750591973452e-06, + "loss": 1.6691, + "step": 3095 + }, + { + "epoch": 0.5313197185515702, + "grad_norm": 3.5794322951330813, + "learning_rate": 9.472199884774057e-06, + "loss": 1.7149, + "step": 3096 + }, + { + "epoch": 0.5314913334477432, + "grad_norm": 2.7382333920686834, + "learning_rate": 9.466649340641224e-06, + "loss": 1.6974, + "step": 3097 + }, + { + "epoch": 0.5316629483439163, + "grad_norm": 2.887841404324223, + "learning_rate": 9.461098961289819e-06, + "loss": 1.8367, + "step": 3098 + }, + { + "epoch": 0.5318345632400893, + "grad_norm": 3.199515880275442, + "learning_rate": 9.45554874843466e-06, + "loss": 1.6463, + "step": 3099 + }, + { + "epoch": 0.5320061781362623, + "grad_norm": 2.7367523527859325, + "learning_rate": 9.449998703790515e-06, + "loss": 1.4332, + "step": 3100 + }, + { + "epoch": 0.5321777930324352, + "grad_norm": 3.1339916092169706, + "learning_rate": 9.444448829072103e-06, + "loss": 1.8334, + "step": 3101 + }, + { + "epoch": 0.5323494079286082, + "grad_norm": 3.1675956152228326, + "learning_rate": 9.438899125994078e-06, + "loss": 1.5957, + "step": 3102 + }, + { + "epoch": 0.5325210228247812, + "grad_norm": 2.964338482506634, + "learning_rate": 9.433349596271053e-06, + "loss": 1.6785, + "step": 3103 + }, + { + "epoch": 0.5326926377209542, + "grad_norm": 3.8456787896391362, + "learning_rate": 9.427800241617585e-06, + "loss": 1.6171, + "step": 3104 + }, + { + "epoch": 0.5328642526171272, + "grad_norm": 2.8051100245106744, + "learning_rate": 9.422251063748173e-06, + "loss": 1.5772, + "step": 3105 + }, + { + "epoch": 0.5330358675133001, + "grad_norm": 3.224182021090952, + "learning_rate": 9.416702064377268e-06, + "loss": 1.7539, + "step": 3106 + }, + { + "epoch": 0.5332074824094731, + "grad_norm": 3.406151133598625, + "learning_rate": 9.411153245219262e-06, + "loss": 1.8219, + "step": 3107 + }, + { + "epoch": 0.5333790973056461, + "grad_norm": 2.8712927931766603, + "learning_rate": 9.405604607988484e-06, + "loss": 1.6901, + "step": 3108 + }, + { + "epoch": 0.5335507122018192, + "grad_norm": 2.7512510766676535, + "learning_rate": 9.40005615439922e-06, + "loss": 1.447, + "step": 3109 + }, + { + "epoch": 0.533722327097992, + "grad_norm": 2.9091697168803616, + "learning_rate": 9.394507886165692e-06, + "loss": 1.4168, + "step": 3110 + }, + { + "epoch": 0.5338939419941651, + "grad_norm": 3.4704008075401305, + "learning_rate": 9.388959805002067e-06, + "loss": 1.7488, + "step": 3111 + }, + { + "epoch": 0.5340655568903381, + "grad_norm": 3.5895672566890706, + "learning_rate": 9.383411912622451e-06, + "loss": 1.7046, + "step": 3112 + }, + { + "epoch": 0.5342371717865111, + "grad_norm": 2.9292126485329453, + "learning_rate": 9.3778642107409e-06, + "loss": 1.6853, + "step": 3113 + }, + { + "epoch": 0.5344087866826841, + "grad_norm": 3.568739642159117, + "learning_rate": 9.372316701071395e-06, + "loss": 1.5751, + "step": 3114 + }, + { + "epoch": 0.534580401578857, + "grad_norm": 3.1154170172613482, + "learning_rate": 9.366769385327875e-06, + "loss": 
1.7357, + "step": 3115 + }, + { + "epoch": 0.53475201647503, + "grad_norm": 2.539015831059933, + "learning_rate": 9.361222265224208e-06, + "loss": 1.3283, + "step": 3116 + }, + { + "epoch": 0.534923631371203, + "grad_norm": 3.3481529316957417, + "learning_rate": 9.355675342474207e-06, + "loss": 1.42, + "step": 3117 + }, + { + "epoch": 0.535095246267376, + "grad_norm": 3.664106804156913, + "learning_rate": 9.350128618791622e-06, + "loss": 1.8234, + "step": 3118 + }, + { + "epoch": 0.535266861163549, + "grad_norm": 3.1141428945684364, + "learning_rate": 9.344582095890146e-06, + "loss": 1.7784, + "step": 3119 + }, + { + "epoch": 0.535438476059722, + "grad_norm": 3.2260373550648382, + "learning_rate": 9.339035775483397e-06, + "loss": 1.849, + "step": 3120 + }, + { + "epoch": 0.535610090955895, + "grad_norm": 3.4806057930306125, + "learning_rate": 9.333489659284945e-06, + "loss": 1.769, + "step": 3121 + }, + { + "epoch": 0.535781705852068, + "grad_norm": 3.0437960640118646, + "learning_rate": 9.327943749008291e-06, + "loss": 1.7897, + "step": 3122 + }, + { + "epoch": 0.535953320748241, + "grad_norm": 3.322013640504874, + "learning_rate": 9.32239804636687e-06, + "loss": 1.6058, + "step": 3123 + }, + { + "epoch": 0.5361249356444139, + "grad_norm": 2.8260356895638363, + "learning_rate": 9.316852553074057e-06, + "loss": 1.6125, + "step": 3124 + }, + { + "epoch": 0.5362965505405869, + "grad_norm": 6.503217832256964, + "learning_rate": 9.311307270843163e-06, + "loss": 1.5118, + "step": 3125 + }, + { + "epoch": 0.5364681654367599, + "grad_norm": 2.82857335316144, + "learning_rate": 9.305762201387429e-06, + "loss": 1.5668, + "step": 3126 + }, + { + "epoch": 0.5366397803329329, + "grad_norm": 2.9669791648103843, + "learning_rate": 9.300217346420029e-06, + "loss": 1.4485, + "step": 3127 + }, + { + "epoch": 0.5368113952291059, + "grad_norm": 3.367680783024022, + "learning_rate": 9.294672707654082e-06, + "loss": 1.5547, + "step": 3128 + }, + { + "epoch": 0.5369830101252788, + "grad_norm": 3.4903317504869467, + "learning_rate": 9.289128286802626e-06, + "loss": 1.5755, + "step": 3129 + }, + { + "epoch": 0.5371546250214518, + "grad_norm": 3.1530723549601527, + "learning_rate": 9.283584085578642e-06, + "loss": 1.7715, + "step": 3130 + }, + { + "epoch": 0.5373262399176248, + "grad_norm": 3.124468739546777, + "learning_rate": 9.278040105695044e-06, + "loss": 1.5466, + "step": 3131 + }, + { + "epoch": 0.5374978548137979, + "grad_norm": 3.945707095709437, + "learning_rate": 9.27249634886466e-06, + "loss": 1.4369, + "step": 3132 + }, + { + "epoch": 0.5376694697099709, + "grad_norm": 3.831407487967149, + "learning_rate": 9.266952816800273e-06, + "loss": 1.6932, + "step": 3133 + }, + { + "epoch": 0.5378410846061438, + "grad_norm": 2.749762767683833, + "learning_rate": 9.261409511214583e-06, + "loss": 1.4992, + "step": 3134 + }, + { + "epoch": 0.5380126995023168, + "grad_norm": 3.2977005493599743, + "learning_rate": 9.25586643382022e-06, + "loss": 1.6776, + "step": 3135 + }, + { + "epoch": 0.5381843143984898, + "grad_norm": 4.067565282481566, + "learning_rate": 9.25032358632975e-06, + "loss": 1.4197, + "step": 3136 + }, + { + "epoch": 0.5383559292946628, + "grad_norm": 3.7995147190027736, + "learning_rate": 9.244780970455667e-06, + "loss": 1.7455, + "step": 3137 + }, + { + "epoch": 0.5385275441908358, + "grad_norm": 3.103530138190673, + "learning_rate": 9.239238587910383e-06, + "loss": 1.648, + "step": 3138 + }, + { + "epoch": 0.5386991590870087, + "grad_norm": 2.685026120240778, + "learning_rate": 
9.233696440406248e-06, + "loss": 1.4128, + "step": 3139 + }, + { + "epoch": 0.5388707739831817, + "grad_norm": 3.435715831410238, + "learning_rate": 9.228154529655541e-06, + "loss": 1.5112, + "step": 3140 + }, + { + "epoch": 0.5390423888793547, + "grad_norm": 2.9651458083613584, + "learning_rate": 9.222612857370457e-06, + "loss": 1.6358, + "step": 3141 + }, + { + "epoch": 0.5392140037755278, + "grad_norm": 3.8790807136137473, + "learning_rate": 9.217071425263133e-06, + "loss": 1.8737, + "step": 3142 + }, + { + "epoch": 0.5393856186717007, + "grad_norm": 2.98994425488071, + "learning_rate": 9.211530235045619e-06, + "loss": 1.4321, + "step": 3143 + }, + { + "epoch": 0.5395572335678737, + "grad_norm": 2.7756326011718397, + "learning_rate": 9.205989288429887e-06, + "loss": 1.5311, + "step": 3144 + }, + { + "epoch": 0.5397288484640467, + "grad_norm": 2.7959937854848054, + "learning_rate": 9.200448587127852e-06, + "loss": 1.474, + "step": 3145 + }, + { + "epoch": 0.5399004633602197, + "grad_norm": 3.159795372265623, + "learning_rate": 9.194908132851335e-06, + "loss": 1.7627, + "step": 3146 + }, + { + "epoch": 0.5400720782563927, + "grad_norm": 4.014115503951635, + "learning_rate": 9.18936792731209e-06, + "loss": 1.5017, + "step": 3147 + }, + { + "epoch": 0.5402436931525656, + "grad_norm": 4.094963679713884, + "learning_rate": 9.183827972221793e-06, + "loss": 1.8115, + "step": 3148 + }, + { + "epoch": 0.5404153080487386, + "grad_norm": 3.801222655142659, + "learning_rate": 9.178288269292044e-06, + "loss": 1.5325, + "step": 3149 + }, + { + "epoch": 0.5405869229449116, + "grad_norm": 3.224236046502278, + "learning_rate": 9.172748820234357e-06, + "loss": 1.5453, + "step": 3150 + }, + { + "epoch": 0.5407585378410846, + "grad_norm": 3.3694237934199216, + "learning_rate": 9.167209626760173e-06, + "loss": 1.6274, + "step": 3151 + }, + { + "epoch": 0.5409301527372576, + "grad_norm": 2.6918086154650154, + "learning_rate": 9.161670690580861e-06, + "loss": 1.3827, + "step": 3152 + }, + { + "epoch": 0.5411017676334305, + "grad_norm": 3.564404061384756, + "learning_rate": 9.156132013407698e-06, + "loss": 1.6679, + "step": 3153 + }, + { + "epoch": 0.5412733825296036, + "grad_norm": 3.9773108418994694, + "learning_rate": 9.150593596951888e-06, + "loss": 1.6985, + "step": 3154 + }, + { + "epoch": 0.5414449974257766, + "grad_norm": 3.2267741107650285, + "learning_rate": 9.14505544292456e-06, + "loss": 1.6322, + "step": 3155 + }, + { + "epoch": 0.5416166123219496, + "grad_norm": 2.705750818995077, + "learning_rate": 9.139517553036743e-06, + "loss": 1.49, + "step": 3156 + }, + { + "epoch": 0.5417882272181226, + "grad_norm": 3.7191539059704244, + "learning_rate": 9.133979928999404e-06, + "loss": 1.914, + "step": 3157 + }, + { + "epoch": 0.5419598421142955, + "grad_norm": 3.853330523390222, + "learning_rate": 9.128442572523418e-06, + "loss": 1.6405, + "step": 3158 + }, + { + "epoch": 0.5421314570104685, + "grad_norm": 3.6269421619656335, + "learning_rate": 9.122905485319583e-06, + "loss": 1.7037, + "step": 3159 + }, + { + "epoch": 0.5423030719066415, + "grad_norm": 3.281591999021539, + "learning_rate": 9.117368669098606e-06, + "loss": 1.7438, + "step": 3160 + }, + { + "epoch": 0.5424746868028145, + "grad_norm": 3.0282696455524576, + "learning_rate": 9.111832125571122e-06, + "loss": 1.3565, + "step": 3161 + }, + { + "epoch": 0.5426463016989874, + "grad_norm": 4.260601910693571, + "learning_rate": 9.106295856447669e-06, + "loss": 1.6524, + "step": 3162 + }, + { + "epoch": 0.5428179165951604, + "grad_norm": 
3.625124872919927, + "learning_rate": 9.100759863438702e-06, + "loss": 1.5342, + "step": 3163 + }, + { + "epoch": 0.5429895314913334, + "grad_norm": 3.534147315392249, + "learning_rate": 9.095224148254603e-06, + "loss": 1.7402, + "step": 3164 + }, + { + "epoch": 0.5431611463875065, + "grad_norm": 3.26583741777128, + "learning_rate": 9.089688712605653e-06, + "loss": 1.8655, + "step": 3165 + }, + { + "epoch": 0.5433327612836795, + "grad_norm": 4.2500124236058525, + "learning_rate": 9.084153558202058e-06, + "loss": 1.7281, + "step": 3166 + }, + { + "epoch": 0.5435043761798524, + "grad_norm": 2.9720610009778494, + "learning_rate": 9.078618686753933e-06, + "loss": 1.5478, + "step": 3167 + }, + { + "epoch": 0.5436759910760254, + "grad_norm": 2.8446562522949614, + "learning_rate": 9.073084099971298e-06, + "loss": 1.522, + "step": 3168 + }, + { + "epoch": 0.5438476059721984, + "grad_norm": 4.0619232355186545, + "learning_rate": 9.067549799564101e-06, + "loss": 1.8494, + "step": 3169 + }, + { + "epoch": 0.5440192208683714, + "grad_norm": 3.31855036678171, + "learning_rate": 9.062015787242184e-06, + "loss": 1.4725, + "step": 3170 + }, + { + "epoch": 0.5441908357645444, + "grad_norm": 3.519317792347857, + "learning_rate": 9.056482064715316e-06, + "loss": 1.8027, + "step": 3171 + }, + { + "epoch": 0.5443624506607173, + "grad_norm": 3.651502367758153, + "learning_rate": 9.050948633693162e-06, + "loss": 1.6547, + "step": 3172 + }, + { + "epoch": 0.5445340655568903, + "grad_norm": 3.224645257590756, + "learning_rate": 9.045415495885313e-06, + "loss": 1.6618, + "step": 3173 + }, + { + "epoch": 0.5447056804530633, + "grad_norm": 2.6408626767559285, + "learning_rate": 9.039882653001254e-06, + "loss": 1.2583, + "step": 3174 + }, + { + "epoch": 0.5448772953492363, + "grad_norm": 2.909202745582501, + "learning_rate": 9.034350106750383e-06, + "loss": 1.5819, + "step": 3175 + }, + { + "epoch": 0.5450489102454092, + "grad_norm": 3.1864166312010496, + "learning_rate": 9.028817858842013e-06, + "loss": 1.5887, + "step": 3176 + }, + { + "epoch": 0.5452205251415823, + "grad_norm": 3.604941346577423, + "learning_rate": 9.02328591098536e-06, + "loss": 1.8248, + "step": 3177 + }, + { + "epoch": 0.5453921400377553, + "grad_norm": 3.609232795481901, + "learning_rate": 9.017754264889548e-06, + "loss": 1.7879, + "step": 3178 + }, + { + "epoch": 0.5455637549339283, + "grad_norm": 3.3737067530801066, + "learning_rate": 9.01222292226361e-06, + "loss": 1.6805, + "step": 3179 + }, + { + "epoch": 0.5457353698301013, + "grad_norm": 3.3427368799239394, + "learning_rate": 9.006691884816475e-06, + "loss": 1.4917, + "step": 3180 + }, + { + "epoch": 0.5459069847262742, + "grad_norm": 3.2118809871704537, + "learning_rate": 9.00116115425699e-06, + "loss": 1.4789, + "step": 3181 + }, + { + "epoch": 0.5460785996224472, + "grad_norm": 4.171758481561278, + "learning_rate": 8.995630732293903e-06, + "loss": 1.6471, + "step": 3182 + }, + { + "epoch": 0.5462502145186202, + "grad_norm": 2.901604661624939, + "learning_rate": 8.990100620635866e-06, + "loss": 1.4273, + "step": 3183 + }, + { + "epoch": 0.5464218294147932, + "grad_norm": 3.347255413351688, + "learning_rate": 8.984570820991433e-06, + "loss": 1.4522, + "step": 3184 + }, + { + "epoch": 0.5465934443109662, + "grad_norm": 3.003179390147555, + "learning_rate": 8.979041335069072e-06, + "loss": 1.4093, + "step": 3185 + }, + { + "epoch": 0.5467650592071391, + "grad_norm": 3.152519016463287, + "learning_rate": 8.973512164577137e-06, + "loss": 1.5751, + "step": 3186 + }, + { + "epoch": 
0.5469366741033121, + "grad_norm": 3.4099552247134874, + "learning_rate": 8.967983311223898e-06, + "loss": 1.6504, + "step": 3187 + }, + { + "epoch": 0.5471082889994852, + "grad_norm": 3.0910370271648766, + "learning_rate": 8.96245477671752e-06, + "loss": 1.4828, + "step": 3188 + }, + { + "epoch": 0.5472799038956582, + "grad_norm": 2.9321717583910067, + "learning_rate": 8.956926562766073e-06, + "loss": 1.5085, + "step": 3189 + }, + { + "epoch": 0.5474515187918312, + "grad_norm": 3.860885464620784, + "learning_rate": 8.951398671077531e-06, + "loss": 1.8999, + "step": 3190 + }, + { + "epoch": 0.5476231336880041, + "grad_norm": 3.920706071802172, + "learning_rate": 8.945871103359764e-06, + "loss": 1.719, + "step": 3191 + }, + { + "epoch": 0.5477947485841771, + "grad_norm": 3.430502026203666, + "learning_rate": 8.940343861320535e-06, + "loss": 1.5829, + "step": 3192 + }, + { + "epoch": 0.5479663634803501, + "grad_norm": 3.4300209910536172, + "learning_rate": 8.934816946667522e-06, + "loss": 1.8548, + "step": 3193 + }, + { + "epoch": 0.5481379783765231, + "grad_norm": 3.9460775544773163, + "learning_rate": 8.929290361108288e-06, + "loss": 1.6156, + "step": 3194 + }, + { + "epoch": 0.548309593272696, + "grad_norm": 4.169692879001639, + "learning_rate": 8.923764106350303e-06, + "loss": 1.7912, + "step": 3195 + }, + { + "epoch": 0.548481208168869, + "grad_norm": 3.7996445962538785, + "learning_rate": 8.918238184100932e-06, + "loss": 1.5151, + "step": 3196 + }, + { + "epoch": 0.548652823065042, + "grad_norm": 3.526572977598933, + "learning_rate": 8.912712596067439e-06, + "loss": 1.6278, + "step": 3197 + }, + { + "epoch": 0.548824437961215, + "grad_norm": 4.004996777401031, + "learning_rate": 8.90718734395698e-06, + "loss": 1.6496, + "step": 3198 + }, + { + "epoch": 0.5489960528573881, + "grad_norm": 3.204871251414189, + "learning_rate": 8.901662429476607e-06, + "loss": 1.6575, + "step": 3199 + }, + { + "epoch": 0.549167667753561, + "grad_norm": 3.48869295977204, + "learning_rate": 8.896137854333276e-06, + "loss": 1.6429, + "step": 3200 + }, + { + "epoch": 0.549339282649734, + "grad_norm": 3.0777132727440244, + "learning_rate": 8.89061362023383e-06, + "loss": 1.5966, + "step": 3201 + }, + { + "epoch": 0.549510897545907, + "grad_norm": 3.130753088186336, + "learning_rate": 8.885089728885013e-06, + "loss": 1.7012, + "step": 3202 + }, + { + "epoch": 0.54968251244208, + "grad_norm": 3.8534710437305253, + "learning_rate": 8.87956618199346e-06, + "loss": 1.555, + "step": 3203 + }, + { + "epoch": 0.549854127338253, + "grad_norm": 4.17777308512548, + "learning_rate": 8.87404298126569e-06, + "loss": 1.6152, + "step": 3204 + }, + { + "epoch": 0.5500257422344259, + "grad_norm": 5.884834937691766, + "learning_rate": 8.868520128408134e-06, + "loss": 1.6929, + "step": 3205 + }, + { + "epoch": 0.5501973571305989, + "grad_norm": 3.3700435520397036, + "learning_rate": 8.862997625127101e-06, + "loss": 1.6399, + "step": 3206 + }, + { + "epoch": 0.5503689720267719, + "grad_norm": 3.2331146774163155, + "learning_rate": 8.8574754731288e-06, + "loss": 1.6229, + "step": 3207 + }, + { + "epoch": 0.5505405869229449, + "grad_norm": 2.8465536817599277, + "learning_rate": 8.851953674119325e-06, + "loss": 1.5305, + "step": 3208 + }, + { + "epoch": 0.550712201819118, + "grad_norm": 3.8719672771421205, + "learning_rate": 8.846432229804668e-06, + "loss": 1.4278, + "step": 3209 + }, + { + "epoch": 0.5508838167152909, + "grad_norm": 3.4713391910977824, + "learning_rate": 8.840911141890706e-06, + "loss": 1.4845, + "step": 3210 + }, + 
{ + "epoch": 0.5510554316114639, + "grad_norm": 2.9990677686654994, + "learning_rate": 8.835390412083205e-06, + "loss": 1.66, + "step": 3211 + }, + { + "epoch": 0.5512270465076369, + "grad_norm": 3.1860284710225675, + "learning_rate": 8.829870042087825e-06, + "loss": 1.8805, + "step": 3212 + }, + { + "epoch": 0.5513986614038099, + "grad_norm": 3.1381371519748638, + "learning_rate": 8.824350033610111e-06, + "loss": 1.6016, + "step": 3213 + }, + { + "epoch": 0.5515702762999828, + "grad_norm": 4.248106537432824, + "learning_rate": 8.8188303883555e-06, + "loss": 1.7105, + "step": 3214 + }, + { + "epoch": 0.5517418911961558, + "grad_norm": 3.3046081324328593, + "learning_rate": 8.81331110802932e-06, + "loss": 1.7055, + "step": 3215 + }, + { + "epoch": 0.5519135060923288, + "grad_norm": 2.813984778004484, + "learning_rate": 8.807792194336767e-06, + "loss": 1.3709, + "step": 3216 + }, + { + "epoch": 0.5520851209885018, + "grad_norm": 3.290961639799164, + "learning_rate": 8.802273648982951e-06, + "loss": 1.5811, + "step": 3217 + }, + { + "epoch": 0.5522567358846748, + "grad_norm": 3.4523665038248357, + "learning_rate": 8.796755473672847e-06, + "loss": 1.6301, + "step": 3218 + }, + { + "epoch": 0.5524283507808477, + "grad_norm": 3.3566016376397108, + "learning_rate": 8.791237670111328e-06, + "loss": 1.4386, + "step": 3219 + }, + { + "epoch": 0.5525999656770207, + "grad_norm": 6.89511767181731, + "learning_rate": 8.785720240003146e-06, + "loss": 1.6216, + "step": 3220 + }, + { + "epoch": 0.5527715805731938, + "grad_norm": 3.941630622197514, + "learning_rate": 8.780203185052942e-06, + "loss": 1.8027, + "step": 3221 + }, + { + "epoch": 0.5529431954693668, + "grad_norm": 3.9660821310197085, + "learning_rate": 8.774686506965237e-06, + "loss": 1.7754, + "step": 3222 + }, + { + "epoch": 0.5531148103655398, + "grad_norm": 4.357534638582533, + "learning_rate": 8.769170207444432e-06, + "loss": 1.5571, + "step": 3223 + }, + { + "epoch": 0.5532864252617127, + "grad_norm": 4.532355692806929, + "learning_rate": 8.763654288194823e-06, + "loss": 1.7203, + "step": 3224 + }, + { + "epoch": 0.5534580401578857, + "grad_norm": 4.529123598027301, + "learning_rate": 8.758138750920577e-06, + "loss": 1.4378, + "step": 3225 + }, + { + "epoch": 0.5536296550540587, + "grad_norm": 3.1250973058988665, + "learning_rate": 8.752623597325752e-06, + "loss": 1.4661, + "step": 3226 + }, + { + "epoch": 0.5538012699502317, + "grad_norm": 3.2530388126661824, + "learning_rate": 8.747108829114284e-06, + "loss": 1.7626, + "step": 3227 + }, + { + "epoch": 0.5539728848464047, + "grad_norm": 4.385300760918265, + "learning_rate": 8.741594447989981e-06, + "loss": 1.6426, + "step": 3228 + }, + { + "epoch": 0.5541444997425776, + "grad_norm": 5.252514122415893, + "learning_rate": 8.736080455656545e-06, + "loss": 1.6596, + "step": 3229 + }, + { + "epoch": 0.5543161146387506, + "grad_norm": 3.227288544320505, + "learning_rate": 8.73056685381755e-06, + "loss": 1.5413, + "step": 3230 + }, + { + "epoch": 0.5544877295349236, + "grad_norm": 3.5311116481839115, + "learning_rate": 8.725053644176456e-06, + "loss": 1.6974, + "step": 3231 + }, + { + "epoch": 0.5546593444310967, + "grad_norm": 3.416505908870069, + "learning_rate": 8.719540828436593e-06, + "loss": 1.7293, + "step": 3232 + }, + { + "epoch": 0.5548309593272696, + "grad_norm": 3.457827629344126, + "learning_rate": 8.714028408301178e-06, + "loss": 1.7514, + "step": 3233 + }, + { + "epoch": 0.5550025742234426, + "grad_norm": 3.145198745548947, + "learning_rate": 8.708516385473298e-06, + "loss": 
1.6965, + "step": 3234 + }, + { + "epoch": 0.5551741891196156, + "grad_norm": 2.9496799403656104, + "learning_rate": 8.703004761655918e-06, + "loss": 1.5921, + "step": 3235 + }, + { + "epoch": 0.5553458040157886, + "grad_norm": 3.2901368497468213, + "learning_rate": 8.69749353855189e-06, + "loss": 1.661, + "step": 3236 + }, + { + "epoch": 0.5555174189119616, + "grad_norm": 3.2421031810240613, + "learning_rate": 8.691982717863928e-06, + "loss": 1.4848, + "step": 3237 + }, + { + "epoch": 0.5556890338081345, + "grad_norm": 3.4230864183063145, + "learning_rate": 8.686472301294633e-06, + "loss": 1.5485, + "step": 3238 + }, + { + "epoch": 0.5558606487043075, + "grad_norm": 4.018046537000868, + "learning_rate": 8.680962290546477e-06, + "loss": 1.5272, + "step": 3239 + }, + { + "epoch": 0.5560322636004805, + "grad_norm": 2.9049361300802845, + "learning_rate": 8.6754526873218e-06, + "loss": 1.3446, + "step": 3240 + }, + { + "epoch": 0.5562038784966535, + "grad_norm": 3.341568161747712, + "learning_rate": 8.66994349332283e-06, + "loss": 1.5054, + "step": 3241 + }, + { + "epoch": 0.5563754933928265, + "grad_norm": 3.941577393962539, + "learning_rate": 8.664434710251654e-06, + "loss": 1.6437, + "step": 3242 + }, + { + "epoch": 0.5565471082889994, + "grad_norm": 3.105738291948784, + "learning_rate": 8.658926339810242e-06, + "loss": 1.5786, + "step": 3243 + }, + { + "epoch": 0.5567187231851725, + "grad_norm": 3.074318476874637, + "learning_rate": 8.653418383700433e-06, + "loss": 1.4315, + "step": 3244 + }, + { + "epoch": 0.5568903380813455, + "grad_norm": 3.716842029174973, + "learning_rate": 8.647910843623943e-06, + "loss": 1.7901, + "step": 3245 + }, + { + "epoch": 0.5570619529775185, + "grad_norm": 3.3104958528902366, + "learning_rate": 8.642403721282348e-06, + "loss": 1.6603, + "step": 3246 + }, + { + "epoch": 0.5572335678736914, + "grad_norm": 3.3367371036361493, + "learning_rate": 8.636897018377104e-06, + "loss": 1.6565, + "step": 3247 + }, + { + "epoch": 0.5574051827698644, + "grad_norm": 3.0095529205717897, + "learning_rate": 8.631390736609535e-06, + "loss": 1.345, + "step": 3248 + }, + { + "epoch": 0.5575767976660374, + "grad_norm": 3.2369329555542214, + "learning_rate": 8.625884877680835e-06, + "loss": 1.5876, + "step": 3249 + }, + { + "epoch": 0.5577484125622104, + "grad_norm": 2.926198446720704, + "learning_rate": 8.620379443292068e-06, + "loss": 1.6695, + "step": 3250 + }, + { + "epoch": 0.5579200274583834, + "grad_norm": 3.261907790467902, + "learning_rate": 8.61487443514417e-06, + "loss": 1.6412, + "step": 3251 + }, + { + "epoch": 0.5580916423545563, + "grad_norm": 3.367924205214572, + "learning_rate": 8.609369854937934e-06, + "loss": 1.8815, + "step": 3252 + }, + { + "epoch": 0.5582632572507293, + "grad_norm": 3.034778071252895, + "learning_rate": 8.603865704374032e-06, + "loss": 1.3463, + "step": 3253 + }, + { + "epoch": 0.5584348721469024, + "grad_norm": 3.71398727943537, + "learning_rate": 8.598361985153e-06, + "loss": 1.8752, + "step": 3254 + }, + { + "epoch": 0.5586064870430754, + "grad_norm": 2.901154887769359, + "learning_rate": 8.592858698975241e-06, + "loss": 1.5837, + "step": 3255 + }, + { + "epoch": 0.5587781019392484, + "grad_norm": 4.212818273348361, + "learning_rate": 8.58735584754102e-06, + "loss": 1.7885, + "step": 3256 + }, + { + "epoch": 0.5589497168354213, + "grad_norm": 3.057776985604256, + "learning_rate": 8.58185343255048e-06, + "loss": 1.4712, + "step": 3257 + }, + { + "epoch": 0.5591213317315943, + "grad_norm": 3.2238568437144837, + "learning_rate": 
8.576351455703611e-06, + "loss": 1.5019, + "step": 3258 + }, + { + "epoch": 0.5592929466277673, + "grad_norm": 3.7427697564934923, + "learning_rate": 8.570849918700278e-06, + "loss": 1.505, + "step": 3259 + }, + { + "epoch": 0.5594645615239403, + "grad_norm": 3.6361104250789005, + "learning_rate": 8.565348823240214e-06, + "loss": 1.7453, + "step": 3260 + }, + { + "epoch": 0.5596361764201133, + "grad_norm": 3.9375479659544106, + "learning_rate": 8.559848171023006e-06, + "loss": 1.6328, + "step": 3261 + }, + { + "epoch": 0.5598077913162862, + "grad_norm": 3.0915534450178033, + "learning_rate": 8.554347963748113e-06, + "loss": 1.7329, + "step": 3262 + }, + { + "epoch": 0.5599794062124592, + "grad_norm": 2.696302089915293, + "learning_rate": 8.548848203114853e-06, + "loss": 1.4697, + "step": 3263 + }, + { + "epoch": 0.5601510211086322, + "grad_norm": 3.508385094337635, + "learning_rate": 8.543348890822399e-06, + "loss": 1.7017, + "step": 3264 + }, + { + "epoch": 0.5603226360048053, + "grad_norm": 3.297750010295088, + "learning_rate": 8.537850028569796e-06, + "loss": 1.7364, + "step": 3265 + }, + { + "epoch": 0.5604942509009782, + "grad_norm": 3.0568413028715047, + "learning_rate": 8.532351618055944e-06, + "loss": 1.751, + "step": 3266 + }, + { + "epoch": 0.5606658657971512, + "grad_norm": 3.7617092670933165, + "learning_rate": 8.526853660979609e-06, + "loss": 1.8571, + "step": 3267 + }, + { + "epoch": 0.5608374806933242, + "grad_norm": 3.7064590959704242, + "learning_rate": 8.521356159039407e-06, + "loss": 1.5883, + "step": 3268 + }, + { + "epoch": 0.5610090955894972, + "grad_norm": 3.035057604090649, + "learning_rate": 8.515859113933828e-06, + "loss": 1.4653, + "step": 3269 + }, + { + "epoch": 0.5611807104856702, + "grad_norm": 3.4457975765676623, + "learning_rate": 8.510362527361205e-06, + "loss": 1.4828, + "step": 3270 + }, + { + "epoch": 0.5613523253818431, + "grad_norm": 2.87825553417923, + "learning_rate": 8.504866401019738e-06, + "loss": 1.5598, + "step": 3271 + }, + { + "epoch": 0.5615239402780161, + "grad_norm": 3.2036235332688365, + "learning_rate": 8.499370736607486e-06, + "loss": 1.6237, + "step": 3272 + }, + { + "epoch": 0.5616955551741891, + "grad_norm": 3.1444165062997635, + "learning_rate": 8.49387553582236e-06, + "loss": 1.446, + "step": 3273 + }, + { + "epoch": 0.5618671700703621, + "grad_norm": 3.9360624692646167, + "learning_rate": 8.488380800362133e-06, + "loss": 1.6504, + "step": 3274 + }, + { + "epoch": 0.5620387849665351, + "grad_norm": 3.051569389324742, + "learning_rate": 8.482886531924434e-06, + "loss": 1.7111, + "step": 3275 + }, + { + "epoch": 0.562210399862708, + "grad_norm": 3.1815340298388, + "learning_rate": 8.477392732206738e-06, + "loss": 1.723, + "step": 3276 + }, + { + "epoch": 0.562382014758881, + "grad_norm": 3.319420710600917, + "learning_rate": 8.471899402906387e-06, + "loss": 1.8347, + "step": 3277 + }, + { + "epoch": 0.5625536296550541, + "grad_norm": 2.7596499428257726, + "learning_rate": 8.466406545720572e-06, + "loss": 1.5249, + "step": 3278 + }, + { + "epoch": 0.5627252445512271, + "grad_norm": 3.1199004761412286, + "learning_rate": 8.460914162346343e-06, + "loss": 1.7499, + "step": 3279 + }, + { + "epoch": 0.5628968594474001, + "grad_norm": 4.087261083242442, + "learning_rate": 8.455422254480597e-06, + "loss": 1.6515, + "step": 3280 + }, + { + "epoch": 0.563068474343573, + "grad_norm": 4.252625514188772, + "learning_rate": 8.44993082382009e-06, + "loss": 1.5245, + "step": 3281 + }, + { + "epoch": 0.563240089239746, + "grad_norm": 
4.070922414145992, + "learning_rate": 8.444439872061424e-06, + "loss": 1.3844, + "step": 3282 + }, + { + "epoch": 0.563411704135919, + "grad_norm": 3.1588998630830547, + "learning_rate": 8.438949400901057e-06, + "loss": 1.4534, + "step": 3283 + }, + { + "epoch": 0.563583319032092, + "grad_norm": 2.9017090307147098, + "learning_rate": 8.4334594120353e-06, + "loss": 1.4493, + "step": 3284 + }, + { + "epoch": 0.5637549339282649, + "grad_norm": 3.2987433412615177, + "learning_rate": 8.427969907160314e-06, + "loss": 1.6449, + "step": 3285 + }, + { + "epoch": 0.5639265488244379, + "grad_norm": 3.2903593388123027, + "learning_rate": 8.42248088797211e-06, + "loss": 1.6136, + "step": 3286 + }, + { + "epoch": 0.564098163720611, + "grad_norm": 3.3124787390674393, + "learning_rate": 8.416992356166549e-06, + "loss": 1.3647, + "step": 3287 + }, + { + "epoch": 0.564269778616784, + "grad_norm": 3.0727836880073385, + "learning_rate": 8.411504313439335e-06, + "loss": 1.4165, + "step": 3288 + }, + { + "epoch": 0.564441393512957, + "grad_norm": 2.58661432877082, + "learning_rate": 8.406016761486034e-06, + "loss": 1.4452, + "step": 3289 + }, + { + "epoch": 0.5646130084091299, + "grad_norm": 3.1164017370539896, + "learning_rate": 8.400529702002048e-06, + "loss": 1.3702, + "step": 3290 + }, + { + "epoch": 0.5647846233053029, + "grad_norm": 3.4437558929796213, + "learning_rate": 8.395043136682638e-06, + "loss": 1.7273, + "step": 3291 + }, + { + "epoch": 0.5649562382014759, + "grad_norm": 3.843491971675913, + "learning_rate": 8.389557067222902e-06, + "loss": 1.5507, + "step": 3292 + }, + { + "epoch": 0.5651278530976489, + "grad_norm": 3.8316096131970228, + "learning_rate": 8.384071495317795e-06, + "loss": 1.4862, + "step": 3293 + }, + { + "epoch": 0.5652994679938219, + "grad_norm": 2.797524656384283, + "learning_rate": 8.378586422662107e-06, + "loss": 1.5572, + "step": 3294 + }, + { + "epoch": 0.5654710828899948, + "grad_norm": 3.212208854813479, + "learning_rate": 8.37310185095048e-06, + "loss": 1.5172, + "step": 3295 + }, + { + "epoch": 0.5656426977861678, + "grad_norm": 3.3748620111847054, + "learning_rate": 8.367617781877403e-06, + "loss": 1.5082, + "step": 3296 + }, + { + "epoch": 0.5658143126823408, + "grad_norm": 5.338641434217973, + "learning_rate": 8.362134217137205e-06, + "loss": 1.375, + "step": 3297 + }, + { + "epoch": 0.5659859275785138, + "grad_norm": 3.439008543231657, + "learning_rate": 8.356651158424064e-06, + "loss": 1.6181, + "step": 3298 + }, + { + "epoch": 0.5661575424746867, + "grad_norm": 3.6946178808177086, + "learning_rate": 8.351168607432e-06, + "loss": 1.614, + "step": 3299 + }, + { + "epoch": 0.5663291573708598, + "grad_norm": 2.619305572926834, + "learning_rate": 8.345686565854869e-06, + "loss": 1.4646, + "step": 3300 + }, + { + "epoch": 0.5665007722670328, + "grad_norm": 6.202803906122431, + "learning_rate": 8.340205035386384e-06, + "loss": 1.7143, + "step": 3301 + }, + { + "epoch": 0.5666723871632058, + "grad_norm": 3.067596412776868, + "learning_rate": 8.334724017720084e-06, + "loss": 1.3359, + "step": 3302 + }, + { + "epoch": 0.5668440020593788, + "grad_norm": 3.055630750770194, + "learning_rate": 8.329243514549365e-06, + "loss": 1.5949, + "step": 3303 + }, + { + "epoch": 0.5670156169555517, + "grad_norm": 5.347367936172074, + "learning_rate": 8.323763527567451e-06, + "loss": 1.6535, + "step": 3304 + }, + { + "epoch": 0.5671872318517247, + "grad_norm": 3.7616366234347898, + "learning_rate": 8.318284058467419e-06, + "loss": 1.497, + "step": 3305 + }, + { + "epoch": 
0.5673588467478977, + "grad_norm": 4.113971233573492, + "learning_rate": 8.312805108942172e-06, + "loss": 1.8644, + "step": 3306 + }, + { + "epoch": 0.5675304616440707, + "grad_norm": 3.723546907663054, + "learning_rate": 8.30732668068446e-06, + "loss": 1.4669, + "step": 3307 + }, + { + "epoch": 0.5677020765402437, + "grad_norm": 3.361542419482842, + "learning_rate": 8.301848775386878e-06, + "loss": 1.8408, + "step": 3308 + }, + { + "epoch": 0.5678736914364166, + "grad_norm": 3.8728466920005866, + "learning_rate": 8.296371394741846e-06, + "loss": 1.7269, + "step": 3309 + }, + { + "epoch": 0.5680453063325897, + "grad_norm": 3.6652752949819853, + "learning_rate": 8.290894540441633e-06, + "loss": 1.5592, + "step": 3310 + }, + { + "epoch": 0.5682169212287627, + "grad_norm": 3.2717554817030634, + "learning_rate": 8.285418214178343e-06, + "loss": 1.7266, + "step": 3311 + }, + { + "epoch": 0.5683885361249357, + "grad_norm": 3.2018994610917533, + "learning_rate": 8.279942417643908e-06, + "loss": 1.6679, + "step": 3312 + }, + { + "epoch": 0.5685601510211087, + "grad_norm": 3.2982258673231546, + "learning_rate": 8.27446715253011e-06, + "loss": 1.6281, + "step": 3313 + }, + { + "epoch": 0.5687317659172816, + "grad_norm": 2.8488166862204793, + "learning_rate": 8.268992420528556e-06, + "loss": 1.5427, + "step": 3314 + }, + { + "epoch": 0.5689033808134546, + "grad_norm": 2.840911324814438, + "learning_rate": 8.263518223330698e-06, + "loss": 1.3772, + "step": 3315 + }, + { + "epoch": 0.5690749957096276, + "grad_norm": 3.551932063297759, + "learning_rate": 8.25804456262781e-06, + "loss": 1.436, + "step": 3316 + }, + { + "epoch": 0.5692466106058006, + "grad_norm": 3.259133958841487, + "learning_rate": 8.252571440111018e-06, + "loss": 1.5072, + "step": 3317 + }, + { + "epoch": 0.5694182255019735, + "grad_norm": 2.7811652539634477, + "learning_rate": 8.247098857471262e-06, + "loss": 1.4575, + "step": 3318 + }, + { + "epoch": 0.5695898403981465, + "grad_norm": 3.530250755015589, + "learning_rate": 8.241626816399325e-06, + "loss": 1.4875, + "step": 3319 + }, + { + "epoch": 0.5697614552943195, + "grad_norm": 4.098755890991295, + "learning_rate": 8.236155318585828e-06, + "loss": 1.4714, + "step": 3320 + }, + { + "epoch": 0.5699330701904926, + "grad_norm": 3.5724158873326313, + "learning_rate": 8.230684365721212e-06, + "loss": 1.7572, + "step": 3321 + }, + { + "epoch": 0.5701046850866656, + "grad_norm": 5.678077705744792, + "learning_rate": 8.225213959495762e-06, + "loss": 1.5927, + "step": 3322 + }, + { + "epoch": 0.5702762999828385, + "grad_norm": 3.2149107406497612, + "learning_rate": 8.219744101599588e-06, + "loss": 1.4137, + "step": 3323 + }, + { + "epoch": 0.5704479148790115, + "grad_norm": 3.7739368050624384, + "learning_rate": 8.214274793722622e-06, + "loss": 1.5676, + "step": 3324 + }, + { + "epoch": 0.5706195297751845, + "grad_norm": 3.21248624627995, + "learning_rate": 8.208806037554645e-06, + "loss": 1.5946, + "step": 3325 + }, + { + "epoch": 0.5707911446713575, + "grad_norm": 2.9103287476101745, + "learning_rate": 8.203337834785252e-06, + "loss": 1.5727, + "step": 3326 + }, + { + "epoch": 0.5709627595675305, + "grad_norm": 3.0598952927149305, + "learning_rate": 8.197870187103874e-06, + "loss": 1.607, + "step": 3327 + }, + { + "epoch": 0.5711343744637034, + "grad_norm": 3.0999520615062335, + "learning_rate": 8.192403096199768e-06, + "loss": 1.6799, + "step": 3328 + }, + { + "epoch": 0.5713059893598764, + "grad_norm": 3.6469921226967656, + "learning_rate": 8.186936563762022e-06, + "loss": 1.6337, + 
"step": 3329 + }, + { + "epoch": 0.5714776042560494, + "grad_norm": 3.1784677481318413, + "learning_rate": 8.181470591479549e-06, + "loss": 1.4632, + "step": 3330 + }, + { + "epoch": 0.5716492191522224, + "grad_norm": 3.1199228835907253, + "learning_rate": 8.176005181041083e-06, + "loss": 1.3285, + "step": 3331 + }, + { + "epoch": 0.5718208340483955, + "grad_norm": 3.5270143050902654, + "learning_rate": 8.170540334135197e-06, + "loss": 1.6874, + "step": 3332 + }, + { + "epoch": 0.5719924489445684, + "grad_norm": 2.7322996284646033, + "learning_rate": 8.165076052450281e-06, + "loss": 1.4953, + "step": 3333 + }, + { + "epoch": 0.5721640638407414, + "grad_norm": 3.2270146724477557, + "learning_rate": 8.159612337674557e-06, + "loss": 1.5549, + "step": 3334 + }, + { + "epoch": 0.5723356787369144, + "grad_norm": 3.6229730060018284, + "learning_rate": 8.154149191496066e-06, + "loss": 1.6531, + "step": 3335 + }, + { + "epoch": 0.5725072936330874, + "grad_norm": 2.721936439367243, + "learning_rate": 8.14868661560267e-06, + "loss": 1.4857, + "step": 3336 + }, + { + "epoch": 0.5726789085292603, + "grad_norm": 3.881874850935796, + "learning_rate": 8.143224611682067e-06, + "loss": 1.7753, + "step": 3337 + }, + { + "epoch": 0.5728505234254333, + "grad_norm": 3.4400240710189616, + "learning_rate": 8.137763181421764e-06, + "loss": 1.6223, + "step": 3338 + }, + { + "epoch": 0.5730221383216063, + "grad_norm": 4.0946563294590135, + "learning_rate": 8.132302326509104e-06, + "loss": 1.6434, + "step": 3339 + }, + { + "epoch": 0.5731937532177793, + "grad_norm": 3.5743568567005046, + "learning_rate": 8.126842048631244e-06, + "loss": 1.6623, + "step": 3340 + }, + { + "epoch": 0.5733653681139523, + "grad_norm": 2.7374286604824705, + "learning_rate": 8.12138234947517e-06, + "loss": 1.4436, + "step": 3341 + }, + { + "epoch": 0.5735369830101252, + "grad_norm": 3.1525846004358136, + "learning_rate": 8.115923230727674e-06, + "loss": 1.5641, + "step": 3342 + }, + { + "epoch": 0.5737085979062982, + "grad_norm": 3.1002395847941906, + "learning_rate": 8.110464694075383e-06, + "loss": 1.6402, + "step": 3343 + }, + { + "epoch": 0.5738802128024713, + "grad_norm": 3.8447138292190424, + "learning_rate": 8.105006741204745e-06, + "loss": 1.623, + "step": 3344 + }, + { + "epoch": 0.5740518276986443, + "grad_norm": 3.041107744210225, + "learning_rate": 8.099549373802014e-06, + "loss": 1.6788, + "step": 3345 + }, + { + "epoch": 0.5742234425948173, + "grad_norm": 2.895810481490591, + "learning_rate": 8.09409259355328e-06, + "loss": 1.4531, + "step": 3346 + }, + { + "epoch": 0.5743950574909902, + "grad_norm": 2.8138667535010353, + "learning_rate": 8.088636402144442e-06, + "loss": 1.6378, + "step": 3347 + }, + { + "epoch": 0.5745666723871632, + "grad_norm": 3.9127809164321623, + "learning_rate": 8.08318080126121e-06, + "loss": 1.6785, + "step": 3348 + }, + { + "epoch": 0.5747382872833362, + "grad_norm": 2.9350546619628197, + "learning_rate": 8.077725792589131e-06, + "loss": 1.5561, + "step": 3349 + }, + { + "epoch": 0.5749099021795092, + "grad_norm": 3.093616739657815, + "learning_rate": 8.072271377813548e-06, + "loss": 1.5094, + "step": 3350 + }, + { + "epoch": 0.5750815170756821, + "grad_norm": 3.1588668453140842, + "learning_rate": 8.06681755861964e-06, + "loss": 1.452, + "step": 3351 + }, + { + "epoch": 0.5752531319718551, + "grad_norm": 3.5317660270907942, + "learning_rate": 8.061364336692385e-06, + "loss": 1.6715, + "step": 3352 + }, + { + "epoch": 0.5754247468680281, + "grad_norm": 3.927847784966835, + "learning_rate": 
8.05591171371659e-06, + "loss": 1.6801, + "step": 3353 + }, + { + "epoch": 0.5755963617642011, + "grad_norm": 3.932959669075851, + "learning_rate": 8.050459691376867e-06, + "loss": 1.6584, + "step": 3354 + }, + { + "epoch": 0.5757679766603742, + "grad_norm": 3.39440898968657, + "learning_rate": 8.045008271357644e-06, + "loss": 1.6142, + "step": 3355 + }, + { + "epoch": 0.5759395915565471, + "grad_norm": 3.3611797623361914, + "learning_rate": 8.039557455343172e-06, + "loss": 1.7645, + "step": 3356 + }, + { + "epoch": 0.5761112064527201, + "grad_norm": 3.1520198245820747, + "learning_rate": 8.034107245017501e-06, + "loss": 1.6665, + "step": 3357 + }, + { + "epoch": 0.5762828213488931, + "grad_norm": 2.9756959283566897, + "learning_rate": 8.028657642064509e-06, + "loss": 1.4583, + "step": 3358 + }, + { + "epoch": 0.5764544362450661, + "grad_norm": 3.205094669041891, + "learning_rate": 8.023208648167877e-06, + "loss": 1.5664, + "step": 3359 + }, + { + "epoch": 0.5766260511412391, + "grad_norm": 4.457442925545306, + "learning_rate": 8.017760265011093e-06, + "loss": 1.6954, + "step": 3360 + }, + { + "epoch": 0.576797666037412, + "grad_norm": 3.4469412796953236, + "learning_rate": 8.012312494277473e-06, + "loss": 1.7256, + "step": 3361 + }, + { + "epoch": 0.576969280933585, + "grad_norm": 3.288181659572955, + "learning_rate": 8.006865337650124e-06, + "loss": 1.5143, + "step": 3362 + }, + { + "epoch": 0.577140895829758, + "grad_norm": 3.0880996791979474, + "learning_rate": 8.001418796811981e-06, + "loss": 1.602, + "step": 3363 + }, + { + "epoch": 0.577312510725931, + "grad_norm": 3.7844470543699598, + "learning_rate": 7.995972873445775e-06, + "loss": 1.8454, + "step": 3364 + }, + { + "epoch": 0.577484125622104, + "grad_norm": 3.707990830553102, + "learning_rate": 7.99052756923406e-06, + "loss": 1.6903, + "step": 3365 + }, + { + "epoch": 0.577655740518277, + "grad_norm": 3.102049014407579, + "learning_rate": 7.985082885859184e-06, + "loss": 1.6468, + "step": 3366 + }, + { + "epoch": 0.57782735541445, + "grad_norm": 3.059964517361926, + "learning_rate": 7.979638825003307e-06, + "loss": 1.7285, + "step": 3367 + }, + { + "epoch": 0.577998970310623, + "grad_norm": 3.0880975662217813, + "learning_rate": 7.974195388348406e-06, + "loss": 1.5663, + "step": 3368 + }, + { + "epoch": 0.578170585206796, + "grad_norm": 2.8394638206491982, + "learning_rate": 7.968752577576257e-06, + "loss": 1.5792, + "step": 3369 + }, + { + "epoch": 0.5783422001029689, + "grad_norm": 2.811150670390754, + "learning_rate": 7.963310394368445e-06, + "loss": 1.5458, + "step": 3370 + }, + { + "epoch": 0.5785138149991419, + "grad_norm": 2.757524937848895, + "learning_rate": 7.957868840406362e-06, + "loss": 1.4689, + "step": 3371 + }, + { + "epoch": 0.5786854298953149, + "grad_norm": 3.6727272333159813, + "learning_rate": 7.952427917371198e-06, + "loss": 1.6723, + "step": 3372 + }, + { + "epoch": 0.5788570447914879, + "grad_norm": 3.300240266782386, + "learning_rate": 7.946987626943958e-06, + "loss": 1.6598, + "step": 3373 + }, + { + "epoch": 0.5790286596876609, + "grad_norm": 3.5740827690897934, + "learning_rate": 7.941547970805447e-06, + "loss": 1.6765, + "step": 3374 + }, + { + "epoch": 0.5792002745838338, + "grad_norm": 3.322321882478033, + "learning_rate": 7.936108950636278e-06, + "loss": 1.8564, + "step": 3375 + }, + { + "epoch": 0.5793718894800068, + "grad_norm": 3.4869863184006102, + "learning_rate": 7.930670568116858e-06, + "loss": 1.581, + "step": 3376 + }, + { + "epoch": 0.5795435043761799, + "grad_norm": 3.9038771561881522, 
+ "learning_rate": 7.925232824927411e-06, + "loss": 1.6071, + "step": 3377 + }, + { + "epoch": 0.5797151192723529, + "grad_norm": 2.977760482414295, + "learning_rate": 7.91979572274795e-06, + "loss": 1.6114, + "step": 3378 + }, + { + "epoch": 0.5798867341685259, + "grad_norm": 3.5577265723671947, + "learning_rate": 7.914359263258295e-06, + "loss": 1.8329, + "step": 3379 + }, + { + "epoch": 0.5800583490646988, + "grad_norm": 4.310151594305367, + "learning_rate": 7.908923448138072e-06, + "loss": 1.7581, + "step": 3380 + }, + { + "epoch": 0.5802299639608718, + "grad_norm": 3.532035262433165, + "learning_rate": 7.9034882790667e-06, + "loss": 1.5501, + "step": 3381 + }, + { + "epoch": 0.5804015788570448, + "grad_norm": 3.620376166544094, + "learning_rate": 7.898053757723409e-06, + "loss": 1.793, + "step": 3382 + }, + { + "epoch": 0.5805731937532178, + "grad_norm": 3.6336384949403038, + "learning_rate": 7.892619885787219e-06, + "loss": 1.5072, + "step": 3383 + }, + { + "epoch": 0.5807448086493908, + "grad_norm": 3.431998543591075, + "learning_rate": 7.887186664936945e-06, + "loss": 1.6605, + "step": 3384 + }, + { + "epoch": 0.5809164235455637, + "grad_norm": 3.5446480844974495, + "learning_rate": 7.88175409685122e-06, + "loss": 1.7385, + "step": 3385 + }, + { + "epoch": 0.5810880384417367, + "grad_norm": 3.4525634624061174, + "learning_rate": 7.876322183208456e-06, + "loss": 1.4408, + "step": 3386 + }, + { + "epoch": 0.5812596533379097, + "grad_norm": 3.3098614633002734, + "learning_rate": 7.870890925686875e-06, + "loss": 1.6798, + "step": 3387 + }, + { + "epoch": 0.5814312682340828, + "grad_norm": 4.078781283852957, + "learning_rate": 7.86546032596449e-06, + "loss": 1.5687, + "step": 3388 + }, + { + "epoch": 0.5816028831302557, + "grad_norm": 3.3484219590109903, + "learning_rate": 7.860030385719119e-06, + "loss": 1.6588, + "step": 3389 + }, + { + "epoch": 0.5817744980264287, + "grad_norm": 3.6308744165007396, + "learning_rate": 7.85460110662836e-06, + "loss": 1.4102, + "step": 3390 + }, + { + "epoch": 0.5819461129226017, + "grad_norm": 3.19022682099308, + "learning_rate": 7.849172490369619e-06, + "loss": 1.6959, + "step": 3391 + }, + { + "epoch": 0.5821177278187747, + "grad_norm": 3.694857470540594, + "learning_rate": 7.8437445386201e-06, + "loss": 1.6795, + "step": 3392 + }, + { + "epoch": 0.5822893427149477, + "grad_norm": 4.43058051589044, + "learning_rate": 7.83831725305679e-06, + "loss": 1.5871, + "step": 3393 + }, + { + "epoch": 0.5824609576111206, + "grad_norm": 3.769675050349629, + "learning_rate": 7.832890635356485e-06, + "loss": 1.5692, + "step": 3394 + }, + { + "epoch": 0.5826325725072936, + "grad_norm": 3.158537183662451, + "learning_rate": 7.827464687195762e-06, + "loss": 1.8208, + "step": 3395 + }, + { + "epoch": 0.5828041874034666, + "grad_norm": 3.572121332569598, + "learning_rate": 7.822039410250993e-06, + "loss": 1.8154, + "step": 3396 + }, + { + "epoch": 0.5829758022996396, + "grad_norm": 4.854917811473112, + "learning_rate": 7.816614806198347e-06, + "loss": 1.57, + "step": 3397 + }, + { + "epoch": 0.5831474171958126, + "grad_norm": 3.5679181906198196, + "learning_rate": 7.811190876713783e-06, + "loss": 1.6763, + "step": 3398 + }, + { + "epoch": 0.5833190320919855, + "grad_norm": 3.4176152561497655, + "learning_rate": 7.805767623473055e-06, + "loss": 1.5826, + "step": 3399 + }, + { + "epoch": 0.5834906469881586, + "grad_norm": 3.450280563128342, + "learning_rate": 7.8003450481517e-06, + "loss": 1.5395, + "step": 3400 + }, + { + "epoch": 0.5836622618843316, + "grad_norm": 
2.9783290606056356, + "learning_rate": 7.794923152425058e-06, + "loss": 1.4799, + "step": 3401 + }, + { + "epoch": 0.5838338767805046, + "grad_norm": 3.420392915531139, + "learning_rate": 7.789501937968246e-06, + "loss": 1.5855, + "step": 3402 + }, + { + "epoch": 0.5840054916766776, + "grad_norm": 3.1721732430698566, + "learning_rate": 7.784081406456173e-06, + "loss": 1.3177, + "step": 3403 + }, + { + "epoch": 0.5841771065728505, + "grad_norm": 3.2696159478152915, + "learning_rate": 7.778661559563547e-06, + "loss": 1.4352, + "step": 3404 + }, + { + "epoch": 0.5843487214690235, + "grad_norm": 3.207275864667234, + "learning_rate": 7.773242398964851e-06, + "loss": 1.7167, + "step": 3405 + }, + { + "epoch": 0.5845203363651965, + "grad_norm": 3.456157376810977, + "learning_rate": 7.76782392633437e-06, + "loss": 1.6332, + "step": 3406 + }, + { + "epoch": 0.5846919512613695, + "grad_norm": 3.739277129896731, + "learning_rate": 7.76240614334617e-06, + "loss": 1.4852, + "step": 3407 + }, + { + "epoch": 0.5848635661575424, + "grad_norm": 3.5831469186254328, + "learning_rate": 7.756989051674089e-06, + "loss": 1.3861, + "step": 3408 + }, + { + "epoch": 0.5850351810537154, + "grad_norm": 3.4377752647673323, + "learning_rate": 7.751572652991777e-06, + "loss": 1.4595, + "step": 3409 + }, + { + "epoch": 0.5852067959498884, + "grad_norm": 4.00322428815339, + "learning_rate": 7.746156948972654e-06, + "loss": 1.6593, + "step": 3410 + }, + { + "epoch": 0.5853784108460615, + "grad_norm": 3.869745920018589, + "learning_rate": 7.740741941289936e-06, + "loss": 1.5643, + "step": 3411 + }, + { + "epoch": 0.5855500257422345, + "grad_norm": 3.7251467927168904, + "learning_rate": 7.735327631616606e-06, + "loss": 1.6529, + "step": 3412 + }, + { + "epoch": 0.5857216406384074, + "grad_norm": 3.4462693201149777, + "learning_rate": 7.729914021625458e-06, + "loss": 1.57, + "step": 3413 + }, + { + "epoch": 0.5858932555345804, + "grad_norm": 3.3640653043018562, + "learning_rate": 7.724501112989042e-06, + "loss": 1.5265, + "step": 3414 + }, + { + "epoch": 0.5860648704307534, + "grad_norm": 3.698114510919301, + "learning_rate": 7.719088907379705e-06, + "loss": 1.7258, + "step": 3415 + }, + { + "epoch": 0.5862364853269264, + "grad_norm": 2.659080367977588, + "learning_rate": 7.713677406469582e-06, + "loss": 1.5443, + "step": 3416 + }, + { + "epoch": 0.5864081002230994, + "grad_norm": 3.5203753304916994, + "learning_rate": 7.70826661193058e-06, + "loss": 1.7366, + "step": 3417 + }, + { + "epoch": 0.5865797151192723, + "grad_norm": 3.229286781162943, + "learning_rate": 7.702856525434393e-06, + "loss": 1.3902, + "step": 3418 + }, + { + "epoch": 0.5867513300154453, + "grad_norm": 4.196326915897818, + "learning_rate": 7.697447148652499e-06, + "loss": 1.6128, + "step": 3419 + }, + { + "epoch": 0.5869229449116183, + "grad_norm": 3.6892541904442786, + "learning_rate": 7.692038483256141e-06, + "loss": 1.6386, + "step": 3420 + }, + { + "epoch": 0.5870945598077913, + "grad_norm": 3.6859846651088453, + "learning_rate": 7.686630530916366e-06, + "loss": 1.6677, + "step": 3421 + }, + { + "epoch": 0.5872661747039642, + "grad_norm": 3.7829443629407864, + "learning_rate": 7.68122329330398e-06, + "loss": 1.5093, + "step": 3422 + }, + { + "epoch": 0.5874377896001373, + "grad_norm": 3.7666023151534427, + "learning_rate": 7.675816772089585e-06, + "loss": 1.6995, + "step": 3423 + }, + { + "epoch": 0.5876094044963103, + "grad_norm": 3.1284984931329074, + "learning_rate": 7.670410968943548e-06, + "loss": 1.9365, + "step": 3424 + }, + { + "epoch": 
0.5877810193924833, + "grad_norm": 3.4108951569360997, + "learning_rate": 7.665005885536023e-06, + "loss": 1.6649, + "step": 3425 + }, + { + "epoch": 0.5879526342886563, + "grad_norm": 3.2690888979237256, + "learning_rate": 7.659601523536936e-06, + "loss": 1.7073, + "step": 3426 + }, + { + "epoch": 0.5881242491848292, + "grad_norm": 3.1997678158040963, + "learning_rate": 7.654197884615991e-06, + "loss": 1.759, + "step": 3427 + }, + { + "epoch": 0.5882958640810022, + "grad_norm": 4.062338122480571, + "learning_rate": 7.648794970442674e-06, + "loss": 1.5379, + "step": 3428 + }, + { + "epoch": 0.5884674789771752, + "grad_norm": 3.2508313105601325, + "learning_rate": 7.643392782686238e-06, + "loss": 1.5833, + "step": 3429 + }, + { + "epoch": 0.5886390938733482, + "grad_norm": 3.293012414249073, + "learning_rate": 7.637991323015722e-06, + "loss": 1.6647, + "step": 3430 + }, + { + "epoch": 0.5888107087695212, + "grad_norm": 2.861860803830219, + "learning_rate": 7.632590593099935e-06, + "loss": 1.5197, + "step": 3431 + }, + { + "epoch": 0.5889823236656941, + "grad_norm": 5.260677730440887, + "learning_rate": 7.627190594607454e-06, + "loss": 1.6689, + "step": 3432 + }, + { + "epoch": 0.5891539385618672, + "grad_norm": 3.7718797264873456, + "learning_rate": 7.621791329206643e-06, + "loss": 1.6436, + "step": 3433 + }, + { + "epoch": 0.5893255534580402, + "grad_norm": 3.5756958234984944, + "learning_rate": 7.616392798565626e-06, + "loss": 1.7052, + "step": 3434 + }, + { + "epoch": 0.5894971683542132, + "grad_norm": 3.3746200657836485, + "learning_rate": 7.610995004352313e-06, + "loss": 1.3705, + "step": 3435 + }, + { + "epoch": 0.5896687832503862, + "grad_norm": 4.025671863494862, + "learning_rate": 7.605597948234377e-06, + "loss": 1.7638, + "step": 3436 + }, + { + "epoch": 0.5898403981465591, + "grad_norm": 3.2096506433161225, + "learning_rate": 7.60020163187927e-06, + "loss": 1.5396, + "step": 3437 + }, + { + "epoch": 0.5900120130427321, + "grad_norm": 3.641887705980669, + "learning_rate": 7.5948060569542085e-06, + "loss": 1.4779, + "step": 3438 + }, + { + "epoch": 0.5901836279389051, + "grad_norm": 5.191420711528766, + "learning_rate": 7.589411225126181e-06, + "loss": 1.4139, + "step": 3439 + }, + { + "epoch": 0.5903552428350781, + "grad_norm": 3.541926464478629, + "learning_rate": 7.584017138061952e-06, + "loss": 1.5846, + "step": 3440 + }, + { + "epoch": 0.590526857731251, + "grad_norm": 3.299448765178323, + "learning_rate": 7.578623797428051e-06, + "loss": 1.7244, + "step": 3441 + }, + { + "epoch": 0.590698472627424, + "grad_norm": 3.2904805276877, + "learning_rate": 7.57323120489078e-06, + "loss": 1.3071, + "step": 3442 + }, + { + "epoch": 0.590870087523597, + "grad_norm": 3.4367547328660417, + "learning_rate": 7.56783936211621e-06, + "loss": 1.4949, + "step": 3443 + }, + { + "epoch": 0.59104170241977, + "grad_norm": 9.696886988735994, + "learning_rate": 7.56244827077017e-06, + "loss": 1.8448, + "step": 3444 + }, + { + "epoch": 0.5912133173159431, + "grad_norm": 3.5865939085980187, + "learning_rate": 7.557057932518274e-06, + "loss": 1.6688, + "step": 3445 + }, + { + "epoch": 0.591384932212116, + "grad_norm": 3.200773303057205, + "learning_rate": 7.551668349025889e-06, + "loss": 1.7322, + "step": 3446 + }, + { + "epoch": 0.591556547108289, + "grad_norm": 3.4087785819785448, + "learning_rate": 7.54627952195816e-06, + "loss": 1.7338, + "step": 3447 + }, + { + "epoch": 0.591728162004462, + "grad_norm": 3.1826063988764064, + "learning_rate": 7.540891452979988e-06, + "loss": 1.6004, + "step": 3448 + 
}, + { + "epoch": 0.591899776900635, + "grad_norm": 3.394885182311321, + "learning_rate": 7.53550414375605e-06, + "loss": 1.5406, + "step": 3449 + }, + { + "epoch": 0.592071391796808, + "grad_norm": 3.7271835868720076, + "learning_rate": 7.530117595950779e-06, + "loss": 1.5926, + "step": 3450 + }, + { + "epoch": 0.5922430066929809, + "grad_norm": 3.2447524485665995, + "learning_rate": 7.524731811228374e-06, + "loss": 1.4928, + "step": 3451 + }, + { + "epoch": 0.5924146215891539, + "grad_norm": 3.727512721432605, + "learning_rate": 7.5193467912528065e-06, + "loss": 1.6125, + "step": 3452 + }, + { + "epoch": 0.5925862364853269, + "grad_norm": 4.388206109404833, + "learning_rate": 7.513962537687801e-06, + "loss": 1.8134, + "step": 3453 + }, + { + "epoch": 0.5927578513815, + "grad_norm": 3.076136529734564, + "learning_rate": 7.508579052196856e-06, + "loss": 1.4729, + "step": 3454 + }, + { + "epoch": 0.592929466277673, + "grad_norm": 3.090441968248904, + "learning_rate": 7.503196336443226e-06, + "loss": 1.5077, + "step": 3455 + }, + { + "epoch": 0.5931010811738459, + "grad_norm": 3.9151173250992533, + "learning_rate": 7.497814392089921e-06, + "loss": 1.7779, + "step": 3456 + }, + { + "epoch": 0.5932726960700189, + "grad_norm": 4.704923856248117, + "learning_rate": 7.492433220799731e-06, + "loss": 1.7743, + "step": 3457 + }, + { + "epoch": 0.5934443109661919, + "grad_norm": 3.2743200284497607, + "learning_rate": 7.487052824235188e-06, + "loss": 1.4268, + "step": 3458 + }, + { + "epoch": 0.5936159258623649, + "grad_norm": 3.090962800060612, + "learning_rate": 7.481673204058599e-06, + "loss": 1.6117, + "step": 3459 + }, + { + "epoch": 0.5937875407585378, + "grad_norm": 3.2700323717936928, + "learning_rate": 7.476294361932022e-06, + "loss": 1.6494, + "step": 3460 + }, + { + "epoch": 0.5939591556547108, + "grad_norm": 3.5217792540387625, + "learning_rate": 7.470916299517281e-06, + "loss": 1.7862, + "step": 3461 + }, + { + "epoch": 0.5941307705508838, + "grad_norm": 3.958546529354251, + "learning_rate": 7.465539018475954e-06, + "loss": 1.5975, + "step": 3462 + }, + { + "epoch": 0.5943023854470568, + "grad_norm": 3.1667882246080703, + "learning_rate": 7.460162520469379e-06, + "loss": 1.8509, + "step": 3463 + }, + { + "epoch": 0.5944740003432298, + "grad_norm": 3.4084259635303886, + "learning_rate": 7.454786807158654e-06, + "loss": 1.7569, + "step": 3464 + }, + { + "epoch": 0.5946456152394027, + "grad_norm": 3.4134509716830643, + "learning_rate": 7.449411880204629e-06, + "loss": 1.4558, + "step": 3465 + }, + { + "epoch": 0.5948172301355757, + "grad_norm": 4.9449335423127065, + "learning_rate": 7.444037741267923e-06, + "loss": 1.4374, + "step": 3466 + }, + { + "epoch": 0.5949888450317488, + "grad_norm": 3.1289839977494096, + "learning_rate": 7.438664392008903e-06, + "loss": 1.6245, + "step": 3467 + }, + { + "epoch": 0.5951604599279218, + "grad_norm": 3.44472725161979, + "learning_rate": 7.4332918340876834e-06, + "loss": 1.6394, + "step": 3468 + }, + { + "epoch": 0.5953320748240948, + "grad_norm": 4.035093330546387, + "learning_rate": 7.427920069164154e-06, + "loss": 1.657, + "step": 3469 + }, + { + "epoch": 0.5955036897202677, + "grad_norm": 3.064410566000389, + "learning_rate": 7.422549098897942e-06, + "loss": 1.6928, + "step": 3470 + }, + { + "epoch": 0.5956753046164407, + "grad_norm": 3.6946001978065284, + "learning_rate": 7.417178924948442e-06, + "loss": 1.6947, + "step": 3471 + }, + { + "epoch": 0.5958469195126137, + "grad_norm": 3.364638705208248, + "learning_rate": 7.411809548974792e-06, + 
"loss": 1.4862, + "step": 3472 + }, + { + "epoch": 0.5960185344087867, + "grad_norm": 3.1498637832725933, + "learning_rate": 7.406440972635893e-06, + "loss": 1.5784, + "step": 3473 + }, + { + "epoch": 0.5961901493049596, + "grad_norm": 3.3343464296396386, + "learning_rate": 7.401073197590394e-06, + "loss": 1.6015, + "step": 3474 + }, + { + "epoch": 0.5963617642011326, + "grad_norm": 3.3871116094083633, + "learning_rate": 7.39570622549669e-06, + "loss": 1.6587, + "step": 3475 + }, + { + "epoch": 0.5965333790973056, + "grad_norm": 2.961481816908742, + "learning_rate": 7.390340058012942e-06, + "loss": 1.4583, + "step": 3476 + }, + { + "epoch": 0.5967049939934786, + "grad_norm": 3.0947770440813476, + "learning_rate": 7.384974696797051e-06, + "loss": 1.4569, + "step": 3477 + }, + { + "epoch": 0.5968766088896517, + "grad_norm": 3.7040062861178584, + "learning_rate": 7.3796101435066744e-06, + "loss": 1.7992, + "step": 3478 + }, + { + "epoch": 0.5970482237858246, + "grad_norm": 3.5236170954512605, + "learning_rate": 7.374246399799219e-06, + "loss": 1.5144, + "step": 3479 + }, + { + "epoch": 0.5972198386819976, + "grad_norm": 3.4929880815605396, + "learning_rate": 7.368883467331842e-06, + "loss": 1.4671, + "step": 3480 + }, + { + "epoch": 0.5973914535781706, + "grad_norm": 2.999185898087423, + "learning_rate": 7.3635213477614465e-06, + "loss": 1.5863, + "step": 3481 + }, + { + "epoch": 0.5975630684743436, + "grad_norm": 3.2343396749847932, + "learning_rate": 7.358160042744684e-06, + "loss": 1.4851, + "step": 3482 + }, + { + "epoch": 0.5977346833705166, + "grad_norm": 3.578878332256923, + "learning_rate": 7.352799553937966e-06, + "loss": 1.6599, + "step": 3483 + }, + { + "epoch": 0.5979062982666895, + "grad_norm": 2.8562673552416773, + "learning_rate": 7.347439882997434e-06, + "loss": 1.5198, + "step": 3484 + }, + { + "epoch": 0.5980779131628625, + "grad_norm": 3.370512546047015, + "learning_rate": 7.342081031578992e-06, + "loss": 1.4607, + "step": 3485 + }, + { + "epoch": 0.5982495280590355, + "grad_norm": 3.6359043106964695, + "learning_rate": 7.336723001338286e-06, + "loss": 1.8209, + "step": 3486 + }, + { + "epoch": 0.5984211429552085, + "grad_norm": 3.9482934730495627, + "learning_rate": 7.331365793930698e-06, + "loss": 1.6941, + "step": 3487 + }, + { + "epoch": 0.5985927578513816, + "grad_norm": 2.9820353600099168, + "learning_rate": 7.326009411011373e-06, + "loss": 1.6194, + "step": 3488 + }, + { + "epoch": 0.5987643727475545, + "grad_norm": 3.317409323950506, + "learning_rate": 7.3206538542351895e-06, + "loss": 1.6654, + "step": 3489 + }, + { + "epoch": 0.5989359876437275, + "grad_norm": 3.845945578759833, + "learning_rate": 7.3152991252567765e-06, + "loss": 1.6838, + "step": 3490 + }, + { + "epoch": 0.5991076025399005, + "grad_norm": 3.301791598658535, + "learning_rate": 7.309945225730502e-06, + "loss": 1.2897, + "step": 3491 + }, + { + "epoch": 0.5992792174360735, + "grad_norm": 3.268855205397025, + "learning_rate": 7.3045921573104864e-06, + "loss": 1.6067, + "step": 3492 + }, + { + "epoch": 0.5994508323322464, + "grad_norm": 2.86378842637836, + "learning_rate": 7.299239921650583e-06, + "loss": 1.3309, + "step": 3493 + }, + { + "epoch": 0.5996224472284194, + "grad_norm": 3.3354861696207196, + "learning_rate": 7.29388852040439e-06, + "loss": 1.6204, + "step": 3494 + }, + { + "epoch": 0.5997940621245924, + "grad_norm": 3.3282166085597242, + "learning_rate": 7.288537955225257e-06, + "loss": 1.5746, + "step": 3495 + }, + { + "epoch": 0.5999656770207654, + "grad_norm": 3.246799848306246, + 
"learning_rate": 7.283188227766262e-06, + "loss": 1.5323, + "step": 3496 + }, + { + "epoch": 0.6001372919169384, + "grad_norm": 4.2661936554700395, + "learning_rate": 7.2778393396802374e-06, + "loss": 1.6849, + "step": 3497 + }, + { + "epoch": 0.6003089068131113, + "grad_norm": 3.5978067353137058, + "learning_rate": 7.272491292619747e-06, + "loss": 1.7872, + "step": 3498 + }, + { + "epoch": 0.6004805217092843, + "grad_norm": 2.8790891648592285, + "learning_rate": 7.267144088237095e-06, + "loss": 1.5264, + "step": 3499 + }, + { + "epoch": 0.6006521366054574, + "grad_norm": 3.120079277745528, + "learning_rate": 7.261797728184331e-06, + "loss": 1.5583, + "step": 3500 + }, + { + "epoch": 0.6008237515016304, + "grad_norm": 2.844945630046783, + "learning_rate": 7.256452214113237e-06, + "loss": 1.4873, + "step": 3501 + }, + { + "epoch": 0.6009953663978034, + "grad_norm": 2.902828187514857, + "learning_rate": 7.251107547675341e-06, + "loss": 1.6206, + "step": 3502 + }, + { + "epoch": 0.6011669812939763, + "grad_norm": 3.673835737909017, + "learning_rate": 7.2457637305219016e-06, + "loss": 1.7337, + "step": 3503 + }, + { + "epoch": 0.6013385961901493, + "grad_norm": 3.394777346119734, + "learning_rate": 7.2404207643039236e-06, + "loss": 1.7251, + "step": 3504 + }, + { + "epoch": 0.6015102110863223, + "grad_norm": 3.4967505570620254, + "learning_rate": 7.235078650672141e-06, + "loss": 1.6618, + "step": 3505 + }, + { + "epoch": 0.6016818259824953, + "grad_norm": 3.5981557137725413, + "learning_rate": 7.2297373912770215e-06, + "loss": 1.4374, + "step": 3506 + }, + { + "epoch": 0.6018534408786683, + "grad_norm": 3.011963170535395, + "learning_rate": 7.224396987768785e-06, + "loss": 1.4514, + "step": 3507 + }, + { + "epoch": 0.6020250557748412, + "grad_norm": 3.018142268614947, + "learning_rate": 7.219057441797368e-06, + "loss": 1.4997, + "step": 3508 + }, + { + "epoch": 0.6021966706710142, + "grad_norm": 3.590709438173592, + "learning_rate": 7.213718755012457e-06, + "loss": 1.6282, + "step": 3509 + }, + { + "epoch": 0.6023682855671872, + "grad_norm": 3.2080284817637956, + "learning_rate": 7.208380929063466e-06, + "loss": 1.6915, + "step": 3510 + }, + { + "epoch": 0.6025399004633603, + "grad_norm": 3.181286690715623, + "learning_rate": 7.203043965599536e-06, + "loss": 1.693, + "step": 3511 + }, + { + "epoch": 0.6027115153595332, + "grad_norm": 3.7303874760906677, + "learning_rate": 7.19770786626956e-06, + "loss": 1.7308, + "step": 3512 + }, + { + "epoch": 0.6028831302557062, + "grad_norm": 3.042777443835479, + "learning_rate": 7.1923726327221445e-06, + "loss": 1.433, + "step": 3513 + }, + { + "epoch": 0.6030547451518792, + "grad_norm": 2.9409896196468863, + "learning_rate": 7.1870382666056435e-06, + "loss": 1.5407, + "step": 3514 + }, + { + "epoch": 0.6032263600480522, + "grad_norm": 3.014341162078118, + "learning_rate": 7.181704769568134e-06, + "loss": 1.6793, + "step": 3515 + }, + { + "epoch": 0.6033979749442252, + "grad_norm": 3.1991872243003296, + "learning_rate": 7.176372143257432e-06, + "loss": 1.5248, + "step": 3516 + }, + { + "epoch": 0.6035695898403981, + "grad_norm": 3.316494157050678, + "learning_rate": 7.171040389321073e-06, + "loss": 1.6392, + "step": 3517 + }, + { + "epoch": 0.6037412047365711, + "grad_norm": 3.1072107684826062, + "learning_rate": 7.165709509406331e-06, + "loss": 1.4747, + "step": 3518 + }, + { + "epoch": 0.6039128196327441, + "grad_norm": 2.9216102514259785, + "learning_rate": 7.160379505160211e-06, + "loss": 1.56, + "step": 3519 + }, + { + "epoch": 0.6040844345289171, + 
"grad_norm": 3.7776870278156567, + "learning_rate": 7.155050378229445e-06, + "loss": 1.6509, + "step": 3520 + }, + { + "epoch": 0.6042560494250901, + "grad_norm": 3.584135123962141, + "learning_rate": 7.149722130260495e-06, + "loss": 1.634, + "step": 3521 + }, + { + "epoch": 0.604427664321263, + "grad_norm": 3.54520310095583, + "learning_rate": 7.144394762899549e-06, + "loss": 1.7492, + "step": 3522 + }, + { + "epoch": 0.6045992792174361, + "grad_norm": 3.636361126058969, + "learning_rate": 7.139068277792524e-06, + "loss": 1.4987, + "step": 3523 + }, + { + "epoch": 0.6047708941136091, + "grad_norm": 3.3488301752712246, + "learning_rate": 7.133742676585067e-06, + "loss": 1.6574, + "step": 3524 + }, + { + "epoch": 0.6049425090097821, + "grad_norm": 3.5978190190308705, + "learning_rate": 7.128417960922546e-06, + "loss": 1.5293, + "step": 3525 + }, + { + "epoch": 0.605114123905955, + "grad_norm": 3.459716421354361, + "learning_rate": 7.123094132450063e-06, + "loss": 1.4582, + "step": 3526 + }, + { + "epoch": 0.605285738802128, + "grad_norm": 3.5727578192190648, + "learning_rate": 7.117771192812441e-06, + "loss": 1.6297, + "step": 3527 + }, + { + "epoch": 0.605457353698301, + "grad_norm": 3.498299317220624, + "learning_rate": 7.112449143654235e-06, + "loss": 1.6427, + "step": 3528 + }, + { + "epoch": 0.605628968594474, + "grad_norm": 2.9682993796259725, + "learning_rate": 7.107127986619711e-06, + "loss": 1.4381, + "step": 3529 + }, + { + "epoch": 0.605800583490647, + "grad_norm": 3.4931398822266395, + "learning_rate": 7.1018077233528685e-06, + "loss": 1.5446, + "step": 3530 + }, + { + "epoch": 0.6059721983868199, + "grad_norm": 3.449594681051599, + "learning_rate": 7.096488355497435e-06, + "loss": 1.6931, + "step": 3531 + }, + { + "epoch": 0.6061438132829929, + "grad_norm": 3.158346584260765, + "learning_rate": 7.091169884696853e-06, + "loss": 1.4981, + "step": 3532 + }, + { + "epoch": 0.606315428179166, + "grad_norm": 4.636648985828793, + "learning_rate": 7.0858523125942944e-06, + "loss": 1.6703, + "step": 3533 + }, + { + "epoch": 0.606487043075339, + "grad_norm": 4.466582317728687, + "learning_rate": 7.080535640832651e-06, + "loss": 1.8682, + "step": 3534 + }, + { + "epoch": 0.606658657971512, + "grad_norm": 3.745305060606846, + "learning_rate": 7.075219871054528e-06, + "loss": 1.3963, + "step": 3535 + }, + { + "epoch": 0.6068302728676849, + "grad_norm": 4.193770175075539, + "learning_rate": 7.069905004902269e-06, + "loss": 1.8452, + "step": 3536 + }, + { + "epoch": 0.6070018877638579, + "grad_norm": 3.207187857707066, + "learning_rate": 7.0645910440179234e-06, + "loss": 1.5548, + "step": 3537 + }, + { + "epoch": 0.6071735026600309, + "grad_norm": 3.7115883826094227, + "learning_rate": 7.059277990043272e-06, + "loss": 1.4893, + "step": 3538 + }, + { + "epoch": 0.6073451175562039, + "grad_norm": 3.1517930045316978, + "learning_rate": 7.0539658446198036e-06, + "loss": 1.597, + "step": 3539 + }, + { + "epoch": 0.6075167324523769, + "grad_norm": 3.251598528124382, + "learning_rate": 7.048654609388741e-06, + "loss": 1.5905, + "step": 3540 + }, + { + "epoch": 0.6076883473485498, + "grad_norm": 3.193389143061349, + "learning_rate": 7.043344285991012e-06, + "loss": 1.4434, + "step": 3541 + }, + { + "epoch": 0.6078599622447228, + "grad_norm": 3.7575111217500345, + "learning_rate": 7.0380348760672655e-06, + "loss": 1.75, + "step": 3542 + }, + { + "epoch": 0.6080315771408958, + "grad_norm": 3.278457805476395, + "learning_rate": 7.032726381257879e-06, + "loss": 1.5966, + "step": 3543 + }, + { + "epoch": 
0.6082031920370689, + "grad_norm": 3.1176551026544987, + "learning_rate": 7.027418803202931e-06, + "loss": 1.6025, + "step": 3544 + }, + { + "epoch": 0.6083748069332418, + "grad_norm": 3.148546222023553, + "learning_rate": 7.022112143542232e-06, + "loss": 1.5148, + "step": 3545 + }, + { + "epoch": 0.6085464218294148, + "grad_norm": 3.7597591439723845, + "learning_rate": 7.016806403915302e-06, + "loss": 1.667, + "step": 3546 + }, + { + "epoch": 0.6087180367255878, + "grad_norm": 3.1000399357350132, + "learning_rate": 7.011501585961369e-06, + "loss": 1.4906, + "step": 3547 + }, + { + "epoch": 0.6088896516217608, + "grad_norm": 3.678846588145142, + "learning_rate": 7.00619769131939e-06, + "loss": 1.5873, + "step": 3548 + }, + { + "epoch": 0.6090612665179338, + "grad_norm": 3.5467011930611023, + "learning_rate": 7.000894721628027e-06, + "loss": 1.4743, + "step": 3549 + }, + { + "epoch": 0.6092328814141067, + "grad_norm": 4.573899462988421, + "learning_rate": 6.995592678525662e-06, + "loss": 1.5757, + "step": 3550 + }, + { + "epoch": 0.6094044963102797, + "grad_norm": 3.853856004027449, + "learning_rate": 6.990291563650387e-06, + "loss": 1.618, + "step": 3551 + }, + { + "epoch": 0.6095761112064527, + "grad_norm": 4.770867604145773, + "learning_rate": 6.9849913786400154e-06, + "loss": 1.5727, + "step": 3552 + }, + { + "epoch": 0.6097477261026257, + "grad_norm": 3.3813302617365864, + "learning_rate": 6.979692125132058e-06, + "loss": 1.6511, + "step": 3553 + }, + { + "epoch": 0.6099193409987987, + "grad_norm": 3.4948128644776006, + "learning_rate": 6.974393804763746e-06, + "loss": 1.5985, + "step": 3554 + }, + { + "epoch": 0.6100909558949716, + "grad_norm": 3.919573765566504, + "learning_rate": 6.96909641917203e-06, + "loss": 1.699, + "step": 3555 + }, + { + "epoch": 0.6102625707911447, + "grad_norm": 3.588520580473983, + "learning_rate": 6.963799969993557e-06, + "loss": 1.6282, + "step": 3556 + }, + { + "epoch": 0.6104341856873177, + "grad_norm": 3.6443422963335164, + "learning_rate": 6.958504458864699e-06, + "loss": 1.4209, + "step": 3557 + }, + { + "epoch": 0.6106058005834907, + "grad_norm": 3.5832394828873095, + "learning_rate": 6.953209887421531e-06, + "loss": 1.434, + "step": 3558 + }, + { + "epoch": 0.6107774154796637, + "grad_norm": 3.0194882536944556, + "learning_rate": 6.947916257299829e-06, + "loss": 1.329, + "step": 3559 + }, + { + "epoch": 0.6109490303758366, + "grad_norm": 2.8369981346213526, + "learning_rate": 6.9426235701350975e-06, + "loss": 1.3983, + "step": 3560 + }, + { + "epoch": 0.6111206452720096, + "grad_norm": 4.3766950840191186, + "learning_rate": 6.937331827562532e-06, + "loss": 1.4592, + "step": 3561 + }, + { + "epoch": 0.6112922601681826, + "grad_norm": 3.575947492900013, + "learning_rate": 6.932041031217051e-06, + "loss": 1.5822, + "step": 3562 + }, + { + "epoch": 0.6114638750643556, + "grad_norm": 3.8254188579891353, + "learning_rate": 6.926751182733264e-06, + "loss": 1.4851, + "step": 3563 + }, + { + "epoch": 0.6116354899605285, + "grad_norm": 3.770091050093029, + "learning_rate": 6.92146228374551e-06, + "loss": 1.6089, + "step": 3564 + }, + { + "epoch": 0.6118071048567015, + "grad_norm": 3.972773931503509, + "learning_rate": 6.91617433588781e-06, + "loss": 1.5105, + "step": 3565 + }, + { + "epoch": 0.6119787197528745, + "grad_norm": 3.071128027261779, + "learning_rate": 6.910887340793904e-06, + "loss": 1.4183, + "step": 3566 + }, + { + "epoch": 0.6121503346490476, + "grad_norm": 4.232389848123197, + "learning_rate": 6.905601300097241e-06, + "loss": 1.5972, + 
"step": 3567 + }, + { + "epoch": 0.6123219495452206, + "grad_norm": 3.7256258521185432, + "learning_rate": 6.900316215430967e-06, + "loss": 1.7475, + "step": 3568 + }, + { + "epoch": 0.6124935644413935, + "grad_norm": 3.576751242900159, + "learning_rate": 6.8950320884279375e-06, + "loss": 1.733, + "step": 3569 + }, + { + "epoch": 0.6126651793375665, + "grad_norm": 3.366578413225391, + "learning_rate": 6.889748920720714e-06, + "loss": 1.7823, + "step": 3570 + }, + { + "epoch": 0.6128367942337395, + "grad_norm": 3.144677461459144, + "learning_rate": 6.88446671394155e-06, + "loss": 1.4366, + "step": 3571 + }, + { + "epoch": 0.6130084091299125, + "grad_norm": 3.327116844473996, + "learning_rate": 6.879185469722417e-06, + "loss": 1.4711, + "step": 3572 + }, + { + "epoch": 0.6131800240260855, + "grad_norm": 2.873522966046271, + "learning_rate": 6.873905189694979e-06, + "loss": 1.4945, + "step": 3573 + }, + { + "epoch": 0.6133516389222584, + "grad_norm": 3.385729227599632, + "learning_rate": 6.86862587549061e-06, + "loss": 1.7169, + "step": 3574 + }, + { + "epoch": 0.6135232538184314, + "grad_norm": 3.659100526585139, + "learning_rate": 6.863347528740376e-06, + "loss": 1.4637, + "step": 3575 + }, + { + "epoch": 0.6136948687146044, + "grad_norm": 3.5871880291928337, + "learning_rate": 6.858070151075057e-06, + "loss": 1.5778, + "step": 3576 + }, + { + "epoch": 0.6138664836107774, + "grad_norm": 3.4965527291808742, + "learning_rate": 6.8527937441251195e-06, + "loss": 1.6727, + "step": 3577 + }, + { + "epoch": 0.6140380985069505, + "grad_norm": 3.180628062774472, + "learning_rate": 6.847518309520734e-06, + "loss": 1.55, + "step": 3578 + }, + { + "epoch": 0.6142097134031234, + "grad_norm": 4.125068719067522, + "learning_rate": 6.842243848891781e-06, + "loss": 1.8915, + "step": 3579 + }, + { + "epoch": 0.6143813282992964, + "grad_norm": 2.822402815553282, + "learning_rate": 6.836970363867825e-06, + "loss": 1.4799, + "step": 3580 + }, + { + "epoch": 0.6145529431954694, + "grad_norm": 4.91866492123016, + "learning_rate": 6.831697856078143e-06, + "loss": 1.6284, + "step": 3581 + }, + { + "epoch": 0.6147245580916424, + "grad_norm": 4.034526444138595, + "learning_rate": 6.826426327151703e-06, + "loss": 1.4852, + "step": 3582 + }, + { + "epoch": 0.6148961729878153, + "grad_norm": 3.9752131268320428, + "learning_rate": 6.821155778717164e-06, + "loss": 1.7132, + "step": 3583 + }, + { + "epoch": 0.6150677878839883, + "grad_norm": 3.7212114180900366, + "learning_rate": 6.8158862124028935e-06, + "loss": 1.6357, + "step": 3584 + }, + { + "epoch": 0.6152394027801613, + "grad_norm": 3.3607635813613133, + "learning_rate": 6.81061762983695e-06, + "loss": 1.5627, + "step": 3585 + }, + { + "epoch": 0.6154110176763343, + "grad_norm": 3.102328408443979, + "learning_rate": 6.805350032647091e-06, + "loss": 1.1952, + "step": 3586 + }, + { + "epoch": 0.6155826325725073, + "grad_norm": 3.3889154530800862, + "learning_rate": 6.800083422460766e-06, + "loss": 1.5259, + "step": 3587 + }, + { + "epoch": 0.6157542474686802, + "grad_norm": 4.278932537075945, + "learning_rate": 6.794817800905126e-06, + "loss": 1.5404, + "step": 3588 + }, + { + "epoch": 0.6159258623648532, + "grad_norm": 4.498275024820423, + "learning_rate": 6.789553169607005e-06, + "loss": 1.904, + "step": 3589 + }, + { + "epoch": 0.6160974772610263, + "grad_norm": 3.6648361183917317, + "learning_rate": 6.78428953019294e-06, + "loss": 1.4874, + "step": 3590 + }, + { + "epoch": 0.6162690921571993, + "grad_norm": 4.730235381280263, + "learning_rate": 
6.779026884289161e-06, + "loss": 1.5854, + "step": 3591 + }, + { + "epoch": 0.6164407070533723, + "grad_norm": 4.039251415789363, + "learning_rate": 6.7737652335215875e-06, + "loss": 1.6877, + "step": 3592 + }, + { + "epoch": 0.6166123219495452, + "grad_norm": 2.949797540673822, + "learning_rate": 6.7685045795158366e-06, + "loss": 1.5218, + "step": 3593 + }, + { + "epoch": 0.6167839368457182, + "grad_norm": 3.1462755989389546, + "learning_rate": 6.763244923897214e-06, + "loss": 1.66, + "step": 3594 + }, + { + "epoch": 0.6169555517418912, + "grad_norm": 8.790351768451915, + "learning_rate": 6.757986268290713e-06, + "loss": 1.6265, + "step": 3595 + }, + { + "epoch": 0.6171271666380642, + "grad_norm": 3.625955100783232, + "learning_rate": 6.752728614321027e-06, + "loss": 1.7215, + "step": 3596 + }, + { + "epoch": 0.6172987815342371, + "grad_norm": 3.7287328863114855, + "learning_rate": 6.747471963612533e-06, + "loss": 1.5203, + "step": 3597 + }, + { + "epoch": 0.6174703964304101, + "grad_norm": 3.0202138281623854, + "learning_rate": 6.742216317789301e-06, + "loss": 1.378, + "step": 3598 + }, + { + "epoch": 0.6176420113265831, + "grad_norm": 4.725003682769307, + "learning_rate": 6.7369616784750865e-06, + "loss": 1.8272, + "step": 3599 + }, + { + "epoch": 0.6178136262227562, + "grad_norm": 3.6525662232026535, + "learning_rate": 6.731708047293347e-06, + "loss": 1.4771, + "step": 3600 + }, + { + "epoch": 0.6179852411189292, + "grad_norm": 3.0653076577483764, + "learning_rate": 6.726455425867209e-06, + "loss": 1.6039, + "step": 3601 + }, + { + "epoch": 0.6181568560151021, + "grad_norm": 3.546807468213588, + "learning_rate": 6.721203815819499e-06, + "loss": 1.4522, + "step": 3602 + }, + { + "epoch": 0.6183284709112751, + "grad_norm": 3.231561608886287, + "learning_rate": 6.715953218772731e-06, + "loss": 1.5448, + "step": 3603 + }, + { + "epoch": 0.6185000858074481, + "grad_norm": 4.255214525889074, + "learning_rate": 6.710703636349102e-06, + "loss": 1.4211, + "step": 3604 + }, + { + "epoch": 0.6186717007036211, + "grad_norm": 3.5580390080097235, + "learning_rate": 6.7054550701705e-06, + "loss": 1.8038, + "step": 3605 + }, + { + "epoch": 0.6188433155997941, + "grad_norm": 3.980774314848651, + "learning_rate": 6.700207521858497e-06, + "loss": 1.7348, + "step": 3606 + }, + { + "epoch": 0.619014930495967, + "grad_norm": 3.838505218644239, + "learning_rate": 6.694960993034345e-06, + "loss": 1.8029, + "step": 3607 + }, + { + "epoch": 0.61918654539214, + "grad_norm": 3.55897323339006, + "learning_rate": 6.689715485318991e-06, + "loss": 1.6498, + "step": 3608 + }, + { + "epoch": 0.619358160288313, + "grad_norm": 3.6483570376923558, + "learning_rate": 6.684471000333057e-06, + "loss": 1.7088, + "step": 3609 + }, + { + "epoch": 0.619529775184486, + "grad_norm": 3.286987247453582, + "learning_rate": 6.679227539696859e-06, + "loss": 1.6491, + "step": 3610 + }, + { + "epoch": 0.619701390080659, + "grad_norm": 3.6550228488817593, + "learning_rate": 6.673985105030386e-06, + "loss": 1.6154, + "step": 3611 + }, + { + "epoch": 0.619873004976832, + "grad_norm": 3.6965410509474026, + "learning_rate": 6.668743697953322e-06, + "loss": 1.4387, + "step": 3612 + }, + { + "epoch": 0.620044619873005, + "grad_norm": 3.8054483669238683, + "learning_rate": 6.663503320085021e-06, + "loss": 1.7772, + "step": 3613 + }, + { + "epoch": 0.620216234769178, + "grad_norm": 4.100195875464454, + "learning_rate": 6.658263973044522e-06, + "loss": 1.8859, + "step": 3614 + }, + { + "epoch": 0.620387849665351, + "grad_norm": 2.8411303765212006, 
+ "learning_rate": 6.653025658450556e-06, + "loss": 1.6475, + "step": 3615 + }, + { + "epoch": 0.6205594645615239, + "grad_norm": 3.738410759369591, + "learning_rate": 6.64778837792152e-06, + "loss": 1.5985, + "step": 3616 + }, + { + "epoch": 0.6207310794576969, + "grad_norm": 4.222578233471034, + "learning_rate": 6.642552133075506e-06, + "loss": 1.4064, + "step": 3617 + }, + { + "epoch": 0.6209026943538699, + "grad_norm": 3.2683299982967164, + "learning_rate": 6.637316925530275e-06, + "loss": 1.442, + "step": 3618 + }, + { + "epoch": 0.6210743092500429, + "grad_norm": 3.5968973542926514, + "learning_rate": 6.6320827569032684e-06, + "loss": 1.7, + "step": 3619 + }, + { + "epoch": 0.6212459241462159, + "grad_norm": 3.431879397853006, + "learning_rate": 6.626849628811613e-06, + "loss": 1.7326, + "step": 3620 + }, + { + "epoch": 0.6214175390423888, + "grad_norm": 3.3466457830514402, + "learning_rate": 6.62161754287211e-06, + "loss": 1.6379, + "step": 3621 + }, + { + "epoch": 0.6215891539385618, + "grad_norm": 3.585286521571043, + "learning_rate": 6.61638650070124e-06, + "loss": 1.6252, + "step": 3622 + }, + { + "epoch": 0.6217607688347349, + "grad_norm": 3.441388396163768, + "learning_rate": 6.611156503915158e-06, + "loss": 1.5805, + "step": 3623 + }, + { + "epoch": 0.6219323837309079, + "grad_norm": 2.8788988398472104, + "learning_rate": 6.605927554129705e-06, + "loss": 1.4655, + "step": 3624 + }, + { + "epoch": 0.6221039986270809, + "grad_norm": 4.097672213672383, + "learning_rate": 6.600699652960383e-06, + "loss": 1.704, + "step": 3625 + }, + { + "epoch": 0.6222756135232538, + "grad_norm": 3.8053155913992014, + "learning_rate": 6.595472802022382e-06, + "loss": 1.6123, + "step": 3626 + }, + { + "epoch": 0.6224472284194268, + "grad_norm": 2.9413464367244906, + "learning_rate": 6.590247002930567e-06, + "loss": 1.5142, + "step": 3627 + }, + { + "epoch": 0.6226188433155998, + "grad_norm": 3.7481112593904973, + "learning_rate": 6.585022257299473e-06, + "loss": 1.552, + "step": 3628 + }, + { + "epoch": 0.6227904582117728, + "grad_norm": 3.9403199597742726, + "learning_rate": 6.579798566743314e-06, + "loss": 1.3722, + "step": 3629 + }, + { + "epoch": 0.6229620731079458, + "grad_norm": 3.5741659718144856, + "learning_rate": 6.574575932875977e-06, + "loss": 1.6998, + "step": 3630 + }, + { + "epoch": 0.6231336880041187, + "grad_norm": 3.2833445182141214, + "learning_rate": 6.569354357311015e-06, + "loss": 1.625, + "step": 3631 + }, + { + "epoch": 0.6233053029002917, + "grad_norm": 3.3823846836668863, + "learning_rate": 6.564133841661667e-06, + "loss": 1.5147, + "step": 3632 + }, + { + "epoch": 0.6234769177964647, + "grad_norm": 3.722953347222163, + "learning_rate": 6.558914387540835e-06, + "loss": 1.8322, + "step": 3633 + }, + { + "epoch": 0.6236485326926378, + "grad_norm": 3.2166245726465883, + "learning_rate": 6.5536959965611e-06, + "loss": 1.68, + "step": 3634 + }, + { + "epoch": 0.6238201475888107, + "grad_norm": 3.616491143354469, + "learning_rate": 6.548478670334706e-06, + "loss": 1.8139, + "step": 3635 + }, + { + "epoch": 0.6239917624849837, + "grad_norm": 3.7048168605553258, + "learning_rate": 6.543262410473579e-06, + "loss": 1.7876, + "step": 3636 + }, + { + "epoch": 0.6241633773811567, + "grad_norm": 3.842558187299423, + "learning_rate": 6.538047218589303e-06, + "loss": 1.4199, + "step": 3637 + }, + { + "epoch": 0.6243349922773297, + "grad_norm": 3.494201520092776, + "learning_rate": 6.532833096293139e-06, + "loss": 1.5375, + "step": 3638 + }, + { + "epoch": 0.6245066071735027, + 
"grad_norm": 3.217061689816585, + "learning_rate": 6.527620045196019e-06, + "loss": 1.6631, + "step": 3639 + }, + { + "epoch": 0.6246782220696756, + "grad_norm": 3.6928714876781243, + "learning_rate": 6.522408066908541e-06, + "loss": 1.408, + "step": 3640 + }, + { + "epoch": 0.6248498369658486, + "grad_norm": 3.952689029118906, + "learning_rate": 6.517197163040974e-06, + "loss": 1.5684, + "step": 3641 + }, + { + "epoch": 0.6250214518620216, + "grad_norm": 3.6043387599734373, + "learning_rate": 6.511987335203256e-06, + "loss": 1.6327, + "step": 3642 + }, + { + "epoch": 0.6251930667581946, + "grad_norm": 3.7043748833197077, + "learning_rate": 6.506778585004982e-06, + "loss": 1.4229, + "step": 3643 + }, + { + "epoch": 0.6253646816543676, + "grad_norm": 3.261278120490368, + "learning_rate": 6.501570914055428e-06, + "loss": 1.6165, + "step": 3644 + }, + { + "epoch": 0.6255362965505405, + "grad_norm": 3.669892061644329, + "learning_rate": 6.496364323963528e-06, + "loss": 1.8881, + "step": 3645 + }, + { + "epoch": 0.6257079114467136, + "grad_norm": 3.465479602926153, + "learning_rate": 6.491158816337889e-06, + "loss": 1.3925, + "step": 3646 + }, + { + "epoch": 0.6258795263428866, + "grad_norm": 3.4571562419806408, + "learning_rate": 6.485954392786775e-06, + "loss": 1.4355, + "step": 3647 + }, + { + "epoch": 0.6260511412390596, + "grad_norm": 3.869149989507204, + "learning_rate": 6.480751054918124e-06, + "loss": 1.7249, + "step": 3648 + }, + { + "epoch": 0.6262227561352325, + "grad_norm": 4.523040760674294, + "learning_rate": 6.475548804339529e-06, + "loss": 1.6034, + "step": 3649 + }, + { + "epoch": 0.6263943710314055, + "grad_norm": 3.67809048459109, + "learning_rate": 6.4703476426582525e-06, + "loss": 1.61, + "step": 3650 + }, + { + "epoch": 0.6265659859275785, + "grad_norm": 3.7439101481056194, + "learning_rate": 6.465147571481223e-06, + "loss": 1.4205, + "step": 3651 + }, + { + "epoch": 0.6267376008237515, + "grad_norm": 3.823136874827558, + "learning_rate": 6.459948592415027e-06, + "loss": 1.622, + "step": 3652 + }, + { + "epoch": 0.6269092157199245, + "grad_norm": 3.518818248937889, + "learning_rate": 6.454750707065919e-06, + "loss": 1.3603, + "step": 3653 + }, + { + "epoch": 0.6270808306160974, + "grad_norm": 3.263262581086908, + "learning_rate": 6.449553917039811e-06, + "loss": 1.4911, + "step": 3654 + }, + { + "epoch": 0.6272524455122704, + "grad_norm": 4.004275786512978, + "learning_rate": 6.4443582239422744e-06, + "loss": 1.6386, + "step": 3655 + }, + { + "epoch": 0.6274240604084435, + "grad_norm": 3.5166375079723893, + "learning_rate": 6.439163629378549e-06, + "loss": 1.6907, + "step": 3656 + }, + { + "epoch": 0.6275956753046165, + "grad_norm": 3.1762171216125656, + "learning_rate": 6.4339701349535296e-06, + "loss": 1.444, + "step": 3657 + }, + { + "epoch": 0.6277672902007895, + "grad_norm": 3.564924980964853, + "learning_rate": 6.428777742271776e-06, + "loss": 1.7593, + "step": 3658 + }, + { + "epoch": 0.6279389050969624, + "grad_norm": 4.009595801412592, + "learning_rate": 6.4235864529375005e-06, + "loss": 1.639, + "step": 3659 + }, + { + "epoch": 0.6281105199931354, + "grad_norm": 3.382801063046933, + "learning_rate": 6.4183962685545845e-06, + "loss": 1.3942, + "step": 3660 + }, + { + "epoch": 0.6282821348893084, + "grad_norm": 3.0578832312616093, + "learning_rate": 6.413207190726556e-06, + "loss": 1.555, + "step": 3661 + }, + { + "epoch": 0.6284537497854814, + "grad_norm": 3.462684674440041, + "learning_rate": 6.4080192210566085e-06, + "loss": 1.6732, + "step": 3662 + }, + { + 
"epoch": 0.6286253646816544, + "grad_norm": 3.0427530879325726, + "learning_rate": 6.402832361147595e-06, + "loss": 1.429, + "step": 3663 + }, + { + "epoch": 0.6287969795778273, + "grad_norm": 3.3477560926201955, + "learning_rate": 6.397646612602017e-06, + "loss": 1.5817, + "step": 3664 + }, + { + "epoch": 0.6289685944740003, + "grad_norm": 3.170673179673043, + "learning_rate": 6.392461977022044e-06, + "loss": 1.585, + "step": 3665 + }, + { + "epoch": 0.6291402093701733, + "grad_norm": 3.7790798834673214, + "learning_rate": 6.3872784560095e-06, + "loss": 1.4983, + "step": 3666 + }, + { + "epoch": 0.6293118242663464, + "grad_norm": 3.126668507127899, + "learning_rate": 6.382096051165847e-06, + "loss": 1.6057, + "step": 3667 + }, + { + "epoch": 0.6294834391625193, + "grad_norm": 3.2705280565880854, + "learning_rate": 6.3769147640922254e-06, + "loss": 1.2736, + "step": 3668 + }, + { + "epoch": 0.6296550540586923, + "grad_norm": 3.225755690959349, + "learning_rate": 6.371734596389415e-06, + "loss": 1.4795, + "step": 3669 + }, + { + "epoch": 0.6298266689548653, + "grad_norm": 4.128931141771984, + "learning_rate": 6.36655554965786e-06, + "loss": 1.4248, + "step": 3670 + }, + { + "epoch": 0.6299982838510383, + "grad_norm": 2.8665491318890024, + "learning_rate": 6.361377625497651e-06, + "loss": 1.5379, + "step": 3671 + }, + { + "epoch": 0.6301698987472113, + "grad_norm": 3.391287368139855, + "learning_rate": 6.356200825508538e-06, + "loss": 1.5778, + "step": 3672 + }, + { + "epoch": 0.6303415136433842, + "grad_norm": 3.2451638968289136, + "learning_rate": 6.351025151289915e-06, + "loss": 1.5252, + "step": 3673 + }, + { + "epoch": 0.6305131285395572, + "grad_norm": 3.437817930831985, + "learning_rate": 6.345850604440833e-06, + "loss": 1.587, + "step": 3674 + }, + { + "epoch": 0.6306847434357302, + "grad_norm": 4.4183841791637954, + "learning_rate": 6.340677186559998e-06, + "loss": 1.5805, + "step": 3675 + }, + { + "epoch": 0.6308563583319032, + "grad_norm": 4.337792287117245, + "learning_rate": 6.335504899245761e-06, + "loss": 1.5193, + "step": 3676 + }, + { + "epoch": 0.6310279732280762, + "grad_norm": 3.5091697528556445, + "learning_rate": 6.330333744096131e-06, + "loss": 1.6965, + "step": 3677 + }, + { + "epoch": 0.6311995881242491, + "grad_norm": 3.6592210897623647, + "learning_rate": 6.3251637227087645e-06, + "loss": 1.3187, + "step": 3678 + }, + { + "epoch": 0.6313712030204222, + "grad_norm": 3.4214428461130377, + "learning_rate": 6.319994836680957e-06, + "loss": 1.5261, + "step": 3679 + }, + { + "epoch": 0.6315428179165952, + "grad_norm": 3.9302958773314693, + "learning_rate": 6.3148270876096695e-06, + "loss": 1.6667, + "step": 3680 + }, + { + "epoch": 0.6317144328127682, + "grad_norm": 3.881326719868973, + "learning_rate": 6.309660477091501e-06, + "loss": 1.5373, + "step": 3681 + }, + { + "epoch": 0.6318860477089412, + "grad_norm": 4.872974155548524, + "learning_rate": 6.3044950067227066e-06, + "loss": 1.7324, + "step": 3682 + }, + { + "epoch": 0.6320576626051141, + "grad_norm": 5.07238964319186, + "learning_rate": 6.299330678099182e-06, + "loss": 1.7323, + "step": 3683 + }, + { + "epoch": 0.6322292775012871, + "grad_norm": 3.9692110166764167, + "learning_rate": 6.294167492816476e-06, + "loss": 1.6193, + "step": 3684 + }, + { + "epoch": 0.6324008923974601, + "grad_norm": 3.1700893639605647, + "learning_rate": 6.289005452469778e-06, + "loss": 1.3325, + "step": 3685 + }, + { + "epoch": 0.6325725072936331, + "grad_norm": 3.416355454492311, + "learning_rate": 6.2838445586539255e-06, + "loss": 
1.5966, + "step": 3686 + }, + { + "epoch": 0.632744122189806, + "grad_norm": 3.791320115429006, + "learning_rate": 6.278684812963407e-06, + "loss": 1.3722, + "step": 3687 + }, + { + "epoch": 0.632915737085979, + "grad_norm": 3.1000746339948617, + "learning_rate": 6.2735262169923495e-06, + "loss": 1.5722, + "step": 3688 + }, + { + "epoch": 0.633087351982152, + "grad_norm": 3.6052364477817123, + "learning_rate": 6.2683687723345295e-06, + "loss": 1.6687, + "step": 3689 + }, + { + "epoch": 0.6332589668783251, + "grad_norm": 4.18193478632042, + "learning_rate": 6.263212480583372e-06, + "loss": 1.7141, + "step": 3690 + }, + { + "epoch": 0.6334305817744981, + "grad_norm": 3.6988980422128104, + "learning_rate": 6.258057343331927e-06, + "loss": 1.5213, + "step": 3691 + }, + { + "epoch": 0.633602196670671, + "grad_norm": 3.7801072635505193, + "learning_rate": 6.252903362172911e-06, + "loss": 1.7459, + "step": 3692 + }, + { + "epoch": 0.633773811566844, + "grad_norm": 3.748680941484881, + "learning_rate": 6.2477505386986655e-06, + "loss": 1.9347, + "step": 3693 + }, + { + "epoch": 0.633945426463017, + "grad_norm": 3.863835905110704, + "learning_rate": 6.242598874501187e-06, + "loss": 1.6168, + "step": 3694 + }, + { + "epoch": 0.63411704135919, + "grad_norm": 3.7547104864174883, + "learning_rate": 6.237448371172108e-06, + "loss": 1.7518, + "step": 3695 + }, + { + "epoch": 0.634288656255363, + "grad_norm": 3.7202810068051186, + "learning_rate": 6.2322990303027055e-06, + "loss": 1.4998, + "step": 3696 + }, + { + "epoch": 0.6344602711515359, + "grad_norm": 5.1686129810012735, + "learning_rate": 6.2271508534838885e-06, + "loss": 1.7796, + "step": 3697 + }, + { + "epoch": 0.6346318860477089, + "grad_norm": 3.8366474035647844, + "learning_rate": 6.222003842306214e-06, + "loss": 1.7047, + "step": 3698 + }, + { + "epoch": 0.6348035009438819, + "grad_norm": 4.00011838855578, + "learning_rate": 6.21685799835988e-06, + "loss": 1.5354, + "step": 3699 + }, + { + "epoch": 0.634975115840055, + "grad_norm": 3.8097320904639966, + "learning_rate": 6.2117133232347225e-06, + "loss": 1.5618, + "step": 3700 + }, + { + "epoch": 0.6351467307362278, + "grad_norm": 3.3871116041841103, + "learning_rate": 6.206569818520212e-06, + "loss": 1.6155, + "step": 3701 + }, + { + "epoch": 0.6353183456324009, + "grad_norm": 3.613384562896955, + "learning_rate": 6.201427485805469e-06, + "loss": 1.3645, + "step": 3702 + }, + { + "epoch": 0.6354899605285739, + "grad_norm": 3.3617147891575136, + "learning_rate": 6.196286326679231e-06, + "loss": 1.487, + "step": 3703 + }, + { + "epoch": 0.6356615754247469, + "grad_norm": 3.9168910731786566, + "learning_rate": 6.191146342729892e-06, + "loss": 1.5568, + "step": 3704 + }, + { + "epoch": 0.6358331903209199, + "grad_norm": 3.931999100186654, + "learning_rate": 6.186007535545475e-06, + "loss": 1.5467, + "step": 3705 + }, + { + "epoch": 0.6360048052170928, + "grad_norm": 3.252701382926673, + "learning_rate": 6.1808699067136425e-06, + "loss": 1.5966, + "step": 3706 + }, + { + "epoch": 0.6361764201132658, + "grad_norm": 3.983194022487637, + "learning_rate": 6.175733457821691e-06, + "loss": 1.7826, + "step": 3707 + }, + { + "epoch": 0.6363480350094388, + "grad_norm": 4.114251383059512, + "learning_rate": 6.170598190456556e-06, + "loss": 1.3341, + "step": 3708 + }, + { + "epoch": 0.6365196499056118, + "grad_norm": 3.9162467459067525, + "learning_rate": 6.1654641062047996e-06, + "loss": 1.7739, + "step": 3709 + }, + { + "epoch": 0.6366912648017848, + "grad_norm": 3.218901458927811, + "learning_rate": 
6.160331206652624e-06, + "loss": 1.6653, + "step": 3710 + }, + { + "epoch": 0.6368628796979577, + "grad_norm": 3.5083626587042596, + "learning_rate": 6.155199493385864e-06, + "loss": 1.6322, + "step": 3711 + }, + { + "epoch": 0.6370344945941308, + "grad_norm": 3.6412562653893294, + "learning_rate": 6.150068967989995e-06, + "loss": 1.6263, + "step": 3712 + }, + { + "epoch": 0.6372061094903038, + "grad_norm": 4.315141381195538, + "learning_rate": 6.144939632050112e-06, + "loss": 1.6946, + "step": 3713 + }, + { + "epoch": 0.6373777243864768, + "grad_norm": 3.874185437103271, + "learning_rate": 6.13981148715096e-06, + "loss": 1.7088, + "step": 3714 + }, + { + "epoch": 0.6375493392826498, + "grad_norm": 3.076904863041038, + "learning_rate": 6.134684534876892e-06, + "loss": 1.4948, + "step": 3715 + }, + { + "epoch": 0.6377209541788227, + "grad_norm": 3.911491755708042, + "learning_rate": 6.129558776811915e-06, + "loss": 1.3543, + "step": 3716 + }, + { + "epoch": 0.6378925690749957, + "grad_norm": 4.320289096329126, + "learning_rate": 6.124434214539654e-06, + "loss": 1.5979, + "step": 3717 + }, + { + "epoch": 0.6380641839711687, + "grad_norm": 3.5407349273503086, + "learning_rate": 6.119310849643371e-06, + "loss": 1.7499, + "step": 3718 + }, + { + "epoch": 0.6382357988673417, + "grad_norm": 3.4988565958795466, + "learning_rate": 6.114188683705959e-06, + "loss": 1.6295, + "step": 3719 + }, + { + "epoch": 0.6384074137635146, + "grad_norm": 3.2751617424176196, + "learning_rate": 6.109067718309936e-06, + "loss": 1.3878, + "step": 3720 + }, + { + "epoch": 0.6385790286596876, + "grad_norm": 4.0274202279877835, + "learning_rate": 6.103947955037447e-06, + "loss": 1.7432, + "step": 3721 + }, + { + "epoch": 0.6387506435558606, + "grad_norm": 3.412700859198055, + "learning_rate": 6.098829395470269e-06, + "loss": 1.7594, + "step": 3722 + }, + { + "epoch": 0.6389222584520337, + "grad_norm": 3.6947286015485927, + "learning_rate": 6.09371204118981e-06, + "loss": 1.611, + "step": 3723 + }, + { + "epoch": 0.6390938733482067, + "grad_norm": 3.4723923501632163, + "learning_rate": 6.088595893777105e-06, + "loss": 1.5917, + "step": 3724 + }, + { + "epoch": 0.6392654882443796, + "grad_norm": 3.810015283025792, + "learning_rate": 6.083480954812809e-06, + "loss": 1.6391, + "step": 3725 + }, + { + "epoch": 0.6394371031405526, + "grad_norm": 4.316265361954921, + "learning_rate": 6.078367225877217e-06, + "loss": 1.432, + "step": 3726 + }, + { + "epoch": 0.6396087180367256, + "grad_norm": 3.6392790149117564, + "learning_rate": 6.073254708550231e-06, + "loss": 1.5334, + "step": 3727 + }, + { + "epoch": 0.6397803329328986, + "grad_norm": 4.5333545093331775, + "learning_rate": 6.068143404411395e-06, + "loss": 1.6917, + "step": 3728 + }, + { + "epoch": 0.6399519478290716, + "grad_norm": 3.6116840105698556, + "learning_rate": 6.063033315039873e-06, + "loss": 1.6131, + "step": 3729 + }, + { + "epoch": 0.6401235627252445, + "grad_norm": 3.997257841992301, + "learning_rate": 6.057924442014451e-06, + "loss": 1.6189, + "step": 3730 + }, + { + "epoch": 0.6402951776214175, + "grad_norm": 3.0261820059168856, + "learning_rate": 6.052816786913547e-06, + "loss": 1.4005, + "step": 3731 + }, + { + "epoch": 0.6404667925175905, + "grad_norm": 3.735924258990418, + "learning_rate": 6.047710351315195e-06, + "loss": 1.5999, + "step": 3732 + }, + { + "epoch": 0.6406384074137635, + "grad_norm": 3.871643151320293, + "learning_rate": 6.042605136797051e-06, + "loss": 1.8283, + "step": 3733 + }, + { + "epoch": 0.6408100223099366, + "grad_norm": 
3.91113298025619, + "learning_rate": 6.037501144936395e-06, + "loss": 1.4033, + "step": 3734 + }, + { + "epoch": 0.6409816372061095, + "grad_norm": 3.1636064280197838, + "learning_rate": 6.032398377310139e-06, + "loss": 1.4198, + "step": 3735 + }, + { + "epoch": 0.6411532521022825, + "grad_norm": 3.3514879313978954, + "learning_rate": 6.0272968354948065e-06, + "loss": 1.4831, + "step": 3736 + }, + { + "epoch": 0.6413248669984555, + "grad_norm": 3.3447151992771893, + "learning_rate": 6.022196521066545e-06, + "loss": 1.2749, + "step": 3737 + }, + { + "epoch": 0.6414964818946285, + "grad_norm": 5.139484632492181, + "learning_rate": 6.017097435601127e-06, + "loss": 1.6103, + "step": 3738 + }, + { + "epoch": 0.6416680967908014, + "grad_norm": 3.6517191457366662, + "learning_rate": 6.0119995806739316e-06, + "loss": 1.5468, + "step": 3739 + }, + { + "epoch": 0.6418397116869744, + "grad_norm": 3.2394809338420085, + "learning_rate": 6.006902957859973e-06, + "loss": 1.5817, + "step": 3740 + }, + { + "epoch": 0.6420113265831474, + "grad_norm": 3.098384107595828, + "learning_rate": 6.00180756873388e-06, + "loss": 1.411, + "step": 3741 + }, + { + "epoch": 0.6421829414793204, + "grad_norm": 3.489325450387917, + "learning_rate": 5.996713414869895e-06, + "loss": 1.6147, + "step": 3742 + }, + { + "epoch": 0.6423545563754934, + "grad_norm": 2.6642365943225137, + "learning_rate": 5.991620497841889e-06, + "loss": 1.2264, + "step": 3743 + }, + { + "epoch": 0.6425261712716663, + "grad_norm": 3.6552380419471473, + "learning_rate": 5.986528819223344e-06, + "loss": 1.4133, + "step": 3744 + }, + { + "epoch": 0.6426977861678393, + "grad_norm": 4.197809834211433, + "learning_rate": 5.981438380587355e-06, + "loss": 1.6674, + "step": 3745 + }, + { + "epoch": 0.6428694010640124, + "grad_norm": 3.418750052935898, + "learning_rate": 5.976349183506643e-06, + "loss": 1.5798, + "step": 3746 + }, + { + "epoch": 0.6430410159601854, + "grad_norm": 3.813957763526605, + "learning_rate": 5.9712612295535375e-06, + "loss": 1.5, + "step": 3747 + }, + { + "epoch": 0.6432126308563584, + "grad_norm": 4.043553827055629, + "learning_rate": 5.966174520299996e-06, + "loss": 1.5963, + "step": 3748 + }, + { + "epoch": 0.6433842457525313, + "grad_norm": 3.1059224258429685, + "learning_rate": 5.961089057317575e-06, + "loss": 1.4502, + "step": 3749 + }, + { + "epoch": 0.6435558606487043, + "grad_norm": 3.254189456622494, + "learning_rate": 5.956004842177466e-06, + "loss": 1.6232, + "step": 3750 + }, + { + "epoch": 0.6437274755448773, + "grad_norm": 4.343900909263778, + "learning_rate": 5.95092187645045e-06, + "loss": 1.7305, + "step": 3751 + }, + { + "epoch": 0.6438990904410503, + "grad_norm": 3.83175622660869, + "learning_rate": 5.94584016170694e-06, + "loss": 1.5942, + "step": 3752 + }, + { + "epoch": 0.6440707053372233, + "grad_norm": 3.828478919426302, + "learning_rate": 5.9407596995169615e-06, + "loss": 1.422, + "step": 3753 + }, + { + "epoch": 0.6442423202333962, + "grad_norm": 3.60135097849645, + "learning_rate": 5.935680491450144e-06, + "loss": 1.7458, + "step": 3754 + }, + { + "epoch": 0.6444139351295692, + "grad_norm": 3.7429744387296577, + "learning_rate": 5.930602539075741e-06, + "loss": 1.5854, + "step": 3755 + }, + { + "epoch": 0.6445855500257422, + "grad_norm": 4.34838921481234, + "learning_rate": 5.9255258439626115e-06, + "loss": 1.6082, + "step": 3756 + }, + { + "epoch": 0.6447571649219153, + "grad_norm": 3.4027586087568307, + "learning_rate": 5.920450407679219e-06, + "loss": 1.3763, + "step": 3757 + }, + { + "epoch": 
0.6449287798180882, + "grad_norm": 3.348098033984385, + "learning_rate": 5.915376231793654e-06, + "loss": 1.4189, + "step": 3758 + }, + { + "epoch": 0.6451003947142612, + "grad_norm": 3.7797218768853753, + "learning_rate": 5.910303317873603e-06, + "loss": 1.6864, + "step": 3759 + }, + { + "epoch": 0.6452720096104342, + "grad_norm": 3.573339340544784, + "learning_rate": 5.905231667486373e-06, + "loss": 1.6512, + "step": 3760 + }, + { + "epoch": 0.6454436245066072, + "grad_norm": 3.4938775035989518, + "learning_rate": 5.900161282198874e-06, + "loss": 1.4509, + "step": 3761 + }, + { + "epoch": 0.6456152394027802, + "grad_norm": 3.7614261762484764, + "learning_rate": 5.8950921635776334e-06, + "loss": 1.5003, + "step": 3762 + }, + { + "epoch": 0.6457868542989531, + "grad_norm": 3.32744800427139, + "learning_rate": 5.890024313188774e-06, + "loss": 1.4281, + "step": 3763 + }, + { + "epoch": 0.6459584691951261, + "grad_norm": 4.215982281593581, + "learning_rate": 5.884957732598036e-06, + "loss": 1.836, + "step": 3764 + }, + { + "epoch": 0.6461300840912991, + "grad_norm": 3.554349093520539, + "learning_rate": 5.879892423370768e-06, + "loss": 1.5159, + "step": 3765 + }, + { + "epoch": 0.6463016989874721, + "grad_norm": 3.170941512623266, + "learning_rate": 5.874828387071919e-06, + "loss": 1.3565, + "step": 3766 + }, + { + "epoch": 0.6464733138836452, + "grad_norm": 4.046913398441757, + "learning_rate": 5.869765625266055e-06, + "loss": 1.5758, + "step": 3767 + }, + { + "epoch": 0.646644928779818, + "grad_norm": 4.004195100902059, + "learning_rate": 5.864704139517341e-06, + "loss": 1.7527, + "step": 3768 + }, + { + "epoch": 0.6468165436759911, + "grad_norm": 3.8295756915763732, + "learning_rate": 5.8596439313895445e-06, + "loss": 1.8035, + "step": 3769 + }, + { + "epoch": 0.6469881585721641, + "grad_norm": 3.294116541785648, + "learning_rate": 5.854585002446047e-06, + "loss": 1.6663, + "step": 3770 + }, + { + "epoch": 0.6471597734683371, + "grad_norm": 4.0980814948586355, + "learning_rate": 5.849527354249827e-06, + "loss": 1.6069, + "step": 3771 + }, + { + "epoch": 0.64733138836451, + "grad_norm": 3.378433148526006, + "learning_rate": 5.844470988363475e-06, + "loss": 1.4084, + "step": 3772 + }, + { + "epoch": 0.647503003260683, + "grad_norm": 3.9355387344465473, + "learning_rate": 5.839415906349178e-06, + "loss": 1.4713, + "step": 3773 + }, + { + "epoch": 0.647674618156856, + "grad_norm": 4.192212967061259, + "learning_rate": 5.834362109768737e-06, + "loss": 1.5389, + "step": 3774 + }, + { + "epoch": 0.647846233053029, + "grad_norm": 3.305185233657875, + "learning_rate": 5.829309600183536e-06, + "loss": 1.589, + "step": 3775 + }, + { + "epoch": 0.648017847949202, + "grad_norm": 3.133777330040181, + "learning_rate": 5.82425837915458e-06, + "loss": 1.5038, + "step": 3776 + }, + { + "epoch": 0.6481894628453749, + "grad_norm": 3.9107724160631836, + "learning_rate": 5.81920844824247e-06, + "loss": 1.375, + "step": 3777 + }, + { + "epoch": 0.6483610777415479, + "grad_norm": 3.8374469638962587, + "learning_rate": 5.814159809007414e-06, + "loss": 1.2163, + "step": 3778 + }, + { + "epoch": 0.648532692637721, + "grad_norm": 3.9245893513525694, + "learning_rate": 5.809112463009203e-06, + "loss": 1.6612, + "step": 3779 + }, + { + "epoch": 0.648704307533894, + "grad_norm": 3.3793029710745315, + "learning_rate": 5.804066411807252e-06, + "loss": 1.4323, + "step": 3780 + }, + { + "epoch": 0.648875922430067, + "grad_norm": 5.7499704582100755, + "learning_rate": 5.799021656960555e-06, + "loss": 1.9008, + "step": 3781 + 
}, + { + "epoch": 0.6490475373262399, + "grad_norm": 4.145319289603786, + "learning_rate": 5.793978200027719e-06, + "loss": 1.5552, + "step": 3782 + }, + { + "epoch": 0.6492191522224129, + "grad_norm": 3.5239460063024635, + "learning_rate": 5.78893604256695e-06, + "loss": 1.7757, + "step": 3783 + }, + { + "epoch": 0.6493907671185859, + "grad_norm": 4.241472784053818, + "learning_rate": 5.78389518613604e-06, + "loss": 1.5683, + "step": 3784 + }, + { + "epoch": 0.6495623820147589, + "grad_norm": 3.5956903412864656, + "learning_rate": 5.778855632292393e-06, + "loss": 1.4185, + "step": 3785 + }, + { + "epoch": 0.6497339969109319, + "grad_norm": 3.34291404104676, + "learning_rate": 5.773817382593008e-06, + "loss": 1.7483, + "step": 3786 + }, + { + "epoch": 0.6499056118071048, + "grad_norm": 3.576626686486406, + "learning_rate": 5.768780438594469e-06, + "loss": 1.5932, + "step": 3787 + }, + { + "epoch": 0.6500772267032778, + "grad_norm": 3.660442575443035, + "learning_rate": 5.763744801852976e-06, + "loss": 1.3911, + "step": 3788 + }, + { + "epoch": 0.6502488415994508, + "grad_norm": 4.271863341690556, + "learning_rate": 5.758710473924305e-06, + "loss": 1.6114, + "step": 3789 + }, + { + "epoch": 0.6504204564956239, + "grad_norm": 4.1106358150249935, + "learning_rate": 5.753677456363844e-06, + "loss": 1.5902, + "step": 3790 + }, + { + "epoch": 0.6505920713917968, + "grad_norm": 4.054778537435686, + "learning_rate": 5.748645750726567e-06, + "loss": 1.4673, + "step": 3791 + }, + { + "epoch": 0.6507636862879698, + "grad_norm": 3.8012734490915245, + "learning_rate": 5.743615358567051e-06, + "loss": 1.7016, + "step": 3792 + }, + { + "epoch": 0.6509353011841428, + "grad_norm": 3.606284344280702, + "learning_rate": 5.738586281439455e-06, + "loss": 1.569, + "step": 3793 + }, + { + "epoch": 0.6511069160803158, + "grad_norm": 3.507653202505188, + "learning_rate": 5.733558520897539e-06, + "loss": 1.7072, + "step": 3794 + }, + { + "epoch": 0.6512785309764888, + "grad_norm": 4.191460744400435, + "learning_rate": 5.728532078494655e-06, + "loss": 1.6124, + "step": 3795 + }, + { + "epoch": 0.6514501458726617, + "grad_norm": 4.41163625688169, + "learning_rate": 5.72350695578375e-06, + "loss": 1.6001, + "step": 3796 + }, + { + "epoch": 0.6516217607688347, + "grad_norm": 3.643930792663315, + "learning_rate": 5.718483154317365e-06, + "loss": 1.5156, + "step": 3797 + }, + { + "epoch": 0.6517933756650077, + "grad_norm": 4.110379256687413, + "learning_rate": 5.713460675647626e-06, + "loss": 1.6141, + "step": 3798 + }, + { + "epoch": 0.6519649905611807, + "grad_norm": 3.5174963074233845, + "learning_rate": 5.70843952132625e-06, + "loss": 1.6481, + "step": 3799 + }, + { + "epoch": 0.6521366054573537, + "grad_norm": 3.995495285273367, + "learning_rate": 5.70341969290455e-06, + "loss": 1.5012, + "step": 3800 + }, + { + "epoch": 0.6523082203535266, + "grad_norm": 3.354203830219037, + "learning_rate": 5.698401191933431e-06, + "loss": 1.5803, + "step": 3801 + }, + { + "epoch": 0.6524798352496997, + "grad_norm": 4.361345608317878, + "learning_rate": 5.693384019963386e-06, + "loss": 1.6315, + "step": 3802 + }, + { + "epoch": 0.6526514501458727, + "grad_norm": 3.5559420376132778, + "learning_rate": 5.68836817854449e-06, + "loss": 1.4097, + "step": 3803 + }, + { + "epoch": 0.6528230650420457, + "grad_norm": 8.442507747433941, + "learning_rate": 5.6833536692264204e-06, + "loss": 1.571, + "step": 3804 + }, + { + "epoch": 0.6529946799382187, + "grad_norm": 3.3647883521484743, + "learning_rate": 5.678340493558427e-06, + "loss": 
1.5608, + "step": 3805 + }, + { + "epoch": 0.6531662948343916, + "grad_norm": 3.861798907269382, + "learning_rate": 5.67332865308936e-06, + "loss": 1.4725, + "step": 3806 + }, + { + "epoch": 0.6533379097305646, + "grad_norm": 3.2557878562209672, + "learning_rate": 5.66831814936766e-06, + "loss": 1.4398, + "step": 3807 + }, + { + "epoch": 0.6535095246267376, + "grad_norm": 3.2104530763784234, + "learning_rate": 5.663308983941335e-06, + "loss": 1.4769, + "step": 3808 + }, + { + "epoch": 0.6536811395229106, + "grad_norm": 3.2326051216391236, + "learning_rate": 5.6583011583580026e-06, + "loss": 1.3695, + "step": 3809 + }, + { + "epoch": 0.6538527544190835, + "grad_norm": 3.979681837676408, + "learning_rate": 5.653294674164855e-06, + "loss": 1.5013, + "step": 3810 + }, + { + "epoch": 0.6540243693152565, + "grad_norm": 3.8853775755973565, + "learning_rate": 5.648289532908666e-06, + "loss": 1.5082, + "step": 3811 + }, + { + "epoch": 0.6541959842114295, + "grad_norm": 4.6946791429565256, + "learning_rate": 5.643285736135806e-06, + "loss": 1.8142, + "step": 3812 + }, + { + "epoch": 0.6543675991076026, + "grad_norm": 3.808908044611442, + "learning_rate": 5.638283285392218e-06, + "loss": 1.4008, + "step": 3813 + }, + { + "epoch": 0.6545392140037756, + "grad_norm": 3.645256850711633, + "learning_rate": 5.633282182223436e-06, + "loss": 1.6268, + "step": 3814 + }, + { + "epoch": 0.6547108288999485, + "grad_norm": 3.7812254506882663, + "learning_rate": 5.628282428174579e-06, + "loss": 1.6314, + "step": 3815 + }, + { + "epoch": 0.6548824437961215, + "grad_norm": 3.6569731191614476, + "learning_rate": 5.62328402479035e-06, + "loss": 1.585, + "step": 3816 + }, + { + "epoch": 0.6550540586922945, + "grad_norm": 3.5613744705609744, + "learning_rate": 5.618286973615026e-06, + "loss": 1.5652, + "step": 3817 + }, + { + "epoch": 0.6552256735884675, + "grad_norm": 4.0761604639075495, + "learning_rate": 5.61329127619247e-06, + "loss": 1.7782, + "step": 3818 + }, + { + "epoch": 0.6553972884846405, + "grad_norm": 3.7784871877702244, + "learning_rate": 5.608296934066131e-06, + "loss": 1.4937, + "step": 3819 + }, + { + "epoch": 0.6555689033808134, + "grad_norm": 3.245147096373824, + "learning_rate": 5.603303948779036e-06, + "loss": 1.4278, + "step": 3820 + }, + { + "epoch": 0.6557405182769864, + "grad_norm": 4.863597632653321, + "learning_rate": 5.5983123218738e-06, + "loss": 1.7635, + "step": 3821 + }, + { + "epoch": 0.6559121331731594, + "grad_norm": 3.667927899453101, + "learning_rate": 5.593322054892605e-06, + "loss": 1.7334, + "step": 3822 + }, + { + "epoch": 0.6560837480693325, + "grad_norm": 3.405052814879707, + "learning_rate": 5.588333149377218e-06, + "loss": 1.491, + "step": 3823 + }, + { + "epoch": 0.6562553629655054, + "grad_norm": 3.827261534813475, + "learning_rate": 5.583345606868988e-06, + "loss": 1.5387, + "step": 3824 + }, + { + "epoch": 0.6564269778616784, + "grad_norm": 3.393721533454186, + "learning_rate": 5.5783594289088465e-06, + "loss": 1.6266, + "step": 3825 + }, + { + "epoch": 0.6565985927578514, + "grad_norm": 3.3119573433315592, + "learning_rate": 5.573374617037298e-06, + "loss": 1.5775, + "step": 3826 + }, + { + "epoch": 0.6567702076540244, + "grad_norm": 3.475392878069581, + "learning_rate": 5.568391172794421e-06, + "loss": 1.7677, + "step": 3827 + }, + { + "epoch": 0.6569418225501974, + "grad_norm": 3.947711519121117, + "learning_rate": 5.5634090977198825e-06, + "loss": 1.5809, + "step": 3828 + }, + { + "epoch": 0.6571134374463703, + "grad_norm": 3.215947447834526, + "learning_rate": 
5.558428393352914e-06, + "loss": 1.3401, + "step": 3829 + }, + { + "epoch": 0.6572850523425433, + "grad_norm": 3.29675215204399, + "learning_rate": 5.553449061232331e-06, + "loss": 1.4942, + "step": 3830 + }, + { + "epoch": 0.6574566672387163, + "grad_norm": 3.5809315580188, + "learning_rate": 5.548471102896531e-06, + "loss": 1.6686, + "step": 3831 + }, + { + "epoch": 0.6576282821348893, + "grad_norm": 3.4423779321189536, + "learning_rate": 5.543494519883468e-06, + "loss": 1.5861, + "step": 3832 + }, + { + "epoch": 0.6577998970310623, + "grad_norm": 3.5034463744390956, + "learning_rate": 5.53851931373069e-06, + "loss": 1.4006, + "step": 3833 + }, + { + "epoch": 0.6579715119272352, + "grad_norm": 3.2764241047334086, + "learning_rate": 5.5335454859753154e-06, + "loss": 1.6529, + "step": 3834 + }, + { + "epoch": 0.6581431268234083, + "grad_norm": 3.3531414852488126, + "learning_rate": 5.528573038154028e-06, + "loss": 1.3864, + "step": 3835 + }, + { + "epoch": 0.6583147417195813, + "grad_norm": 3.3001063078621087, + "learning_rate": 5.523601971803094e-06, + "loss": 1.1959, + "step": 3836 + }, + { + "epoch": 0.6584863566157543, + "grad_norm": 3.388122985617715, + "learning_rate": 5.5186322884583475e-06, + "loss": 1.4468, + "step": 3837 + }, + { + "epoch": 0.6586579715119273, + "grad_norm": 3.4799586327912655, + "learning_rate": 5.513663989655198e-06, + "loss": 1.7559, + "step": 3838 + }, + { + "epoch": 0.6588295864081002, + "grad_norm": 3.7502500757389354, + "learning_rate": 5.50869707692863e-06, + "loss": 1.6364, + "step": 3839 + }, + { + "epoch": 0.6590012013042732, + "grad_norm": 4.1225471729109975, + "learning_rate": 5.5037315518131975e-06, + "loss": 1.7462, + "step": 3840 + }, + { + "epoch": 0.6591728162004462, + "grad_norm": 3.2344214171439796, + "learning_rate": 5.498767415843023e-06, + "loss": 1.3246, + "step": 3841 + }, + { + "epoch": 0.6593444310966192, + "grad_norm": 3.356319978431104, + "learning_rate": 5.493804670551796e-06, + "loss": 1.5977, + "step": 3842 + }, + { + "epoch": 0.6595160459927921, + "grad_norm": 4.315759669720856, + "learning_rate": 5.488843317472789e-06, + "loss": 1.5148, + "step": 3843 + }, + { + "epoch": 0.6596876608889651, + "grad_norm": 3.669430836405578, + "learning_rate": 5.483883358138835e-06, + "loss": 1.7643, + "step": 3844 + }, + { + "epoch": 0.6598592757851381, + "grad_norm": 3.589622468686828, + "learning_rate": 5.478924794082345e-06, + "loss": 1.7152, + "step": 3845 + }, + { + "epoch": 0.6600308906813112, + "grad_norm": 3.1092434015412986, + "learning_rate": 5.4739676268352885e-06, + "loss": 1.6018, + "step": 3846 + }, + { + "epoch": 0.6602025055774842, + "grad_norm": 4.378320691719215, + "learning_rate": 5.469011857929202e-06, + "loss": 1.5139, + "step": 3847 + }, + { + "epoch": 0.6603741204736571, + "grad_norm": 3.258108429552527, + "learning_rate": 5.4640574888952e-06, + "loss": 1.5399, + "step": 3848 + }, + { + "epoch": 0.6605457353698301, + "grad_norm": 4.24650783765689, + "learning_rate": 5.459104521263962e-06, + "loss": 1.7231, + "step": 3849 + }, + { + "epoch": 0.6607173502660031, + "grad_norm": 3.3134825337100677, + "learning_rate": 5.454152956565737e-06, + "loss": 1.431, + "step": 3850 + }, + { + "epoch": 0.6608889651621761, + "grad_norm": 3.654304765614279, + "learning_rate": 5.449202796330328e-06, + "loss": 1.5113, + "step": 3851 + }, + { + "epoch": 0.6610605800583491, + "grad_norm": 3.1029310907071506, + "learning_rate": 5.444254042087118e-06, + "loss": 1.5245, + "step": 3852 + }, + { + "epoch": 0.661232194954522, + "grad_norm": 
3.9369182906762337, + "learning_rate": 5.439306695365045e-06, + "loss": 1.7089, + "step": 3853 + }, + { + "epoch": 0.661403809850695, + "grad_norm": 3.762041391146191, + "learning_rate": 5.43436075769262e-06, + "loss": 1.7283, + "step": 3854 + }, + { + "epoch": 0.661575424746868, + "grad_norm": 3.3277086108831666, + "learning_rate": 5.42941623059792e-06, + "loss": 1.4766, + "step": 3855 + }, + { + "epoch": 0.661747039643041, + "grad_norm": 3.5650734591560647, + "learning_rate": 5.424473115608575e-06, + "loss": 1.564, + "step": 3856 + }, + { + "epoch": 0.6619186545392141, + "grad_norm": 4.164504200892568, + "learning_rate": 5.419531414251788e-06, + "loss": 1.6105, + "step": 3857 + }, + { + "epoch": 0.662090269435387, + "grad_norm": 3.4987612862190285, + "learning_rate": 5.414591128054328e-06, + "loss": 1.5773, + "step": 3858 + }, + { + "epoch": 0.66226188433156, + "grad_norm": 3.0266802239828956, + "learning_rate": 5.409652258542514e-06, + "loss": 1.4863, + "step": 3859 + }, + { + "epoch": 0.662433499227733, + "grad_norm": 4.254773157911801, + "learning_rate": 5.404714807242243e-06, + "loss": 1.5582, + "step": 3860 + }, + { + "epoch": 0.662605114123906, + "grad_norm": 3.685905771110845, + "learning_rate": 5.399778775678958e-06, + "loss": 1.6023, + "step": 3861 + }, + { + "epoch": 0.6627767290200789, + "grad_norm": 3.728081565392676, + "learning_rate": 5.3948441653776755e-06, + "loss": 1.5571, + "step": 3862 + }, + { + "epoch": 0.6629483439162519, + "grad_norm": 3.509278680247271, + "learning_rate": 5.389910977862967e-06, + "loss": 1.4998, + "step": 3863 + }, + { + "epoch": 0.6631199588124249, + "grad_norm": 3.709286049239896, + "learning_rate": 5.384979214658972e-06, + "loss": 1.4901, + "step": 3864 + }, + { + "epoch": 0.6632915737085979, + "grad_norm": 3.2930265761792805, + "learning_rate": 5.380048877289381e-06, + "loss": 1.4237, + "step": 3865 + }, + { + "epoch": 0.6634631886047709, + "grad_norm": 3.114168467724853, + "learning_rate": 5.37511996727744e-06, + "loss": 1.5126, + "step": 3866 + }, + { + "epoch": 0.6636348035009438, + "grad_norm": 3.8053023646331616, + "learning_rate": 5.370192486145968e-06, + "loss": 1.6188, + "step": 3867 + }, + { + "epoch": 0.6638064183971168, + "grad_norm": 3.7342387248357003, + "learning_rate": 5.365266435417335e-06, + "loss": 1.4849, + "step": 3868 + }, + { + "epoch": 0.6639780332932899, + "grad_norm": 4.255470305909646, + "learning_rate": 5.360341816613472e-06, + "loss": 1.7316, + "step": 3869 + }, + { + "epoch": 0.6641496481894629, + "grad_norm": 3.234178012456068, + "learning_rate": 5.355418631255864e-06, + "loss": 1.5925, + "step": 3870 + }, + { + "epoch": 0.6643212630856359, + "grad_norm": 3.368678076546681, + "learning_rate": 5.350496880865548e-06, + "loss": 1.3303, + "step": 3871 + }, + { + "epoch": 0.6644928779818088, + "grad_norm": 3.755630333185734, + "learning_rate": 5.34557656696313e-06, + "loss": 1.7208, + "step": 3872 + }, + { + "epoch": 0.6646644928779818, + "grad_norm": 3.7523038131565043, + "learning_rate": 5.340657691068765e-06, + "loss": 1.5299, + "step": 3873 + }, + { + "epoch": 0.6648361077741548, + "grad_norm": 4.06936890653067, + "learning_rate": 5.335740254702168e-06, + "loss": 1.4689, + "step": 3874 + }, + { + "epoch": 0.6650077226703278, + "grad_norm": 3.4581041710460014, + "learning_rate": 5.330824259382601e-06, + "loss": 1.4403, + "step": 3875 + }, + { + "epoch": 0.6651793375665007, + "grad_norm": 3.703610318006061, + "learning_rate": 5.325909706628891e-06, + "loss": 1.4609, + "step": 3876 + }, + { + "epoch": 
0.6653509524626737, + "grad_norm": 3.6637388270178706, + "learning_rate": 5.32099659795941e-06, + "loss": 1.5536, + "step": 3877 + }, + { + "epoch": 0.6655225673588467, + "grad_norm": 3.356576624946815, + "learning_rate": 5.31608493489209e-06, + "loss": 1.6712, + "step": 3878 + }, + { + "epoch": 0.6656941822550198, + "grad_norm": 3.8669640864273096, + "learning_rate": 5.311174718944417e-06, + "loss": 1.7317, + "step": 3879 + }, + { + "epoch": 0.6658657971511928, + "grad_norm": 3.398308623956015, + "learning_rate": 5.3062659516334224e-06, + "loss": 1.6039, + "step": 3880 + }, + { + "epoch": 0.6660374120473657, + "grad_norm": 3.6410260692023466, + "learning_rate": 5.301358634475697e-06, + "loss": 1.3994, + "step": 3881 + }, + { + "epoch": 0.6662090269435387, + "grad_norm": 2.854901084902104, + "learning_rate": 5.296452768987388e-06, + "loss": 1.3275, + "step": 3882 + }, + { + "epoch": 0.6663806418397117, + "grad_norm": 3.2308176254722323, + "learning_rate": 5.291548356684177e-06, + "loss": 1.6324, + "step": 3883 + }, + { + "epoch": 0.6665522567358847, + "grad_norm": 3.892612168344562, + "learning_rate": 5.286645399081317e-06, + "loss": 1.5143, + "step": 3884 + }, + { + "epoch": 0.6667238716320577, + "grad_norm": 3.730994175188248, + "learning_rate": 5.281743897693594e-06, + "loss": 1.7345, + "step": 3885 + }, + { + "epoch": 0.6668954865282306, + "grad_norm": 3.69902992748495, + "learning_rate": 5.276843854035356e-06, + "loss": 1.7559, + "step": 3886 + }, + { + "epoch": 0.6670671014244036, + "grad_norm": 2.9942314219098405, + "learning_rate": 5.271945269620496e-06, + "loss": 1.4323, + "step": 3887 + }, + { + "epoch": 0.6672387163205766, + "grad_norm": 3.3150870539401334, + "learning_rate": 5.267048145962461e-06, + "loss": 1.5008, + "step": 3888 + }, + { + "epoch": 0.6674103312167496, + "grad_norm": 3.5851329871551605, + "learning_rate": 5.2621524845742386e-06, + "loss": 1.252, + "step": 3889 + }, + { + "epoch": 0.6675819461129227, + "grad_norm": 4.6445493644629, + "learning_rate": 5.2572582869683655e-06, + "loss": 1.7305, + "step": 3890 + }, + { + "epoch": 0.6677535610090956, + "grad_norm": 3.871220324859534, + "learning_rate": 5.252365554656933e-06, + "loss": 1.657, + "step": 3891 + }, + { + "epoch": 0.6679251759052686, + "grad_norm": 3.4778828998601305, + "learning_rate": 5.247474289151574e-06, + "loss": 1.5563, + "step": 3892 + }, + { + "epoch": 0.6680967908014416, + "grad_norm": 3.8592138961417977, + "learning_rate": 5.2425844919634764e-06, + "loss": 1.4394, + "step": 3893 + }, + { + "epoch": 0.6682684056976146, + "grad_norm": 3.8650916338475887, + "learning_rate": 5.237696164603363e-06, + "loss": 1.5094, + "step": 3894 + }, + { + "epoch": 0.6684400205937875, + "grad_norm": 3.309052291731976, + "learning_rate": 5.232809308581504e-06, + "loss": 1.6879, + "step": 3895 + }, + { + "epoch": 0.6686116354899605, + "grad_norm": 3.2356204945325384, + "learning_rate": 5.227923925407723e-06, + "loss": 1.4779, + "step": 3896 + }, + { + "epoch": 0.6687832503861335, + "grad_norm": 3.512022359886158, + "learning_rate": 5.223040016591383e-06, + "loss": 1.6374, + "step": 3897 + }, + { + "epoch": 0.6689548652823065, + "grad_norm": 3.2002671797979407, + "learning_rate": 5.218157583641399e-06, + "loss": 1.3384, + "step": 3898 + }, + { + "epoch": 0.6691264801784795, + "grad_norm": 3.7582122403995695, + "learning_rate": 5.213276628066213e-06, + "loss": 1.6508, + "step": 3899 + }, + { + "epoch": 0.6692980950746524, + "grad_norm": 3.9795046424670812, + "learning_rate": 5.2083971513738315e-06, + "loss": 1.7793, + 
"step": 3900 + }, + { + "epoch": 0.6694697099708254, + "grad_norm": 3.475347264737436, + "learning_rate": 5.2035191550717856e-06, + "loss": 1.6378, + "step": 3901 + }, + { + "epoch": 0.6696413248669985, + "grad_norm": 3.6550460250906895, + "learning_rate": 5.19864264066716e-06, + "loss": 1.5344, + "step": 3902 + }, + { + "epoch": 0.6698129397631715, + "grad_norm": 3.591239813010484, + "learning_rate": 5.193767609666584e-06, + "loss": 1.4901, + "step": 3903 + }, + { + "epoch": 0.6699845546593445, + "grad_norm": 3.958480771878438, + "learning_rate": 5.188894063576217e-06, + "loss": 1.4217, + "step": 3904 + }, + { + "epoch": 0.6701561695555174, + "grad_norm": 3.363445769887249, + "learning_rate": 5.184022003901767e-06, + "loss": 1.3202, + "step": 3905 + }, + { + "epoch": 0.6703277844516904, + "grad_norm": 3.5026037587256593, + "learning_rate": 5.179151432148486e-06, + "loss": 1.5207, + "step": 3906 + }, + { + "epoch": 0.6704993993478634, + "grad_norm": 4.397067444312862, + "learning_rate": 5.174282349821158e-06, + "loss": 1.6645, + "step": 3907 + }, + { + "epoch": 0.6706710142440364, + "grad_norm": 3.4970869102819178, + "learning_rate": 5.169414758424115e-06, + "loss": 1.5136, + "step": 3908 + }, + { + "epoch": 0.6708426291402094, + "grad_norm": 2.9771055986395134, + "learning_rate": 5.164548659461219e-06, + "loss": 1.3559, + "step": 3909 + }, + { + "epoch": 0.6710142440363823, + "grad_norm": 2.8996405171780557, + "learning_rate": 5.159684054435879e-06, + "loss": 1.5426, + "step": 3910 + }, + { + "epoch": 0.6711858589325553, + "grad_norm": 4.678963799671272, + "learning_rate": 5.154820944851038e-06, + "loss": 1.5147, + "step": 3911 + }, + { + "epoch": 0.6713574738287283, + "grad_norm": 2.9234519732780657, + "learning_rate": 5.149959332209187e-06, + "loss": 1.3999, + "step": 3912 + }, + { + "epoch": 0.6715290887249014, + "grad_norm": 3.885121016047288, + "learning_rate": 5.145099218012339e-06, + "loss": 1.8163, + "step": 3913 + }, + { + "epoch": 0.6717007036210743, + "grad_norm": 3.312818127045676, + "learning_rate": 5.140240603762048e-06, + "loss": 1.701, + "step": 3914 + }, + { + "epoch": 0.6718723185172473, + "grad_norm": 3.219063271686853, + "learning_rate": 5.135383490959412e-06, + "loss": 1.4818, + "step": 3915 + }, + { + "epoch": 0.6720439334134203, + "grad_norm": 3.243975262572838, + "learning_rate": 5.130527881105062e-06, + "loss": 1.5344, + "step": 3916 + }, + { + "epoch": 0.6722155483095933, + "grad_norm": 3.5153863737130764, + "learning_rate": 5.125673775699165e-06, + "loss": 1.5154, + "step": 3917 + }, + { + "epoch": 0.6723871632057663, + "grad_norm": 3.2679862701220874, + "learning_rate": 5.12082117624142e-06, + "loss": 1.6109, + "step": 3918 + }, + { + "epoch": 0.6725587781019392, + "grad_norm": 4.416280155724237, + "learning_rate": 5.115970084231059e-06, + "loss": 1.5567, + "step": 3919 + }, + { + "epoch": 0.6727303929981122, + "grad_norm": 3.697086990633611, + "learning_rate": 5.111120501166855e-06, + "loss": 1.5335, + "step": 3920 + }, + { + "epoch": 0.6729020078942852, + "grad_norm": 3.265268161005393, + "learning_rate": 5.10627242854711e-06, + "loss": 1.4798, + "step": 3921 + }, + { + "epoch": 0.6730736227904582, + "grad_norm": 3.4879151569492337, + "learning_rate": 5.101425867869667e-06, + "loss": 1.3589, + "step": 3922 + }, + { + "epoch": 0.6732452376866312, + "grad_norm": 3.497302152468901, + "learning_rate": 5.096580820631888e-06, + "loss": 1.5112, + "step": 3923 + }, + { + "epoch": 0.6734168525828041, + "grad_norm": 4.016720530507737, + "learning_rate": 
5.091737288330683e-06, + "loss": 1.3377, + "step": 3924 + }, + { + "epoch": 0.6735884674789772, + "grad_norm": 3.258448118671295, + "learning_rate": 5.086895272462475e-06, + "loss": 1.507, + "step": 3925 + }, + { + "epoch": 0.6737600823751502, + "grad_norm": 3.453691002203017, + "learning_rate": 5.08205477452324e-06, + "loss": 1.6845, + "step": 3926 + }, + { + "epoch": 0.6739316972713232, + "grad_norm": 3.890507740456927, + "learning_rate": 5.0772157960084724e-06, + "loss": 1.6099, + "step": 3927 + }, + { + "epoch": 0.6741033121674962, + "grad_norm": 4.423308475343997, + "learning_rate": 5.072378338413195e-06, + "loss": 1.5979, + "step": 3928 + }, + { + "epoch": 0.6742749270636691, + "grad_norm": 3.379399039876328, + "learning_rate": 5.067542403231968e-06, + "loss": 1.3022, + "step": 3929 + }, + { + "epoch": 0.6744465419598421, + "grad_norm": 3.321410218148661, + "learning_rate": 5.062707991958884e-06, + "loss": 1.5074, + "step": 3930 + }, + { + "epoch": 0.6746181568560151, + "grad_norm": 3.5751191505961994, + "learning_rate": 5.05787510608755e-06, + "loss": 1.5229, + "step": 3931 + }, + { + "epoch": 0.6747897717521881, + "grad_norm": 3.421852501349972, + "learning_rate": 5.053043747111119e-06, + "loss": 1.6119, + "step": 3932 + }, + { + "epoch": 0.674961386648361, + "grad_norm": 4.341804251061823, + "learning_rate": 5.048213916522255e-06, + "loss": 1.7233, + "step": 3933 + }, + { + "epoch": 0.675133001544534, + "grad_norm": 3.7237629610082505, + "learning_rate": 5.043385615813166e-06, + "loss": 1.5585, + "step": 3934 + }, + { + "epoch": 0.675304616440707, + "grad_norm": 4.215713304774691, + "learning_rate": 5.038558846475579e-06, + "loss": 1.582, + "step": 3935 + }, + { + "epoch": 0.6754762313368801, + "grad_norm": 3.782032656063582, + "learning_rate": 5.033733610000751e-06, + "loss": 1.5575, + "step": 3936 + }, + { + "epoch": 0.6756478462330531, + "grad_norm": 3.5289866908044862, + "learning_rate": 5.028909907879462e-06, + "loss": 1.5646, + "step": 3937 + }, + { + "epoch": 0.675819461129226, + "grad_norm": 4.2239655534511655, + "learning_rate": 5.024087741602015e-06, + "loss": 1.7533, + "step": 3938 + }, + { + "epoch": 0.675991076025399, + "grad_norm": 3.291419434747381, + "learning_rate": 5.019267112658247e-06, + "loss": 1.5589, + "step": 3939 + }, + { + "epoch": 0.676162690921572, + "grad_norm": 3.310097163665421, + "learning_rate": 5.014448022537517e-06, + "loss": 1.6601, + "step": 3940 + }, + { + "epoch": 0.676334305817745, + "grad_norm": 4.093098308746624, + "learning_rate": 5.009630472728711e-06, + "loss": 1.6844, + "step": 3941 + }, + { + "epoch": 0.676505920713918, + "grad_norm": 3.561701002419818, + "learning_rate": 5.0048144647202294e-06, + "loss": 1.5865, + "step": 3942 + }, + { + "epoch": 0.6766775356100909, + "grad_norm": 3.7609693232779935, + "learning_rate": 5.000000000000003e-06, + "loss": 1.5708, + "step": 3943 + }, + { + "epoch": 0.6768491505062639, + "grad_norm": 3.1049425636013828, + "learning_rate": 4.9951870800554855e-06, + "loss": 1.6051, + "step": 3944 + }, + { + "epoch": 0.6770207654024369, + "grad_norm": 3.8483992368754807, + "learning_rate": 4.990375706373657e-06, + "loss": 1.4232, + "step": 3945 + }, + { + "epoch": 0.67719238029861, + "grad_norm": 3.5360274189874366, + "learning_rate": 4.985565880441017e-06, + "loss": 1.5147, + "step": 3946 + }, + { + "epoch": 0.6773639951947829, + "grad_norm": 3.94111134637556, + "learning_rate": 4.98075760374358e-06, + "loss": 1.7055, + "step": 3947 + }, + { + "epoch": 0.6775356100909559, + "grad_norm": 3.8728257073790746, + 
"learning_rate": 4.97595087776689e-06, + "loss": 1.4102, + "step": 3948 + }, + { + "epoch": 0.6777072249871289, + "grad_norm": 3.8172761566025573, + "learning_rate": 4.971145703996015e-06, + "loss": 1.4536, + "step": 3949 + }, + { + "epoch": 0.6778788398833019, + "grad_norm": 3.9262082706288686, + "learning_rate": 4.9663420839155305e-06, + "loss": 1.435, + "step": 3950 + }, + { + "epoch": 0.6780504547794749, + "grad_norm": 3.6703704320806385, + "learning_rate": 4.961540019009546e-06, + "loss": 1.3904, + "step": 3951 + }, + { + "epoch": 0.6782220696756478, + "grad_norm": 3.2000342228278797, + "learning_rate": 4.956739510761678e-06, + "loss": 1.4211, + "step": 3952 + }, + { + "epoch": 0.6783936845718208, + "grad_norm": 3.635430532305922, + "learning_rate": 4.9519405606550704e-06, + "loss": 1.6128, + "step": 3953 + }, + { + "epoch": 0.6785652994679938, + "grad_norm": 3.570126988570555, + "learning_rate": 4.947143170172385e-06, + "loss": 1.5894, + "step": 3954 + }, + { + "epoch": 0.6787369143641668, + "grad_norm": 3.420246295829541, + "learning_rate": 4.942347340795803e-06, + "loss": 1.2383, + "step": 3955 + }, + { + "epoch": 0.6789085292603398, + "grad_norm": 3.1694378632431643, + "learning_rate": 4.9375530740070174e-06, + "loss": 1.4508, + "step": 3956 + }, + { + "epoch": 0.6790801441565127, + "grad_norm": 3.2618957057509683, + "learning_rate": 4.932760371287238e-06, + "loss": 1.5939, + "step": 3957 + }, + { + "epoch": 0.6792517590526858, + "grad_norm": 4.197017022057773, + "learning_rate": 4.927969234117199e-06, + "loss": 1.7631, + "step": 3958 + }, + { + "epoch": 0.6794233739488588, + "grad_norm": 3.9309922658244183, + "learning_rate": 4.923179663977144e-06, + "loss": 1.6707, + "step": 3959 + }, + { + "epoch": 0.6795949888450318, + "grad_norm": 3.631683877755284, + "learning_rate": 4.9183916623468446e-06, + "loss": 1.6565, + "step": 3960 + }, + { + "epoch": 0.6797666037412048, + "grad_norm": 3.858389880007458, + "learning_rate": 4.913605230705568e-06, + "loss": 1.4327, + "step": 3961 + }, + { + "epoch": 0.6799382186373777, + "grad_norm": 3.12840387065218, + "learning_rate": 4.908820370532109e-06, + "loss": 1.3767, + "step": 3962 + }, + { + "epoch": 0.6801098335335507, + "grad_norm": 4.593703744738146, + "learning_rate": 4.904037083304774e-06, + "loss": 1.5566, + "step": 3963 + }, + { + "epoch": 0.6802814484297237, + "grad_norm": 3.6983099720446493, + "learning_rate": 4.899255370501388e-06, + "loss": 1.608, + "step": 3964 + }, + { + "epoch": 0.6804530633258967, + "grad_norm": 3.2112995316900967, + "learning_rate": 4.894475233599287e-06, + "loss": 1.3857, + "step": 3965 + }, + { + "epoch": 0.6806246782220696, + "grad_norm": 3.5235278000118737, + "learning_rate": 4.889696674075313e-06, + "loss": 1.5892, + "step": 3966 + }, + { + "epoch": 0.6807962931182426, + "grad_norm": 4.175060829090475, + "learning_rate": 4.884919693405833e-06, + "loss": 1.362, + "step": 3967 + }, + { + "epoch": 0.6809679080144156, + "grad_norm": 4.186048426240538, + "learning_rate": 4.880144293066711e-06, + "loss": 1.538, + "step": 3968 + }, + { + "epoch": 0.6811395229105887, + "grad_norm": 3.591762553969462, + "learning_rate": 4.875370474533339e-06, + "loss": 1.4474, + "step": 3969 + }, + { + "epoch": 0.6813111378067617, + "grad_norm": 3.01527327008247, + "learning_rate": 4.8705982392806115e-06, + "loss": 1.4743, + "step": 3970 + }, + { + "epoch": 0.6814827527029346, + "grad_norm": 4.530116107800682, + "learning_rate": 4.865827588782932e-06, + "loss": 1.8223, + "step": 3971 + }, + { + "epoch": 0.6816543675991076, + 
"grad_norm": 3.2203214313951447, + "learning_rate": 4.861058524514219e-06, + "loss": 1.4948, + "step": 3972 + }, + { + "epoch": 0.6818259824952806, + "grad_norm": 4.499934906807462, + "learning_rate": 4.856291047947904e-06, + "loss": 1.6804, + "step": 3973 + }, + { + "epoch": 0.6819975973914536, + "grad_norm": 3.84182167045221, + "learning_rate": 4.8515251605569155e-06, + "loss": 1.508, + "step": 3974 + }, + { + "epoch": 0.6821692122876266, + "grad_norm": 3.679629477015102, + "learning_rate": 4.846760863813707e-06, + "loss": 1.4963, + "step": 3975 + }, + { + "epoch": 0.6823408271837995, + "grad_norm": 3.2077864245371317, + "learning_rate": 4.841998159190224e-06, + "loss": 1.3114, + "step": 3976 + }, + { + "epoch": 0.6825124420799725, + "grad_norm": 3.2994927698894694, + "learning_rate": 4.837237048157933e-06, + "loss": 1.4663, + "step": 3977 + }, + { + "epoch": 0.6826840569761455, + "grad_norm": 5.101126384386216, + "learning_rate": 4.832477532187805e-06, + "loss": 1.5771, + "step": 3978 + }, + { + "epoch": 0.6828556718723185, + "grad_norm": 3.5809334318824257, + "learning_rate": 4.827719612750318e-06, + "loss": 1.4568, + "step": 3979 + }, + { + "epoch": 0.6830272867684916, + "grad_norm": 3.941324145961898, + "learning_rate": 4.822963291315453e-06, + "loss": 1.478, + "step": 3980 + }, + { + "epoch": 0.6831989016646645, + "grad_norm": 3.9843793250742983, + "learning_rate": 4.818208569352697e-06, + "loss": 1.4362, + "step": 3981 + }, + { + "epoch": 0.6833705165608375, + "grad_norm": 3.407457794298381, + "learning_rate": 4.813455448331048e-06, + "loss": 1.5836, + "step": 3982 + }, + { + "epoch": 0.6835421314570105, + "grad_norm": 3.7493825930728195, + "learning_rate": 4.808703929719009e-06, + "loss": 1.5222, + "step": 3983 + }, + { + "epoch": 0.6837137463531835, + "grad_norm": 4.707694698208893, + "learning_rate": 4.803954014984589e-06, + "loss": 1.5301, + "step": 3984 + }, + { + "epoch": 0.6838853612493564, + "grad_norm": 3.6196378584014792, + "learning_rate": 4.799205705595294e-06, + "loss": 1.7968, + "step": 3985 + }, + { + "epoch": 0.6840569761455294, + "grad_norm": 3.692811793847762, + "learning_rate": 4.794459003018137e-06, + "loss": 1.7723, + "step": 3986 + }, + { + "epoch": 0.6842285910417024, + "grad_norm": 3.4371220748045483, + "learning_rate": 4.7897139087196385e-06, + "loss": 1.3896, + "step": 3987 + }, + { + "epoch": 0.6844002059378754, + "grad_norm": 4.620643225754141, + "learning_rate": 4.7849704241658205e-06, + "loss": 1.6415, + "step": 3988 + }, + { + "epoch": 0.6845718208340484, + "grad_norm": 7.274883933376554, + "learning_rate": 4.780228550822209e-06, + "loss": 1.4716, + "step": 3989 + }, + { + "epoch": 0.6847434357302213, + "grad_norm": 3.6952095520194157, + "learning_rate": 4.775488290153824e-06, + "loss": 1.7042, + "step": 3990 + }, + { + "epoch": 0.6849150506263944, + "grad_norm": 3.590090572326343, + "learning_rate": 4.7707496436252e-06, + "loss": 1.7137, + "step": 3991 + }, + { + "epoch": 0.6850866655225674, + "grad_norm": 3.3692361967380475, + "learning_rate": 4.76601261270036e-06, + "loss": 1.7711, + "step": 3992 + }, + { + "epoch": 0.6852582804187404, + "grad_norm": 3.691141304216351, + "learning_rate": 4.761277198842838e-06, + "loss": 1.546, + "step": 3993 + }, + { + "epoch": 0.6854298953149134, + "grad_norm": 3.460612458724902, + "learning_rate": 4.756543403515666e-06, + "loss": 1.5853, + "step": 3994 + }, + { + "epoch": 0.6856015102110863, + "grad_norm": 3.5285664382028434, + "learning_rate": 4.751811228181369e-06, + "loss": 1.6726, + "step": 3995 + }, + { + 
"epoch": 0.6857731251072593, + "grad_norm": 3.4892684150041466, + "learning_rate": 4.747080674301981e-06, + "loss": 1.5143, + "step": 3996 + }, + { + "epoch": 0.6859447400034323, + "grad_norm": 3.9705401165250587, + "learning_rate": 4.742351743339032e-06, + "loss": 1.8011, + "step": 3997 + }, + { + "epoch": 0.6861163548996053, + "grad_norm": 3.7408178732131163, + "learning_rate": 4.737624436753544e-06, + "loss": 1.5359, + "step": 3998 + }, + { + "epoch": 0.6862879697957782, + "grad_norm": 3.604401902013977, + "learning_rate": 4.7328987560060505e-06, + "loss": 1.5522, + "step": 3999 + }, + { + "epoch": 0.6864595846919512, + "grad_norm": 4.525970195286252, + "learning_rate": 4.728174702556566e-06, + "loss": 1.7636, + "step": 4000 + }, + { + "epoch": 0.6866311995881242, + "grad_norm": 3.9208114400596816, + "learning_rate": 4.723452277864617e-06, + "loss": 1.7299, + "step": 4001 + }, + { + "epoch": 0.6868028144842973, + "grad_norm": 3.846729956146148, + "learning_rate": 4.718731483389217e-06, + "loss": 1.659, + "step": 4002 + }, + { + "epoch": 0.6869744293804703, + "grad_norm": 3.4020880836863716, + "learning_rate": 4.7140123205888885e-06, + "loss": 1.7365, + "step": 4003 + }, + { + "epoch": 0.6871460442766432, + "grad_norm": 4.3065910455440575, + "learning_rate": 4.709294790921634e-06, + "loss": 1.5898, + "step": 4004 + }, + { + "epoch": 0.6873176591728162, + "grad_norm": 4.421336171931853, + "learning_rate": 4.704578895844956e-06, + "loss": 1.5378, + "step": 4005 + }, + { + "epoch": 0.6874892740689892, + "grad_norm": 3.6931190135132277, + "learning_rate": 4.699864636815856e-06, + "loss": 1.5436, + "step": 4006 + }, + { + "epoch": 0.6876608889651622, + "grad_norm": 3.7940630088136094, + "learning_rate": 4.695152015290833e-06, + "loss": 1.5711, + "step": 4007 + }, + { + "epoch": 0.6878325038613352, + "grad_norm": 3.358202511057543, + "learning_rate": 4.690441032725876e-06, + "loss": 1.5664, + "step": 4008 + }, + { + "epoch": 0.6880041187575081, + "grad_norm": 3.9204318584868174, + "learning_rate": 4.685731690576464e-06, + "loss": 1.677, + "step": 4009 + }, + { + "epoch": 0.6881757336536811, + "grad_norm": 3.5628801706867734, + "learning_rate": 4.681023990297568e-06, + "loss": 1.3552, + "step": 4010 + }, + { + "epoch": 0.6883473485498541, + "grad_norm": 4.575139356614996, + "learning_rate": 4.6763179333436635e-06, + "loss": 1.4469, + "step": 4011 + }, + { + "epoch": 0.6885189634460271, + "grad_norm": 3.963644885046186, + "learning_rate": 4.6716135211687075e-06, + "loss": 1.2191, + "step": 4012 + }, + { + "epoch": 0.6886905783422002, + "grad_norm": 3.696514661951681, + "learning_rate": 4.666910755226157e-06, + "loss": 1.4355, + "step": 4013 + }, + { + "epoch": 0.688862193238373, + "grad_norm": 3.513993535842884, + "learning_rate": 4.66220963696895e-06, + "loss": 1.5328, + "step": 4014 + }, + { + "epoch": 0.6890338081345461, + "grad_norm": 3.4321143564786074, + "learning_rate": 4.657510167849525e-06, + "loss": 1.491, + "step": 4015 + }, + { + "epoch": 0.6892054230307191, + "grad_norm": 3.929852951071529, + "learning_rate": 4.652812349319803e-06, + "loss": 1.3728, + "step": 4016 + }, + { + "epoch": 0.6893770379268921, + "grad_norm": 3.3265771772208454, + "learning_rate": 4.6481161828312e-06, + "loss": 1.6598, + "step": 4017 + }, + { + "epoch": 0.689548652823065, + "grad_norm": 3.306244487602365, + "learning_rate": 4.643421669834628e-06, + "loss": 1.4037, + "step": 4018 + }, + { + "epoch": 0.689720267719238, + "grad_norm": 4.06921468185158, + "learning_rate": 4.638728811780471e-06, + "loss": 
1.6693, + "step": 4019 + }, + { + "epoch": 0.689891882615411, + "grad_norm": 3.593139803176703, + "learning_rate": 4.634037610118616e-06, + "loss": 1.2728, + "step": 4020 + }, + { + "epoch": 0.690063497511584, + "grad_norm": 4.132664894066803, + "learning_rate": 4.629348066298438e-06, + "loss": 1.5557, + "step": 4021 + }, + { + "epoch": 0.690235112407757, + "grad_norm": 4.290982804283574, + "learning_rate": 4.624660181768788e-06, + "loss": 1.549, + "step": 4022 + }, + { + "epoch": 0.6904067273039299, + "grad_norm": 3.3989843088938216, + "learning_rate": 4.619973957978018e-06, + "loss": 1.3847, + "step": 4023 + }, + { + "epoch": 0.690578342200103, + "grad_norm": 3.3372460207993027, + "learning_rate": 4.6152893963739555e-06, + "loss": 1.5603, + "step": 4024 + }, + { + "epoch": 0.690749957096276, + "grad_norm": 3.7083933807875407, + "learning_rate": 4.610606498403924e-06, + "loss": 1.5215, + "step": 4025 + }, + { + "epoch": 0.690921571992449, + "grad_norm": 3.56722328582263, + "learning_rate": 4.605925265514726e-06, + "loss": 1.4015, + "step": 4026 + }, + { + "epoch": 0.691093186888622, + "grad_norm": 3.7741877114155358, + "learning_rate": 4.601245699152659e-06, + "loss": 1.5962, + "step": 4027 + }, + { + "epoch": 0.6912648017847949, + "grad_norm": 3.3260800052987745, + "learning_rate": 4.596567800763494e-06, + "loss": 1.7645, + "step": 4028 + }, + { + "epoch": 0.6914364166809679, + "grad_norm": 4.328836835025154, + "learning_rate": 4.591891571792489e-06, + "loss": 1.8139, + "step": 4029 + }, + { + "epoch": 0.6916080315771409, + "grad_norm": 3.828612340224736, + "learning_rate": 4.587217013684392e-06, + "loss": 1.5977, + "step": 4030 + }, + { + "epoch": 0.6917796464733139, + "grad_norm": 3.9756673634923003, + "learning_rate": 4.582544127883432e-06, + "loss": 1.7308, + "step": 4031 + }, + { + "epoch": 0.6919512613694869, + "grad_norm": 3.464498078097296, + "learning_rate": 4.5778729158333245e-06, + "loss": 1.5444, + "step": 4032 + }, + { + "epoch": 0.6921228762656598, + "grad_norm": 3.843080611596852, + "learning_rate": 4.57320337897726e-06, + "loss": 1.5149, + "step": 4033 + }, + { + "epoch": 0.6922944911618328, + "grad_norm": 3.7335864572779074, + "learning_rate": 4.568535518757915e-06, + "loss": 1.767, + "step": 4034 + }, + { + "epoch": 0.6924661060580058, + "grad_norm": 3.693415003019225, + "learning_rate": 4.56386933661745e-06, + "loss": 1.5709, + "step": 4035 + }, + { + "epoch": 0.6926377209541789, + "grad_norm": 3.7245490024756664, + "learning_rate": 4.559204833997507e-06, + "loss": 1.5156, + "step": 4036 + }, + { + "epoch": 0.6928093358503518, + "grad_norm": 3.9924995921847755, + "learning_rate": 4.554542012339212e-06, + "loss": 1.5499, + "step": 4037 + }, + { + "epoch": 0.6929809507465248, + "grad_norm": 3.973291131857169, + "learning_rate": 4.549880873083158e-06, + "loss": 1.8463, + "step": 4038 + }, + { + "epoch": 0.6931525656426978, + "grad_norm": 3.433478402835639, + "learning_rate": 4.545221417669438e-06, + "loss": 1.5541, + "step": 4039 + }, + { + "epoch": 0.6933241805388708, + "grad_norm": 4.639568255700584, + "learning_rate": 4.540563647537608e-06, + "loss": 1.6661, + "step": 4040 + }, + { + "epoch": 0.6934957954350438, + "grad_norm": 3.450959263034471, + "learning_rate": 4.535907564126711e-06, + "loss": 1.3996, + "step": 4041 + }, + { + "epoch": 0.6936674103312167, + "grad_norm": 3.654764062340256, + "learning_rate": 4.5312531688752735e-06, + "loss": 1.5362, + "step": 4042 + }, + { + "epoch": 0.6938390252273897, + "grad_norm": 3.4076596318593326, + "learning_rate": 
4.526600463221287e-06, + "loss": 1.42, + "step": 4043 + }, + { + "epoch": 0.6940106401235627, + "grad_norm": 3.939674494036258, + "learning_rate": 4.521949448602231e-06, + "loss": 1.2365, + "step": 4044 + }, + { + "epoch": 0.6941822550197357, + "grad_norm": 3.7290846451393733, + "learning_rate": 4.5173001264550665e-06, + "loss": 1.6374, + "step": 4045 + }, + { + "epoch": 0.6943538699159088, + "grad_norm": 3.434813455322173, + "learning_rate": 4.5126524982162165e-06, + "loss": 1.6228, + "step": 4046 + }, + { + "epoch": 0.6945254848120817, + "grad_norm": 3.720045428205574, + "learning_rate": 4.508006565321599e-06, + "loss": 1.7048, + "step": 4047 + }, + { + "epoch": 0.6946970997082547, + "grad_norm": 3.7676548823936837, + "learning_rate": 4.503362329206589e-06, + "loss": 1.4129, + "step": 4048 + }, + { + "epoch": 0.6948687146044277, + "grad_norm": 3.508726783470997, + "learning_rate": 4.498719791306051e-06, + "loss": 1.6265, + "step": 4049 + }, + { + "epoch": 0.6950403295006007, + "grad_norm": 3.373178187930436, + "learning_rate": 4.494078953054323e-06, + "loss": 1.5656, + "step": 4050 + }, + { + "epoch": 0.6952119443967736, + "grad_norm": 3.9232389110692605, + "learning_rate": 4.489439815885218e-06, + "loss": 1.4648, + "step": 4051 + }, + { + "epoch": 0.6953835592929466, + "grad_norm": 3.6837886057667797, + "learning_rate": 4.484802381232018e-06, + "loss": 1.4707, + "step": 4052 + }, + { + "epoch": 0.6955551741891196, + "grad_norm": 3.9613633675761895, + "learning_rate": 4.480166650527479e-06, + "loss": 1.779, + "step": 4053 + }, + { + "epoch": 0.6957267890852926, + "grad_norm": 3.663444023305081, + "learning_rate": 4.475532625203837e-06, + "loss": 1.5855, + "step": 4054 + }, + { + "epoch": 0.6958984039814656, + "grad_norm": 3.3590646488139084, + "learning_rate": 4.470900306692797e-06, + "loss": 1.5041, + "step": 4055 + }, + { + "epoch": 0.6960700188776385, + "grad_norm": 3.403868381805293, + "learning_rate": 4.466269696425543e-06, + "loss": 1.4067, + "step": 4056 + }, + { + "epoch": 0.6962416337738115, + "grad_norm": 3.4072122144410657, + "learning_rate": 4.461640795832721e-06, + "loss": 1.5374, + "step": 4057 + }, + { + "epoch": 0.6964132486699846, + "grad_norm": 4.699780128207795, + "learning_rate": 4.457013606344451e-06, + "loss": 1.4838, + "step": 4058 + }, + { + "epoch": 0.6965848635661576, + "grad_norm": 3.086063252298173, + "learning_rate": 4.45238812939033e-06, + "loss": 1.4686, + "step": 4059 + }, + { + "epoch": 0.6967564784623306, + "grad_norm": 3.8135958341538974, + "learning_rate": 4.447764366399424e-06, + "loss": 1.6902, + "step": 4060 + }, + { + "epoch": 0.6969280933585035, + "grad_norm": 3.932678189265142, + "learning_rate": 4.44314231880027e-06, + "loss": 1.6853, + "step": 4061 + }, + { + "epoch": 0.6970997082546765, + "grad_norm": 3.733872867187891, + "learning_rate": 4.438521988020867e-06, + "loss": 1.5015, + "step": 4062 + }, + { + "epoch": 0.6972713231508495, + "grad_norm": 6.375278236006696, + "learning_rate": 4.4339033754886974e-06, + "loss": 1.602, + "step": 4063 + }, + { + "epoch": 0.6974429380470225, + "grad_norm": 5.154963652048058, + "learning_rate": 4.429286482630698e-06, + "loss": 1.6382, + "step": 4064 + }, + { + "epoch": 0.6976145529431955, + "grad_norm": 3.672128824074522, + "learning_rate": 4.424671310873286e-06, + "loss": 1.5292, + "step": 4065 + }, + { + "epoch": 0.6977861678393684, + "grad_norm": 3.744522790354073, + "learning_rate": 4.420057861642344e-06, + "loss": 1.7362, + "step": 4066 + }, + { + "epoch": 0.6979577827355414, + "grad_norm": 
3.1371099955769894, + "learning_rate": 4.415446136363216e-06, + "loss": 1.3375, + "step": 4067 + }, + { + "epoch": 0.6981293976317144, + "grad_norm": 3.945222515907118, + "learning_rate": 4.41083613646072e-06, + "loss": 1.7616, + "step": 4068 + }, + { + "epoch": 0.6983010125278875, + "grad_norm": 3.91561660874161, + "learning_rate": 4.406227863359144e-06, + "loss": 1.9275, + "step": 4069 + }, + { + "epoch": 0.6984726274240604, + "grad_norm": 3.44762149640812, + "learning_rate": 4.40162131848223e-06, + "loss": 1.4889, + "step": 4070 + }, + { + "epoch": 0.6986442423202334, + "grad_norm": 3.4902156664961175, + "learning_rate": 4.3970165032532e-06, + "loss": 1.4691, + "step": 4071 + }, + { + "epoch": 0.6988158572164064, + "grad_norm": 3.231608556726426, + "learning_rate": 4.39241341909473e-06, + "loss": 1.4245, + "step": 4072 + }, + { + "epoch": 0.6989874721125794, + "grad_norm": 3.616164952747308, + "learning_rate": 4.387812067428968e-06, + "loss": 1.4924, + "step": 4073 + }, + { + "epoch": 0.6991590870087524, + "grad_norm": 4.397638224480771, + "learning_rate": 4.383212449677528e-06, + "loss": 1.3925, + "step": 4074 + }, + { + "epoch": 0.6993307019049253, + "grad_norm": 3.603148272767758, + "learning_rate": 4.378614567261487e-06, + "loss": 1.4656, + "step": 4075 + }, + { + "epoch": 0.6995023168010983, + "grad_norm": 3.9208723980667224, + "learning_rate": 4.3740184216013805e-06, + "loss": 1.5937, + "step": 4076 + }, + { + "epoch": 0.6996739316972713, + "grad_norm": 3.399511519988169, + "learning_rate": 4.369424014117209e-06, + "loss": 1.6229, + "step": 4077 + }, + { + "epoch": 0.6998455465934443, + "grad_norm": 3.7397111407194417, + "learning_rate": 4.364831346228442e-06, + "loss": 1.535, + "step": 4078 + }, + { + "epoch": 0.7000171614896173, + "grad_norm": 3.52731546507338, + "learning_rate": 4.360240419354007e-06, + "loss": 1.4624, + "step": 4079 + }, + { + "epoch": 0.7001887763857902, + "grad_norm": 4.43507463136684, + "learning_rate": 4.355651234912299e-06, + "loss": 1.6445, + "step": 4080 + }, + { + "epoch": 0.7003603912819633, + "grad_norm": 3.7480208567212716, + "learning_rate": 4.3510637943211656e-06, + "loss": 1.5341, + "step": 4081 + }, + { + "epoch": 0.7005320061781363, + "grad_norm": 3.694172971813163, + "learning_rate": 4.346478098997917e-06, + "loss": 1.6232, + "step": 4082 + }, + { + "epoch": 0.7007036210743093, + "grad_norm": 3.266704945075871, + "learning_rate": 4.341894150359332e-06, + "loss": 1.5689, + "step": 4083 + }, + { + "epoch": 0.7008752359704823, + "grad_norm": 3.455235367572192, + "learning_rate": 4.337311949821642e-06, + "loss": 1.6549, + "step": 4084 + }, + { + "epoch": 0.7010468508666552, + "grad_norm": 3.7691337026217604, + "learning_rate": 4.3327314988005475e-06, + "loss": 1.4269, + "step": 4085 + }, + { + "epoch": 0.7012184657628282, + "grad_norm": 3.7565675013125857, + "learning_rate": 4.328152798711196e-06, + "loss": 1.5472, + "step": 4086 + }, + { + "epoch": 0.7013900806590012, + "grad_norm": 3.3930873798837067, + "learning_rate": 4.323575850968206e-06, + "loss": 1.5944, + "step": 4087 + }, + { + "epoch": 0.7015616955551742, + "grad_norm": 3.6312343951114032, + "learning_rate": 4.319000656985641e-06, + "loss": 1.5139, + "step": 4088 + }, + { + "epoch": 0.7017333104513471, + "grad_norm": 4.0170682433801765, + "learning_rate": 4.314427218177037e-06, + "loss": 1.4584, + "step": 4089 + }, + { + "epoch": 0.7019049253475201, + "grad_norm": 3.3218281178629168, + "learning_rate": 4.309855535955383e-06, + "loss": 1.3446, + "step": 4090 + }, + { + "epoch": 
0.7020765402436931, + "grad_norm": 3.63206462801692, + "learning_rate": 4.305285611733117e-06, + "loss": 1.5769, + "step": 4091 + }, + { + "epoch": 0.7022481551398662, + "grad_norm": 3.8142679342141164, + "learning_rate": 4.300717446922144e-06, + "loss": 1.7094, + "step": 4092 + }, + { + "epoch": 0.7024197700360392, + "grad_norm": 3.64003018876382, + "learning_rate": 4.296151042933826e-06, + "loss": 1.272, + "step": 4093 + }, + { + "epoch": 0.7025913849322121, + "grad_norm": 3.604322506991338, + "learning_rate": 4.29158640117897e-06, + "loss": 1.6433, + "step": 4094 + }, + { + "epoch": 0.7027629998283851, + "grad_norm": 3.2995988187359124, + "learning_rate": 4.287023523067853e-06, + "loss": 1.5728, + "step": 4095 + }, + { + "epoch": 0.7029346147245581, + "grad_norm": 3.334511565697689, + "learning_rate": 4.28246241001019e-06, + "loss": 1.564, + "step": 4096 + }, + { + "epoch": 0.7031062296207311, + "grad_norm": 4.081831995536258, + "learning_rate": 4.277903063415167e-06, + "loss": 1.474, + "step": 4097 + }, + { + "epoch": 0.7032778445169041, + "grad_norm": 3.578665855917808, + "learning_rate": 4.273345484691416e-06, + "loss": 1.5292, + "step": 4098 + }, + { + "epoch": 0.703449459413077, + "grad_norm": 3.5301537985466687, + "learning_rate": 4.268789675247029e-06, + "loss": 1.4884, + "step": 4099 + }, + { + "epoch": 0.70362107430925, + "grad_norm": 3.9266155593547385, + "learning_rate": 4.264235636489542e-06, + "loss": 1.5206, + "step": 4100 + }, + { + "epoch": 0.703792689205423, + "grad_norm": 3.6417470684530726, + "learning_rate": 4.259683369825945e-06, + "loss": 1.6743, + "step": 4101 + }, + { + "epoch": 0.703964304101596, + "grad_norm": 4.112793970090927, + "learning_rate": 4.25513287666269e-06, + "loss": 1.5537, + "step": 4102 + }, + { + "epoch": 0.7041359189977691, + "grad_norm": 3.8793763042043365, + "learning_rate": 4.250584158405673e-06, + "loss": 1.544, + "step": 4103 + }, + { + "epoch": 0.704307533893942, + "grad_norm": 4.2882187494149235, + "learning_rate": 4.246037216460248e-06, + "loss": 1.6455, + "step": 4104 + }, + { + "epoch": 0.704479148790115, + "grad_norm": 4.237907550307505, + "learning_rate": 4.241492052231213e-06, + "loss": 1.4795, + "step": 4105 + }, + { + "epoch": 0.704650763686288, + "grad_norm": 4.150333992739989, + "learning_rate": 4.236948667122815e-06, + "loss": 1.5497, + "step": 4106 + }, + { + "epoch": 0.704822378582461, + "grad_norm": 3.6604784973274636, + "learning_rate": 4.232407062538761e-06, + "loss": 1.4729, + "step": 4107 + }, + { + "epoch": 0.7049939934786339, + "grad_norm": 3.89555778492882, + "learning_rate": 4.227867239882203e-06, + "loss": 1.5899, + "step": 4108 + }, + { + "epoch": 0.7051656083748069, + "grad_norm": 3.8994460047266943, + "learning_rate": 4.223329200555747e-06, + "loss": 1.6008, + "step": 4109 + }, + { + "epoch": 0.7053372232709799, + "grad_norm": 3.2922178156879975, + "learning_rate": 4.218792945961434e-06, + "loss": 1.5273, + "step": 4110 + }, + { + "epoch": 0.7055088381671529, + "grad_norm": 3.2643824493793194, + "learning_rate": 4.214258477500772e-06, + "loss": 1.5119, + "step": 4111 + }, + { + "epoch": 0.7056804530633259, + "grad_norm": 3.6014945434698267, + "learning_rate": 4.2097257965747005e-06, + "loss": 1.4116, + "step": 4112 + }, + { + "epoch": 0.7058520679594988, + "grad_norm": 3.850852055560369, + "learning_rate": 4.205194904583619e-06, + "loss": 1.5465, + "step": 4113 + }, + { + "epoch": 0.7060236828556719, + "grad_norm": 4.299358524114042, + "learning_rate": 4.200665802927373e-06, + "loss": 1.4715, + "step": 4114 + }, 
+ { + "epoch": 0.7061952977518449, + "grad_norm": 3.472152201839758, + "learning_rate": 4.196138493005245e-06, + "loss": 1.5562, + "step": 4115 + }, + { + "epoch": 0.7063669126480179, + "grad_norm": 4.465213204247648, + "learning_rate": 4.191612976215973e-06, + "loss": 1.722, + "step": 4116 + }, + { + "epoch": 0.7065385275441909, + "grad_norm": 3.983595751429719, + "learning_rate": 4.1870892539577435e-06, + "loss": 1.5408, + "step": 4117 + }, + { + "epoch": 0.7067101424403638, + "grad_norm": 3.964024583986105, + "learning_rate": 4.182567327628177e-06, + "loss": 1.6629, + "step": 4118 + }, + { + "epoch": 0.7068817573365368, + "grad_norm": 3.658129939311069, + "learning_rate": 4.178047198624351e-06, + "loss": 1.3729, + "step": 4119 + }, + { + "epoch": 0.7070533722327098, + "grad_norm": 3.9613033011493473, + "learning_rate": 4.173528868342777e-06, + "loss": 1.5596, + "step": 4120 + }, + { + "epoch": 0.7072249871288828, + "grad_norm": 3.8130375825594163, + "learning_rate": 4.16901233817942e-06, + "loss": 1.47, + "step": 4121 + }, + { + "epoch": 0.7073966020250557, + "grad_norm": 3.4381252294591302, + "learning_rate": 4.164497609529685e-06, + "loss": 1.6529, + "step": 4122 + }, + { + "epoch": 0.7075682169212287, + "grad_norm": 3.2021211782658074, + "learning_rate": 4.159984683788424e-06, + "loss": 1.4202, + "step": 4123 + }, + { + "epoch": 0.7077398318174017, + "grad_norm": 4.073196476611487, + "learning_rate": 4.1554735623499244e-06, + "loss": 1.6303, + "step": 4124 + }, + { + "epoch": 0.7079114467135748, + "grad_norm": 3.364889360925374, + "learning_rate": 4.150964246607918e-06, + "loss": 1.5816, + "step": 4125 + }, + { + "epoch": 0.7080830616097478, + "grad_norm": 3.545722653185063, + "learning_rate": 4.1464567379555846e-06, + "loss": 1.4743, + "step": 4126 + }, + { + "epoch": 0.7082546765059207, + "grad_norm": 3.8830091650422114, + "learning_rate": 4.141951037785541e-06, + "loss": 1.8062, + "step": 4127 + }, + { + "epoch": 0.7084262914020937, + "grad_norm": 4.245335656554135, + "learning_rate": 4.1374471474898514e-06, + "loss": 1.4273, + "step": 4128 + }, + { + "epoch": 0.7085979062982667, + "grad_norm": 3.958793628954796, + "learning_rate": 4.132945068460011e-06, + "loss": 1.6064, + "step": 4129 + }, + { + "epoch": 0.7087695211944397, + "grad_norm": 4.658720578637188, + "learning_rate": 4.128444802086958e-06, + "loss": 1.6416, + "step": 4130 + }, + { + "epoch": 0.7089411360906127, + "grad_norm": 4.419496815867452, + "learning_rate": 4.123946349761075e-06, + "loss": 1.5202, + "step": 4131 + }, + { + "epoch": 0.7091127509867856, + "grad_norm": 3.9191607510646786, + "learning_rate": 4.1194497128721846e-06, + "loss": 1.6093, + "step": 4132 + }, + { + "epoch": 0.7092843658829586, + "grad_norm": 3.802835223864519, + "learning_rate": 4.114954892809545e-06, + "loss": 1.3616, + "step": 4133 + }, + { + "epoch": 0.7094559807791316, + "grad_norm": 4.804130466472091, + "learning_rate": 4.110461890961851e-06, + "loss": 1.5285, + "step": 4134 + }, + { + "epoch": 0.7096275956753046, + "grad_norm": 3.5210648795658868, + "learning_rate": 4.105970708717244e-06, + "loss": 1.7624, + "step": 4135 + }, + { + "epoch": 0.7097992105714777, + "grad_norm": 4.190089036123137, + "learning_rate": 4.101481347463291e-06, + "loss": 1.6321, + "step": 4136 + }, + { + "epoch": 0.7099708254676506, + "grad_norm": 3.844657666967626, + "learning_rate": 4.096993808587007e-06, + "loss": 1.5408, + "step": 4137 + }, + { + "epoch": 0.7101424403638236, + "grad_norm": 3.3821232935527044, + "learning_rate": 4.092508093474841e-06, + 
"loss": 1.6364, + "step": 4138 + }, + { + "epoch": 0.7103140552599966, + "grad_norm": 3.8616701090670578, + "learning_rate": 4.088024203512676e-06, + "loss": 1.7954, + "step": 4139 + }, + { + "epoch": 0.7104856701561696, + "grad_norm": 3.9356325259720935, + "learning_rate": 4.083542140085832e-06, + "loss": 1.8199, + "step": 4140 + }, + { + "epoch": 0.7106572850523425, + "grad_norm": 3.237348938620819, + "learning_rate": 4.079061904579071e-06, + "loss": 1.2986, + "step": 4141 + }, + { + "epoch": 0.7108288999485155, + "grad_norm": 3.7419993517779364, + "learning_rate": 4.074583498376577e-06, + "loss": 1.5519, + "step": 4142 + }, + { + "epoch": 0.7110005148446885, + "grad_norm": 3.354023949317062, + "learning_rate": 4.070106922861985e-06, + "loss": 1.6365, + "step": 4143 + }, + { + "epoch": 0.7111721297408615, + "grad_norm": 4.031039241130169, + "learning_rate": 4.065632179418348e-06, + "loss": 1.6337, + "step": 4144 + }, + { + "epoch": 0.7113437446370345, + "grad_norm": 4.066715301336402, + "learning_rate": 4.061159269428165e-06, + "loss": 1.728, + "step": 4145 + }, + { + "epoch": 0.7115153595332074, + "grad_norm": 3.897809089063765, + "learning_rate": 4.056688194273362e-06, + "loss": 1.6542, + "step": 4146 + }, + { + "epoch": 0.7116869744293804, + "grad_norm": 4.4778195751932826, + "learning_rate": 4.0522189553353075e-06, + "loss": 1.4832, + "step": 4147 + }, + { + "epoch": 0.7118585893255535, + "grad_norm": 3.181424112780336, + "learning_rate": 4.047751553994791e-06, + "loss": 1.3256, + "step": 4148 + }, + { + "epoch": 0.7120302042217265, + "grad_norm": 3.3504020389732947, + "learning_rate": 4.043285991632035e-06, + "loss": 1.4827, + "step": 4149 + }, + { + "epoch": 0.7122018191178995, + "grad_norm": 3.3747065996638423, + "learning_rate": 4.038822269626703e-06, + "loss": 1.3242, + "step": 4150 + }, + { + "epoch": 0.7123734340140724, + "grad_norm": 4.009696666042803, + "learning_rate": 4.034360389357883e-06, + "loss": 1.4059, + "step": 4151 + }, + { + "epoch": 0.7125450489102454, + "grad_norm": 4.102033684209833, + "learning_rate": 4.029900352204102e-06, + "loss": 1.5107, + "step": 4152 + }, + { + "epoch": 0.7127166638064184, + "grad_norm": 3.00024042828498, + "learning_rate": 4.0254421595433045e-06, + "loss": 1.2681, + "step": 4153 + }, + { + "epoch": 0.7128882787025914, + "grad_norm": 3.216592227468947, + "learning_rate": 4.020985812752871e-06, + "loss": 1.6086, + "step": 4154 + }, + { + "epoch": 0.7130598935987644, + "grad_norm": 3.654491742778158, + "learning_rate": 4.016531313209616e-06, + "loss": 1.6507, + "step": 4155 + }, + { + "epoch": 0.7132315084949373, + "grad_norm": 3.2104212583477416, + "learning_rate": 4.01207866228978e-06, + "loss": 1.3259, + "step": 4156 + }, + { + "epoch": 0.7134031233911103, + "grad_norm": 4.413950328751978, + "learning_rate": 4.007627861369034e-06, + "loss": 1.5079, + "step": 4157 + }, + { + "epoch": 0.7135747382872833, + "grad_norm": 3.9291451747795056, + "learning_rate": 4.003178911822472e-06, + "loss": 1.6011, + "step": 4158 + }, + { + "epoch": 0.7137463531834564, + "grad_norm": 3.6307195680910533, + "learning_rate": 3.9987318150246235e-06, + "loss": 1.5678, + "step": 4159 + }, + { + "epoch": 0.7139179680796293, + "grad_norm": 3.649961139626407, + "learning_rate": 3.994286572349438e-06, + "loss": 1.4753, + "step": 4160 + }, + { + "epoch": 0.7140895829758023, + "grad_norm": 3.905880519082177, + "learning_rate": 3.989843185170298e-06, + "loss": 1.7013, + "step": 4161 + }, + { + "epoch": 0.7142611978719753, + "grad_norm": 4.337249873636874, + 
"learning_rate": 3.985401654860014e-06, + "loss": 1.4474, + "step": 4162 + }, + { + "epoch": 0.7144328127681483, + "grad_norm": 3.7770609259127053, + "learning_rate": 3.980961982790813e-06, + "loss": 1.4295, + "step": 4163 + }, + { + "epoch": 0.7146044276643213, + "grad_norm": 3.747623510264429, + "learning_rate": 3.976524170334358e-06, + "loss": 1.4052, + "step": 4164 + }, + { + "epoch": 0.7147760425604942, + "grad_norm": 3.544264003828599, + "learning_rate": 3.972088218861738e-06, + "loss": 1.347, + "step": 4165 + }, + { + "epoch": 0.7149476574566672, + "grad_norm": 3.3736560903106207, + "learning_rate": 3.967654129743457e-06, + "loss": 1.7092, + "step": 4166 + }, + { + "epoch": 0.7151192723528402, + "grad_norm": 3.5771774790171027, + "learning_rate": 3.963221904349454e-06, + "loss": 1.5427, + "step": 4167 + }, + { + "epoch": 0.7152908872490132, + "grad_norm": 3.392893352972987, + "learning_rate": 3.958791544049082e-06, + "loss": 1.4877, + "step": 4168 + }, + { + "epoch": 0.7154625021451863, + "grad_norm": 3.290042003772946, + "learning_rate": 3.9543630502111266e-06, + "loss": 1.3702, + "step": 4169 + }, + { + "epoch": 0.7156341170413592, + "grad_norm": 3.692144051494739, + "learning_rate": 3.949936424203795e-06, + "loss": 1.5409, + "step": 4170 + }, + { + "epoch": 0.7158057319375322, + "grad_norm": 3.552685893909201, + "learning_rate": 3.945511667394719e-06, + "loss": 1.5695, + "step": 4171 + }, + { + "epoch": 0.7159773468337052, + "grad_norm": 4.186359995509498, + "learning_rate": 3.9410887811509465e-06, + "loss": 1.6776, + "step": 4172 + }, + { + "epoch": 0.7161489617298782, + "grad_norm": 3.863766662151149, + "learning_rate": 3.936667766838947e-06, + "loss": 1.8969, + "step": 4173 + }, + { + "epoch": 0.7163205766260511, + "grad_norm": 4.012929081461003, + "learning_rate": 3.9322486258246195e-06, + "loss": 1.6432, + "step": 4174 + }, + { + "epoch": 0.7164921915222241, + "grad_norm": 3.715953431646086, + "learning_rate": 3.927831359473281e-06, + "loss": 1.4083, + "step": 4175 + }, + { + "epoch": 0.7166638064183971, + "grad_norm": 2.982423299890264, + "learning_rate": 3.923415969149671e-06, + "loss": 1.666, + "step": 4176 + }, + { + "epoch": 0.7168354213145701, + "grad_norm": 3.997966294065927, + "learning_rate": 3.919002456217944e-06, + "loss": 1.5069, + "step": 4177 + }, + { + "epoch": 0.7170070362107431, + "grad_norm": 3.5570816577556554, + "learning_rate": 3.914590822041673e-06, + "loss": 1.6799, + "step": 4178 + }, + { + "epoch": 0.717178651106916, + "grad_norm": 3.441927183853842, + "learning_rate": 3.91018106798386e-06, + "loss": 1.4168, + "step": 4179 + }, + { + "epoch": 0.717350266003089, + "grad_norm": 3.886653666454556, + "learning_rate": 3.9057731954069225e-06, + "loss": 1.6133, + "step": 4180 + }, + { + "epoch": 0.717521880899262, + "grad_norm": 3.470311096561256, + "learning_rate": 3.901367205672695e-06, + "loss": 1.5119, + "step": 4181 + }, + { + "epoch": 0.7176934957954351, + "grad_norm": 4.073690221848995, + "learning_rate": 3.896963100142427e-06, + "loss": 1.4679, + "step": 4182 + }, + { + "epoch": 0.7178651106916081, + "grad_norm": 3.8517048673548637, + "learning_rate": 3.892560880176795e-06, + "loss": 1.4875, + "step": 4183 + }, + { + "epoch": 0.718036725587781, + "grad_norm": 3.794484124652863, + "learning_rate": 3.888160547135881e-06, + "loss": 1.5112, + "step": 4184 + }, + { + "epoch": 0.718208340483954, + "grad_norm": 3.752988556760734, + "learning_rate": 3.883762102379193e-06, + "loss": 1.5393, + "step": 4185 + }, + { + "epoch": 0.718379955380127, + "grad_norm": 
3.9074788540079135, + "learning_rate": 3.8793655472656585e-06, + "loss": 1.3333, + "step": 4186 + }, + { + "epoch": 0.7185515702763, + "grad_norm": 3.9272319866250984, + "learning_rate": 3.874970883153607e-06, + "loss": 1.4151, + "step": 4187 + }, + { + "epoch": 0.718723185172473, + "grad_norm": 3.857044572606296, + "learning_rate": 3.870578111400798e-06, + "loss": 1.6884, + "step": 4188 + }, + { + "epoch": 0.7188948000686459, + "grad_norm": 3.8667492396619743, + "learning_rate": 3.866187233364402e-06, + "loss": 1.7396, + "step": 4189 + }, + { + "epoch": 0.7190664149648189, + "grad_norm": 4.05232268287602, + "learning_rate": 3.861798250400998e-06, + "loss": 1.5502, + "step": 4190 + }, + { + "epoch": 0.719238029860992, + "grad_norm": 4.056784155266631, + "learning_rate": 3.857411163866593e-06, + "loss": 1.765, + "step": 4191 + }, + { + "epoch": 0.719409644757165, + "grad_norm": 4.2670263815396, + "learning_rate": 3.853025975116591e-06, + "loss": 1.6919, + "step": 4192 + }, + { + "epoch": 0.7195812596533379, + "grad_norm": 4.344422849915019, + "learning_rate": 3.8486426855058214e-06, + "loss": 1.79, + "step": 4193 + }, + { + "epoch": 0.7197528745495109, + "grad_norm": 3.6431004901370896, + "learning_rate": 3.844261296388526e-06, + "loss": 1.5082, + "step": 4194 + }, + { + "epoch": 0.7199244894456839, + "grad_norm": 4.427966420765549, + "learning_rate": 3.83988180911836e-06, + "loss": 1.4694, + "step": 4195 + }, + { + "epoch": 0.7200961043418569, + "grad_norm": 4.580751526339576, + "learning_rate": 3.835504225048385e-06, + "loss": 1.4903, + "step": 4196 + }, + { + "epoch": 0.7202677192380299, + "grad_norm": 3.718492663068725, + "learning_rate": 3.831128545531076e-06, + "loss": 1.4353, + "step": 4197 + }, + { + "epoch": 0.7204393341342028, + "grad_norm": 3.7623360213805883, + "learning_rate": 3.8267547719183225e-06, + "loss": 1.4407, + "step": 4198 + }, + { + "epoch": 0.7206109490303758, + "grad_norm": 2.8888406331867293, + "learning_rate": 3.822382905561428e-06, + "loss": 1.3856, + "step": 4199 + }, + { + "epoch": 0.7207825639265488, + "grad_norm": 3.9904595803750644, + "learning_rate": 3.818012947811103e-06, + "loss": 1.6361, + "step": 4200 + }, + { + "epoch": 0.7209541788227218, + "grad_norm": 4.40424436074814, + "learning_rate": 3.8136449000174667e-06, + "loss": 1.5679, + "step": 4201 + }, + { + "epoch": 0.7211257937188948, + "grad_norm": 4.3122044623779985, + "learning_rate": 3.8092787635300455e-06, + "loss": 1.6252, + "step": 4202 + }, + { + "epoch": 0.7212974086150677, + "grad_norm": 3.0944638527457875, + "learning_rate": 3.804914539697785e-06, + "loss": 1.2539, + "step": 4203 + }, + { + "epoch": 0.7214690235112408, + "grad_norm": 3.4475272908740187, + "learning_rate": 3.8005522298690333e-06, + "loss": 1.3981, + "step": 4204 + }, + { + "epoch": 0.7216406384074138, + "grad_norm": 3.553179655067805, + "learning_rate": 3.796191835391552e-06, + "loss": 1.5454, + "step": 4205 + }, + { + "epoch": 0.7218122533035868, + "grad_norm": 3.711527962389041, + "learning_rate": 3.7918333576124997e-06, + "loss": 1.5268, + "step": 4206 + }, + { + "epoch": 0.7219838681997598, + "grad_norm": 4.226263184512579, + "learning_rate": 3.787476797878459e-06, + "loss": 1.6009, + "step": 4207 + }, + { + "epoch": 0.7221554830959327, + "grad_norm": 3.4436668630869995, + "learning_rate": 3.7831221575354036e-06, + "loss": 1.6827, + "step": 4208 + }, + { + "epoch": 0.7223270979921057, + "grad_norm": 3.699911617745991, + "learning_rate": 3.7787694379287256e-06, + "loss": 1.5477, + "step": 4209 + }, + { + "epoch": 
0.7224987128882787, + "grad_norm": 3.8847746616409933, + "learning_rate": 3.7744186404032233e-06, + "loss": 1.7099, + "step": 4210 + }, + { + "epoch": 0.7226703277844517, + "grad_norm": 4.239215143982443, + "learning_rate": 3.7700697663030916e-06, + "loss": 1.5431, + "step": 4211 + }, + { + "epoch": 0.7228419426806246, + "grad_norm": 3.8365329567483775, + "learning_rate": 3.765722816971942e-06, + "loss": 1.4774, + "step": 4212 + }, + { + "epoch": 0.7230135575767976, + "grad_norm": 3.444829700041154, + "learning_rate": 3.7613777937527873e-06, + "loss": 1.3581, + "step": 4213 + }, + { + "epoch": 0.7231851724729706, + "grad_norm": 4.17790174750879, + "learning_rate": 3.7570346979880412e-06, + "loss": 1.4967, + "step": 4214 + }, + { + "epoch": 0.7233567873691437, + "grad_norm": 5.598203431995362, + "learning_rate": 3.75269353101953e-06, + "loss": 1.5852, + "step": 4215 + }, + { + "epoch": 0.7235284022653167, + "grad_norm": 3.7113210566465127, + "learning_rate": 3.7483542941884733e-06, + "loss": 1.3141, + "step": 4216 + }, + { + "epoch": 0.7237000171614896, + "grad_norm": 3.694554808473566, + "learning_rate": 3.744016988835504e-06, + "loss": 1.4322, + "step": 4217 + }, + { + "epoch": 0.7238716320576626, + "grad_norm": 4.118538554416828, + "learning_rate": 3.7396816163006554e-06, + "loss": 1.615, + "step": 4218 + }, + { + "epoch": 0.7240432469538356, + "grad_norm": 3.687426293700273, + "learning_rate": 3.735348177923367e-06, + "loss": 1.4754, + "step": 4219 + }, + { + "epoch": 0.7242148618500086, + "grad_norm": 3.246083589221187, + "learning_rate": 3.7310166750424727e-06, + "loss": 1.5051, + "step": 4220 + }, + { + "epoch": 0.7243864767461816, + "grad_norm": 3.6638808136753775, + "learning_rate": 3.7266871089962086e-06, + "loss": 1.5304, + "step": 4221 + }, + { + "epoch": 0.7245580916423545, + "grad_norm": 3.6315075448144403, + "learning_rate": 3.7223594811222197e-06, + "loss": 1.4357, + "step": 4222 + }, + { + "epoch": 0.7247297065385275, + "grad_norm": 3.843547297793905, + "learning_rate": 3.7180337927575507e-06, + "loss": 1.5876, + "step": 4223 + }, + { + "epoch": 0.7249013214347005, + "grad_norm": 3.582255070343444, + "learning_rate": 3.7137100452386478e-06, + "loss": 1.4231, + "step": 4224 + }, + { + "epoch": 0.7250729363308736, + "grad_norm": 4.068006016681625, + "learning_rate": 3.7093882399013504e-06, + "loss": 1.615, + "step": 4225 + }, + { + "epoch": 0.7252445512270465, + "grad_norm": 3.6184958776841585, + "learning_rate": 3.7050683780809016e-06, + "loss": 1.501, + "step": 4226 + }, + { + "epoch": 0.7254161661232195, + "grad_norm": 3.488722342137974, + "learning_rate": 3.700750461111945e-06, + "loss": 1.5459, + "step": 4227 + }, + { + "epoch": 0.7255877810193925, + "grad_norm": 3.9436530743783336, + "learning_rate": 3.6964344903285267e-06, + "loss": 1.4607, + "step": 4228 + }, + { + "epoch": 0.7257593959155655, + "grad_norm": 4.540621473782923, + "learning_rate": 3.6921204670640886e-06, + "loss": 1.6634, + "step": 4229 + }, + { + "epoch": 0.7259310108117385, + "grad_norm": 3.727330336649548, + "learning_rate": 3.6878083926514654e-06, + "loss": 1.6113, + "step": 4230 + }, + { + "epoch": 0.7261026257079114, + "grad_norm": 3.694818917390779, + "learning_rate": 3.6834982684229003e-06, + "loss": 1.5466, + "step": 4231 + }, + { + "epoch": 0.7262742406040844, + "grad_norm": 3.8740477659751558, + "learning_rate": 3.6791900957100224e-06, + "loss": 1.5952, + "step": 4232 + }, + { + "epoch": 0.7264458555002574, + "grad_norm": 4.374677312714116, + "learning_rate": 3.674883875843868e-06, + "loss": 
1.4432, + "step": 4233 + }, + { + "epoch": 0.7266174703964304, + "grad_norm": 4.811934275320881, + "learning_rate": 3.6705796101548664e-06, + "loss": 1.527, + "step": 4234 + }, + { + "epoch": 0.7267890852926034, + "grad_norm": 3.9073849723155996, + "learning_rate": 3.6662772999728392e-06, + "loss": 1.4628, + "step": 4235 + }, + { + "epoch": 0.7269607001887763, + "grad_norm": 4.108912474902295, + "learning_rate": 3.661976946627007e-06, + "loss": 1.5339, + "step": 4236 + }, + { + "epoch": 0.7271323150849494, + "grad_norm": 4.476139117236807, + "learning_rate": 3.6576785514459933e-06, + "loss": 1.7686, + "step": 4237 + }, + { + "epoch": 0.7273039299811224, + "grad_norm": 3.832367535708342, + "learning_rate": 3.653382115757801e-06, + "loss": 1.6848, + "step": 4238 + }, + { + "epoch": 0.7274755448772954, + "grad_norm": 3.9488026516957175, + "learning_rate": 3.649087640889841e-06, + "loss": 1.4907, + "step": 4239 + }, + { + "epoch": 0.7276471597734684, + "grad_norm": 4.074351772137394, + "learning_rate": 3.6447951281689097e-06, + "loss": 1.3454, + "step": 4240 + }, + { + "epoch": 0.7278187746696413, + "grad_norm": 3.6569211851443217, + "learning_rate": 3.6405045789212025e-06, + "loss": 1.5901, + "step": 4241 + }, + { + "epoch": 0.7279903895658143, + "grad_norm": 3.545716979705867, + "learning_rate": 3.6362159944723075e-06, + "loss": 1.4607, + "step": 4242 + }, + { + "epoch": 0.7281620044619873, + "grad_norm": 4.375404581666328, + "learning_rate": 3.6319293761472073e-06, + "loss": 1.5912, + "step": 4243 + }, + { + "epoch": 0.7283336193581603, + "grad_norm": 4.0792360801882435, + "learning_rate": 3.627644725270272e-06, + "loss": 1.7369, + "step": 4244 + }, + { + "epoch": 0.7285052342543332, + "grad_norm": 4.326875157272401, + "learning_rate": 3.623362043165263e-06, + "loss": 1.6896, + "step": 4245 + }, + { + "epoch": 0.7286768491505062, + "grad_norm": 4.236716240356844, + "learning_rate": 3.61908133115534e-06, + "loss": 1.6729, + "step": 4246 + }, + { + "epoch": 0.7288484640466792, + "grad_norm": 3.930875256474182, + "learning_rate": 3.614802590563051e-06, + "loss": 1.606, + "step": 4247 + }, + { + "epoch": 0.7290200789428523, + "grad_norm": 3.9124333770742235, + "learning_rate": 3.610525822710339e-06, + "loss": 1.5453, + "step": 4248 + }, + { + "epoch": 0.7291916938390253, + "grad_norm": 3.775756731600947, + "learning_rate": 3.6062510289185305e-06, + "loss": 1.3937, + "step": 4249 + }, + { + "epoch": 0.7293633087351982, + "grad_norm": 4.461266934920697, + "learning_rate": 3.601978210508339e-06, + "loss": 1.6767, + "step": 4250 + }, + { + "epoch": 0.7295349236313712, + "grad_norm": 4.162555711148938, + "learning_rate": 3.597707368799881e-06, + "loss": 1.6228, + "step": 4251 + }, + { + "epoch": 0.7297065385275442, + "grad_norm": 3.467984933239619, + "learning_rate": 3.5934385051126507e-06, + "loss": 1.4967, + "step": 4252 + }, + { + "epoch": 0.7298781534237172, + "grad_norm": 3.1445468777807255, + "learning_rate": 3.5891716207655423e-06, + "loss": 1.4428, + "step": 4253 + }, + { + "epoch": 0.7300497683198902, + "grad_norm": 4.31484890176354, + "learning_rate": 3.5849067170768237e-06, + "loss": 1.6264, + "step": 4254 + }, + { + "epoch": 0.7302213832160631, + "grad_norm": 5.075828693659018, + "learning_rate": 3.580643795364166e-06, + "loss": 1.4281, + "step": 4255 + }, + { + "epoch": 0.7303929981122361, + "grad_norm": 4.245735053069527, + "learning_rate": 3.5763828569446122e-06, + "loss": 1.447, + "step": 4256 + }, + { + "epoch": 0.7305646130084091, + "grad_norm": 3.5932678574319006, + 
"learning_rate": 3.5721239031346067e-06, + "loss": 1.559, + "step": 4257 + }, + { + "epoch": 0.7307362279045821, + "grad_norm": 4.0156421473151855, + "learning_rate": 3.5678669352499783e-06, + "loss": 1.5363, + "step": 4258 + }, + { + "epoch": 0.7309078428007552, + "grad_norm": 3.9641798252776175, + "learning_rate": 3.563611954605931e-06, + "loss": 1.5863, + "step": 4259 + }, + { + "epoch": 0.7310794576969281, + "grad_norm": 3.6682740174340145, + "learning_rate": 3.5593589625170668e-06, + "loss": 1.5641, + "step": 4260 + }, + { + "epoch": 0.7312510725931011, + "grad_norm": 3.700370241280364, + "learning_rate": 3.5551079602973735e-06, + "loss": 1.4792, + "step": 4261 + }, + { + "epoch": 0.7314226874892741, + "grad_norm": 3.6183367563133952, + "learning_rate": 3.5508589492602143e-06, + "loss": 1.522, + "step": 4262 + }, + { + "epoch": 0.7315943023854471, + "grad_norm": 3.226406926106017, + "learning_rate": 3.5466119307183465e-06, + "loss": 1.3982, + "step": 4263 + }, + { + "epoch": 0.73176591728162, + "grad_norm": 3.7197236361283026, + "learning_rate": 3.542366905983905e-06, + "loss": 1.3868, + "step": 4264 + }, + { + "epoch": 0.731937532177793, + "grad_norm": 4.219786376061305, + "learning_rate": 3.5381238763684134e-06, + "loss": 1.5433, + "step": 4265 + }, + { + "epoch": 0.732109147073966, + "grad_norm": 4.20728905880942, + "learning_rate": 3.5338828431827777e-06, + "loss": 1.5269, + "step": 4266 + }, + { + "epoch": 0.732280761970139, + "grad_norm": 3.7356235183249686, + "learning_rate": 3.5296438077372908e-06, + "loss": 1.4627, + "step": 4267 + }, + { + "epoch": 0.732452376866312, + "grad_norm": 3.9121336191067004, + "learning_rate": 3.5254067713416218e-06, + "loss": 1.4723, + "step": 4268 + }, + { + "epoch": 0.7326239917624849, + "grad_norm": 3.9774343987033003, + "learning_rate": 3.5211717353048204e-06, + "loss": 1.3861, + "step": 4269 + }, + { + "epoch": 0.732795606658658, + "grad_norm": 3.683169633164081, + "learning_rate": 3.516938700935325e-06, + "loss": 1.3967, + "step": 4270 + }, + { + "epoch": 0.732967221554831, + "grad_norm": 3.8669803013588315, + "learning_rate": 3.5127076695409568e-06, + "loss": 1.3631, + "step": 4271 + }, + { + "epoch": 0.733138836451004, + "grad_norm": 4.060345716869961, + "learning_rate": 3.508478642428914e-06, + "loss": 1.6325, + "step": 4272 + }, + { + "epoch": 0.733310451347177, + "grad_norm": 3.3206224113874425, + "learning_rate": 3.5042516209057763e-06, + "loss": 1.3753, + "step": 4273 + }, + { + "epoch": 0.7334820662433499, + "grad_norm": 3.2209484177252454, + "learning_rate": 3.500026606277499e-06, + "loss": 1.3568, + "step": 4274 + }, + { + "epoch": 0.7336536811395229, + "grad_norm": 3.8786046629031325, + "learning_rate": 3.4958035998494255e-06, + "loss": 1.5166, + "step": 4275 + }, + { + "epoch": 0.7338252960356959, + "grad_norm": 3.8555135318509115, + "learning_rate": 3.4915826029262745e-06, + "loss": 1.6155, + "step": 4276 + }, + { + "epoch": 0.7339969109318689, + "grad_norm": 3.979545349046841, + "learning_rate": 3.4873636168121496e-06, + "loss": 1.5755, + "step": 4277 + }, + { + "epoch": 0.7341685258280419, + "grad_norm": 3.80412037767361, + "learning_rate": 3.4831466428105196e-06, + "loss": 1.5559, + "step": 4278 + }, + { + "epoch": 0.7343401407242148, + "grad_norm": 3.6481101800033624, + "learning_rate": 3.47893168222425e-06, + "loss": 1.4232, + "step": 4279 + }, + { + "epoch": 0.7345117556203878, + "grad_norm": 4.380020561943973, + "learning_rate": 3.4747187363555655e-06, + "loss": 1.5165, + "step": 4280 + }, + { + "epoch": 
0.7346833705165609, + "grad_norm": 4.2090574363288535, + "learning_rate": 3.4705078065060804e-06, + "loss": 1.5358, + "step": 4281 + }, + { + "epoch": 0.7348549854127339, + "grad_norm": 4.2454950701074, + "learning_rate": 3.4662988939767885e-06, + "loss": 1.7111, + "step": 4282 + }, + { + "epoch": 0.7350266003089068, + "grad_norm": 4.201386951428244, + "learning_rate": 3.462092000068047e-06, + "loss": 1.7071, + "step": 4283 + }, + { + "epoch": 0.7351982152050798, + "grad_norm": 3.9244992096232396, + "learning_rate": 3.4578871260796e-06, + "loss": 1.5966, + "step": 4284 + }, + { + "epoch": 0.7353698301012528, + "grad_norm": 3.68781316870011, + "learning_rate": 3.4536842733105702e-06, + "loss": 1.4676, + "step": 4285 + }, + { + "epoch": 0.7355414449974258, + "grad_norm": 4.1067657843468846, + "learning_rate": 3.4494834430594426e-06, + "loss": 1.3194, + "step": 4286 + }, + { + "epoch": 0.7357130598935988, + "grad_norm": 3.6377749201365344, + "learning_rate": 3.4452846366240922e-06, + "loss": 1.5585, + "step": 4287 + }, + { + "epoch": 0.7358846747897717, + "grad_norm": 4.355868122475021, + "learning_rate": 3.4410878553017547e-06, + "loss": 1.6817, + "step": 4288 + }, + { + "epoch": 0.7360562896859447, + "grad_norm": 3.383902265050565, + "learning_rate": 3.4368931003890516e-06, + "loss": 1.522, + "step": 4289 + }, + { + "epoch": 0.7362279045821177, + "grad_norm": 4.163455615183541, + "learning_rate": 3.432700373181974e-06, + "loss": 1.5336, + "step": 4290 + }, + { + "epoch": 0.7363995194782907, + "grad_norm": 3.7482452895417167, + "learning_rate": 3.4285096749758896e-06, + "loss": 1.4256, + "step": 4291 + }, + { + "epoch": 0.7365711343744638, + "grad_norm": 4.102699385731431, + "learning_rate": 3.424321007065532e-06, + "loss": 1.5015, + "step": 4292 + }, + { + "epoch": 0.7367427492706367, + "grad_norm": 3.5676477686557293, + "learning_rate": 3.4201343707450087e-06, + "loss": 1.535, + "step": 4293 + }, + { + "epoch": 0.7369143641668097, + "grad_norm": 4.3390912734984655, + "learning_rate": 3.4159497673078067e-06, + "loss": 1.6811, + "step": 4294 + }, + { + "epoch": 0.7370859790629827, + "grad_norm": 3.9100003490084267, + "learning_rate": 3.4117671980467794e-06, + "loss": 1.4424, + "step": 4295 + }, + { + "epoch": 0.7372575939591557, + "grad_norm": 3.8109020729214955, + "learning_rate": 3.407586664254158e-06, + "loss": 1.764, + "step": 4296 + }, + { + "epoch": 0.7374292088553286, + "grad_norm": 3.7640562262112667, + "learning_rate": 3.403408167221536e-06, + "loss": 1.6261, + "step": 4297 + }, + { + "epoch": 0.7376008237515016, + "grad_norm": 4.428321995632007, + "learning_rate": 3.3992317082398774e-06, + "loss": 1.3457, + "step": 4298 + }, + { + "epoch": 0.7377724386476746, + "grad_norm": 4.0475446708955705, + "learning_rate": 3.395057288599525e-06, + "loss": 1.5551, + "step": 4299 + }, + { + "epoch": 0.7379440535438476, + "grad_norm": 3.882998035471941, + "learning_rate": 3.3908849095901865e-06, + "loss": 1.6307, + "step": 4300 + }, + { + "epoch": 0.7381156684400206, + "grad_norm": 3.5938893722636496, + "learning_rate": 3.386714572500943e-06, + "loss": 1.6644, + "step": 4301 + }, + { + "epoch": 0.7382872833361935, + "grad_norm": 4.156852828694422, + "learning_rate": 3.3825462786202367e-06, + "loss": 1.5669, + "step": 4302 + }, + { + "epoch": 0.7384588982323665, + "grad_norm": 4.9841980895273705, + "learning_rate": 3.378380029235888e-06, + "loss": 1.8169, + "step": 4303 + }, + { + "epoch": 0.7386305131285396, + "grad_norm": 4.336164860735623, + "learning_rate": 3.3742158256350756e-06, + "loss": 
1.7264, + "step": 4304 + }, + { + "epoch": 0.7388021280247126, + "grad_norm": 3.548662828843063, + "learning_rate": 3.370053669104354e-06, + "loss": 1.6414, + "step": 4305 + }, + { + "epoch": 0.7389737429208856, + "grad_norm": 4.9169637546645335, + "learning_rate": 3.3658935609296472e-06, + "loss": 1.5945, + "step": 4306 + }, + { + "epoch": 0.7391453578170585, + "grad_norm": 4.520864212645562, + "learning_rate": 3.361735502396235e-06, + "loss": 1.6859, + "step": 4307 + }, + { + "epoch": 0.7393169727132315, + "grad_norm": 4.629052609733307, + "learning_rate": 3.3575794947887742e-06, + "loss": 1.6373, + "step": 4308 + }, + { + "epoch": 0.7394885876094045, + "grad_norm": 3.959878356321814, + "learning_rate": 3.3534255393912885e-06, + "loss": 1.4297, + "step": 4309 + }, + { + "epoch": 0.7396602025055775, + "grad_norm": 3.8793786961428554, + "learning_rate": 3.349273637487156e-06, + "loss": 1.6751, + "step": 4310 + }, + { + "epoch": 0.7398318174017505, + "grad_norm": 3.7592403632021174, + "learning_rate": 3.345123790359135e-06, + "loss": 1.7202, + "step": 4311 + }, + { + "epoch": 0.7400034322979234, + "grad_norm": 3.8167671447340137, + "learning_rate": 3.3409759992893377e-06, + "loss": 1.57, + "step": 4312 + }, + { + "epoch": 0.7401750471940964, + "grad_norm": 3.4155260456210894, + "learning_rate": 3.3368302655592454e-06, + "loss": 1.6994, + "step": 4313 + }, + { + "epoch": 0.7403466620902694, + "grad_norm": 4.286894002448902, + "learning_rate": 3.3326865904497065e-06, + "loss": 1.8381, + "step": 4314 + }, + { + "epoch": 0.7405182769864425, + "grad_norm": 3.936691826143307, + "learning_rate": 3.3285449752409315e-06, + "loss": 1.4833, + "step": 4315 + }, + { + "epoch": 0.7406898918826154, + "grad_norm": 4.768572173515226, + "learning_rate": 3.3244054212124934e-06, + "loss": 1.5982, + "step": 4316 + }, + { + "epoch": 0.7408615067787884, + "grad_norm": 3.7730004473871848, + "learning_rate": 3.320267929643324e-06, + "loss": 1.3591, + "step": 4317 + }, + { + "epoch": 0.7410331216749614, + "grad_norm": 3.838244071972954, + "learning_rate": 3.316132501811725e-06, + "loss": 1.4783, + "step": 4318 + }, + { + "epoch": 0.7412047365711344, + "grad_norm": 4.632103751384495, + "learning_rate": 3.3119991389953597e-06, + "loss": 1.5828, + "step": 4319 + }, + { + "epoch": 0.7413763514673074, + "grad_norm": 3.7475944488748567, + "learning_rate": 3.307867842471254e-06, + "loss": 1.4898, + "step": 4320 + }, + { + "epoch": 0.7415479663634803, + "grad_norm": 4.107224198793785, + "learning_rate": 3.30373861351579e-06, + "loss": 1.4434, + "step": 4321 + }, + { + "epoch": 0.7417195812596533, + "grad_norm": 4.043393490083267, + "learning_rate": 3.299611453404712e-06, + "loss": 1.4274, + "step": 4322 + }, + { + "epoch": 0.7418911961558263, + "grad_norm": 3.912694064358962, + "learning_rate": 3.2954863634131295e-06, + "loss": 1.4371, + "step": 4323 + }, + { + "epoch": 0.7420628110519993, + "grad_norm": 3.7840593343484916, + "learning_rate": 3.29136334481551e-06, + "loss": 1.3687, + "step": 4324 + }, + { + "epoch": 0.7422344259481723, + "grad_norm": 3.609273572627841, + "learning_rate": 3.287242398885685e-06, + "loss": 1.5283, + "step": 4325 + }, + { + "epoch": 0.7424060408443452, + "grad_norm": 4.06308237770723, + "learning_rate": 3.2831235268968363e-06, + "loss": 1.4492, + "step": 4326 + }, + { + "epoch": 0.7425776557405183, + "grad_norm": 4.048618922071043, + "learning_rate": 3.2790067301215165e-06, + "loss": 1.5934, + "step": 4327 + }, + { + "epoch": 0.7427492706366913, + "grad_norm": 4.444850487302984, + 
"learning_rate": 3.274892009831624e-06, + "loss": 1.6145, + "step": 4328 + }, + { + "epoch": 0.7429208855328643, + "grad_norm": 3.3165885122042553, + "learning_rate": 3.2707793672984265e-06, + "loss": 1.319, + "step": 4329 + }, + { + "epoch": 0.7430925004290373, + "grad_norm": 3.427003278499009, + "learning_rate": 3.266668803792549e-06, + "loss": 1.3144, + "step": 4330 + }, + { + "epoch": 0.7432641153252102, + "grad_norm": 4.09055139220498, + "learning_rate": 3.2625603205839652e-06, + "loss": 1.4437, + "step": 4331 + }, + { + "epoch": 0.7434357302213832, + "grad_norm": 4.422786009591833, + "learning_rate": 3.258453918942016e-06, + "loss": 1.4695, + "step": 4332 + }, + { + "epoch": 0.7436073451175562, + "grad_norm": 4.19317083241391, + "learning_rate": 3.2543496001353968e-06, + "loss": 1.6418, + "step": 4333 + }, + { + "epoch": 0.7437789600137292, + "grad_norm": 4.360149015846294, + "learning_rate": 3.2502473654321532e-06, + "loss": 1.5356, + "step": 4334 + }, + { + "epoch": 0.7439505749099021, + "grad_norm": 4.0686903831257135, + "learning_rate": 3.2461472160996965e-06, + "loss": 1.512, + "step": 4335 + }, + { + "epoch": 0.7441221898060751, + "grad_norm": 4.835696832795532, + "learning_rate": 3.242049153404784e-06, + "loss": 1.521, + "step": 4336 + }, + { + "epoch": 0.7442938047022482, + "grad_norm": 4.006478001591315, + "learning_rate": 3.2379531786135364e-06, + "loss": 1.7772, + "step": 4337 + }, + { + "epoch": 0.7444654195984212, + "grad_norm": 3.630612051867948, + "learning_rate": 3.2338592929914247e-06, + "loss": 1.6493, + "step": 4338 + }, + { + "epoch": 0.7446370344945942, + "grad_norm": 3.9455832403832547, + "learning_rate": 3.2297674978032813e-06, + "loss": 1.6707, + "step": 4339 + }, + { + "epoch": 0.7448086493907671, + "grad_norm": 4.72664428829333, + "learning_rate": 3.225677794313281e-06, + "loss": 1.5191, + "step": 4340 + }, + { + "epoch": 0.7449802642869401, + "grad_norm": 4.494412940502362, + "learning_rate": 3.2215901837849593e-06, + "loss": 1.5572, + "step": 4341 + }, + { + "epoch": 0.7451518791831131, + "grad_norm": 3.7054500488679567, + "learning_rate": 3.217504667481205e-06, + "loss": 1.5167, + "step": 4342 + }, + { + "epoch": 0.7453234940792861, + "grad_norm": 3.6406811183637937, + "learning_rate": 3.21342124666426e-06, + "loss": 1.5049, + "step": 4343 + }, + { + "epoch": 0.7454951089754591, + "grad_norm": 3.407716422457096, + "learning_rate": 3.209339922595722e-06, + "loss": 1.3556, + "step": 4344 + }, + { + "epoch": 0.745666723871632, + "grad_norm": 3.329594279822077, + "learning_rate": 3.205260696536534e-06, + "loss": 1.4475, + "step": 4345 + }, + { + "epoch": 0.745838338767805, + "grad_norm": 3.499493027641549, + "learning_rate": 3.2011835697469894e-06, + "loss": 1.424, + "step": 4346 + }, + { + "epoch": 0.746009953663978, + "grad_norm": 4.147342968307836, + "learning_rate": 3.197108543486741e-06, + "loss": 1.5368, + "step": 4347 + }, + { + "epoch": 0.746181568560151, + "grad_norm": 5.174070905740056, + "learning_rate": 3.1930356190147904e-06, + "loss": 1.727, + "step": 4348 + }, + { + "epoch": 0.746353183456324, + "grad_norm": 4.350654957498894, + "learning_rate": 3.18896479758949e-06, + "loss": 1.6562, + "step": 4349 + }, + { + "epoch": 0.746524798352497, + "grad_norm": 4.216465648780869, + "learning_rate": 3.184896080468536e-06, + "loss": 1.3828, + "step": 4350 + }, + { + "epoch": 0.74669641324867, + "grad_norm": 4.131334440721682, + "learning_rate": 3.1808294689089856e-06, + "loss": 1.6477, + "step": 4351 + }, + { + "epoch": 0.746868028144843, + "grad_norm": 
3.780379139401222, + "learning_rate": 3.176764964167233e-06, + "loss": 1.4671, + "step": 4352 + }, + { + "epoch": 0.747039643041016, + "grad_norm": 4.035466042971817, + "learning_rate": 3.1727025674990297e-06, + "loss": 1.6253, + "step": 4353 + }, + { + "epoch": 0.7472112579371889, + "grad_norm": 4.523503512640381, + "learning_rate": 3.1686422801594786e-06, + "loss": 1.5261, + "step": 4354 + }, + { + "epoch": 0.7473828728333619, + "grad_norm": 4.31883278979497, + "learning_rate": 3.16458410340302e-06, + "loss": 1.4649, + "step": 4355 + }, + { + "epoch": 0.7475544877295349, + "grad_norm": 4.2683762988933855, + "learning_rate": 3.160528038483451e-06, + "loss": 1.6043, + "step": 4356 + }, + { + "epoch": 0.7477261026257079, + "grad_norm": 3.840626024511075, + "learning_rate": 3.1564740866539166e-06, + "loss": 1.643, + "step": 4357 + }, + { + "epoch": 0.747897717521881, + "grad_norm": 4.781855580486221, + "learning_rate": 3.152422249166902e-06, + "loss": 1.6599, + "step": 4358 + }, + { + "epoch": 0.7480693324180538, + "grad_norm": 3.7472182495196686, + "learning_rate": 3.148372527274246e-06, + "loss": 1.614, + "step": 4359 + }, + { + "epoch": 0.7482409473142269, + "grad_norm": 4.200890232144251, + "learning_rate": 3.1443249222271278e-06, + "loss": 1.7006, + "step": 4360 + }, + { + "epoch": 0.7484125622103999, + "grad_norm": 3.8148884685968665, + "learning_rate": 3.140279435276078e-06, + "loss": 1.5304, + "step": 4361 + }, + { + "epoch": 0.7485841771065729, + "grad_norm": 3.821136573563433, + "learning_rate": 3.1362360676709693e-06, + "loss": 1.5777, + "step": 4362 + }, + { + "epoch": 0.7487557920027459, + "grad_norm": 3.579578321326749, + "learning_rate": 3.1321948206610255e-06, + "loss": 1.8361, + "step": 4363 + }, + { + "epoch": 0.7489274068989188, + "grad_norm": 3.715706697950207, + "learning_rate": 3.128155695494807e-06, + "loss": 1.3442, + "step": 4364 + }, + { + "epoch": 0.7490990217950918, + "grad_norm": 4.190772879519168, + "learning_rate": 3.1241186934202196e-06, + "loss": 1.6312, + "step": 4365 + }, + { + "epoch": 0.7492706366912648, + "grad_norm": 3.7183261313466676, + "learning_rate": 3.120083815684518e-06, + "loss": 1.5766, + "step": 4366 + }, + { + "epoch": 0.7494422515874378, + "grad_norm": 4.355326002073972, + "learning_rate": 3.116051063534299e-06, + "loss": 1.4752, + "step": 4367 + }, + { + "epoch": 0.7496138664836107, + "grad_norm": 3.8506539861445326, + "learning_rate": 3.112020438215506e-06, + "loss": 1.6699, + "step": 4368 + }, + { + "epoch": 0.7497854813797837, + "grad_norm": 3.613591564230547, + "learning_rate": 3.107991940973417e-06, + "loss": 1.3734, + "step": 4369 + }, + { + "epoch": 0.7499570962759567, + "grad_norm": 3.4679787563472653, + "learning_rate": 3.1039655730526543e-06, + "loss": 1.6845, + "step": 4370 + }, + { + "epoch": 0.7501287111721298, + "grad_norm": 4.371918251736763, + "learning_rate": 3.0999413356971885e-06, + "loss": 1.6276, + "step": 4371 + }, + { + "epoch": 0.7503003260683028, + "grad_norm": 4.106581842373159, + "learning_rate": 3.0959192301503284e-06, + "loss": 1.6236, + "step": 4372 + }, + { + "epoch": 0.7504719409644757, + "grad_norm": 3.6726328270673836, + "learning_rate": 3.0918992576547257e-06, + "loss": 1.4985, + "step": 4373 + }, + { + "epoch": 0.7506435558606487, + "grad_norm": 4.093395687833719, + "learning_rate": 3.087881419452368e-06, + "loss": 1.3704, + "step": 4374 + }, + { + "epoch": 0.7508151707568217, + "grad_norm": 3.649386776645276, + "learning_rate": 3.083865716784592e-06, + "loss": 1.5872, + "step": 4375 + }, + { + "epoch": 
0.7509867856529947, + "grad_norm": 4.541331779282653, + "learning_rate": 3.079852150892063e-06, + "loss": 1.7192, + "step": 4376 + }, + { + "epoch": 0.7511584005491677, + "grad_norm": 4.080763925371064, + "learning_rate": 3.075840723014796e-06, + "loss": 1.4221, + "step": 4377 + }, + { + "epoch": 0.7513300154453406, + "grad_norm": 3.6730592754379443, + "learning_rate": 3.0718314343921465e-06, + "loss": 1.6523, + "step": 4378 + }, + { + "epoch": 0.7515016303415136, + "grad_norm": 4.31753251013167, + "learning_rate": 3.0678242862627974e-06, + "loss": 1.7085, + "step": 4379 + }, + { + "epoch": 0.7516732452376866, + "grad_norm": 4.075099729353553, + "learning_rate": 3.0638192798647804e-06, + "loss": 1.5867, + "step": 4380 + }, + { + "epoch": 0.7518448601338596, + "grad_norm": 3.7788045181733523, + "learning_rate": 3.0598164164354683e-06, + "loss": 1.632, + "step": 4381 + }, + { + "epoch": 0.7520164750300327, + "grad_norm": 4.371404573389474, + "learning_rate": 3.055815697211557e-06, + "loss": 1.479, + "step": 4382 + }, + { + "epoch": 0.7521880899262056, + "grad_norm": 3.969994890521598, + "learning_rate": 3.051817123429098e-06, + "loss": 1.5711, + "step": 4383 + }, + { + "epoch": 0.7523597048223786, + "grad_norm": 3.9423259909841284, + "learning_rate": 3.0478206963234645e-06, + "loss": 1.4418, + "step": 4384 + }, + { + "epoch": 0.7525313197185516, + "grad_norm": 5.099522267010024, + "learning_rate": 3.0438264171293762e-06, + "loss": 1.6361, + "step": 4385 + }, + { + "epoch": 0.7527029346147246, + "grad_norm": 4.29007202872139, + "learning_rate": 3.039834287080885e-06, + "loss": 1.5624, + "step": 4386 + }, + { + "epoch": 0.7528745495108975, + "grad_norm": 5.650833280005532, + "learning_rate": 3.035844307411384e-06, + "loss": 1.4914, + "step": 4387 + }, + { + "epoch": 0.7530461644070705, + "grad_norm": 3.7929070758176024, + "learning_rate": 3.0318564793535964e-06, + "loss": 1.5812, + "step": 4388 + }, + { + "epoch": 0.7532177793032435, + "grad_norm": 4.953814515321925, + "learning_rate": 3.0278708041395764e-06, + "loss": 1.5046, + "step": 4389 + }, + { + "epoch": 0.7533893941994165, + "grad_norm": 3.9677165722369194, + "learning_rate": 3.0238872830007226e-06, + "loss": 1.4114, + "step": 4390 + }, + { + "epoch": 0.7535610090955895, + "grad_norm": 4.47966330733451, + "learning_rate": 3.0199059171677657e-06, + "loss": 1.568, + "step": 4391 + }, + { + "epoch": 0.7537326239917624, + "grad_norm": 3.769070956683083, + "learning_rate": 3.015926707870768e-06, + "loss": 1.5562, + "step": 4392 + }, + { + "epoch": 0.7539042388879355, + "grad_norm": 4.087450790596648, + "learning_rate": 3.011949656339134e-06, + "loss": 1.4098, + "step": 4393 + }, + { + "epoch": 0.7540758537841085, + "grad_norm": 4.5315360723002005, + "learning_rate": 3.0079747638015798e-06, + "loss": 1.7902, + "step": 4394 + }, + { + "epoch": 0.7542474686802815, + "grad_norm": 3.950329779940293, + "learning_rate": 3.0040020314861772e-06, + "loss": 1.433, + "step": 4395 + }, + { + "epoch": 0.7544190835764545, + "grad_norm": 3.7349475438899633, + "learning_rate": 3.0000314606203206e-06, + "loss": 1.3761, + "step": 4396 + }, + { + "epoch": 0.7545906984726274, + "grad_norm": 3.894904685386614, + "learning_rate": 2.996063052430739e-06, + "loss": 1.5998, + "step": 4397 + }, + { + "epoch": 0.7547623133688004, + "grad_norm": 5.066690052255709, + "learning_rate": 2.9920968081434962e-06, + "loss": 1.7296, + "step": 4398 + }, + { + "epoch": 0.7549339282649734, + "grad_norm": 3.8931630710000964, + "learning_rate": 2.98813272898398e-06, + "loss": 1.5806, 
+ "step": 4399 + }, + { + "epoch": 0.7551055431611464, + "grad_norm": 3.1975501937477433, + "learning_rate": 2.98417081617691e-06, + "loss": 1.418, + "step": 4400 + }, + { + "epoch": 0.7552771580573193, + "grad_norm": 3.885865392167385, + "learning_rate": 2.980211070946343e-06, + "loss": 1.3606, + "step": 4401 + }, + { + "epoch": 0.7554487729534923, + "grad_norm": 4.114796699854625, + "learning_rate": 2.976253494515665e-06, + "loss": 1.557, + "step": 4402 + }, + { + "epoch": 0.7556203878496653, + "grad_norm": 3.387230949391646, + "learning_rate": 2.9722980881075848e-06, + "loss": 1.5567, + "step": 4403 + }, + { + "epoch": 0.7557920027458384, + "grad_norm": 3.871387707984601, + "learning_rate": 2.968344852944147e-06, + "loss": 1.4257, + "step": 4404 + }, + { + "epoch": 0.7559636176420114, + "grad_norm": 4.763593299447595, + "learning_rate": 2.964393790246728e-06, + "loss": 1.5408, + "step": 4405 + }, + { + "epoch": 0.7561352325381843, + "grad_norm": 3.8146427813491894, + "learning_rate": 2.9604449012360213e-06, + "loss": 1.3151, + "step": 4406 + }, + { + "epoch": 0.7563068474343573, + "grad_norm": 3.7267863419570673, + "learning_rate": 2.9564981871320654e-06, + "loss": 1.5651, + "step": 4407 + }, + { + "epoch": 0.7564784623305303, + "grad_norm": 4.1067316997557795, + "learning_rate": 2.9525536491542093e-06, + "loss": 1.4934, + "step": 4408 + }, + { + "epoch": 0.7566500772267033, + "grad_norm": 3.9447501263304465, + "learning_rate": 2.948611288521143e-06, + "loss": 1.4834, + "step": 4409 + }, + { + "epoch": 0.7568216921228763, + "grad_norm": 3.212018776639665, + "learning_rate": 2.944671106450877e-06, + "loss": 1.2799, + "step": 4410 + }, + { + "epoch": 0.7569933070190492, + "grad_norm": 4.073104554948966, + "learning_rate": 2.9407331041607566e-06, + "loss": 1.3709, + "step": 4411 + }, + { + "epoch": 0.7571649219152222, + "grad_norm": 4.748784333190554, + "learning_rate": 2.936797282867443e-06, + "loss": 1.6164, + "step": 4412 + }, + { + "epoch": 0.7573365368113952, + "grad_norm": 3.704864660949902, + "learning_rate": 2.932863643786925e-06, + "loss": 1.46, + "step": 4413 + }, + { + "epoch": 0.7575081517075682, + "grad_norm": 4.048253645903471, + "learning_rate": 2.9289321881345257e-06, + "loss": 1.57, + "step": 4414 + }, + { + "epoch": 0.7576797666037413, + "grad_norm": 3.635895460198856, + "learning_rate": 2.925002917124886e-06, + "loss": 1.5115, + "step": 4415 + }, + { + "epoch": 0.7578513814999142, + "grad_norm": 4.108851723343436, + "learning_rate": 2.9210758319719766e-06, + "loss": 1.4181, + "step": 4416 + }, + { + "epoch": 0.7580229963960872, + "grad_norm": 5.543846090173724, + "learning_rate": 2.9171509338890924e-06, + "loss": 1.6657, + "step": 4417 + }, + { + "epoch": 0.7581946112922602, + "grad_norm": 4.202833507114869, + "learning_rate": 2.913228224088849e-06, + "loss": 1.6016, + "step": 4418 + }, + { + "epoch": 0.7583662261884332, + "grad_norm": 3.299271885011321, + "learning_rate": 2.909307703783183e-06, + "loss": 1.3312, + "step": 4419 + }, + { + "epoch": 0.7585378410846061, + "grad_norm": 3.801345031125703, + "learning_rate": 2.905389374183364e-06, + "loss": 1.6204, + "step": 4420 + }, + { + "epoch": 0.7587094559807791, + "grad_norm": 3.6499202018479533, + "learning_rate": 2.9014732364999785e-06, + "loss": 1.4969, + "step": 4421 + }, + { + "epoch": 0.7588810708769521, + "grad_norm": 3.926759161067827, + "learning_rate": 2.897559291942942e-06, + "loss": 1.4524, + "step": 4422 + }, + { + "epoch": 0.7590526857731251, + "grad_norm": 3.546063765997647, + "learning_rate": 
2.8936475417214795e-06, + "loss": 1.6617, + "step": 4423 + }, + { + "epoch": 0.7592243006692981, + "grad_norm": 3.905646401381761, + "learning_rate": 2.8897379870441557e-06, + "loss": 1.3967, + "step": 4424 + }, + { + "epoch": 0.759395915565471, + "grad_norm": 3.425529299989414, + "learning_rate": 2.8858306291188387e-06, + "loss": 1.3189, + "step": 4425 + }, + { + "epoch": 0.759567530461644, + "grad_norm": 3.6579073263437913, + "learning_rate": 2.881925469152731e-06, + "loss": 1.5023, + "step": 4426 + }, + { + "epoch": 0.7597391453578171, + "grad_norm": 4.092137941152871, + "learning_rate": 2.878022508352355e-06, + "loss": 1.7651, + "step": 4427 + }, + { + "epoch": 0.7599107602539901, + "grad_norm": 3.924362930139786, + "learning_rate": 2.874121747923544e-06, + "loss": 1.7015, + "step": 4428 + }, + { + "epoch": 0.7600823751501631, + "grad_norm": 3.8899863715485843, + "learning_rate": 2.8702231890714606e-06, + "loss": 1.6186, + "step": 4429 + }, + { + "epoch": 0.760253990046336, + "grad_norm": 4.457015942288991, + "learning_rate": 2.866326833000589e-06, + "loss": 1.8369, + "step": 4430 + }, + { + "epoch": 0.760425604942509, + "grad_norm": 3.6213059778529795, + "learning_rate": 2.862432680914725e-06, + "loss": 1.4306, + "step": 4431 + }, + { + "epoch": 0.760597219838682, + "grad_norm": 3.2567857130058506, + "learning_rate": 2.8585407340169834e-06, + "loss": 1.5562, + "step": 4432 + }, + { + "epoch": 0.760768834734855, + "grad_norm": 3.9008085925184717, + "learning_rate": 2.8546509935098045e-06, + "loss": 1.6365, + "step": 4433 + }, + { + "epoch": 0.760940449631028, + "grad_norm": 3.839764394599854, + "learning_rate": 2.850763460594943e-06, + "loss": 1.4702, + "step": 4434 + }, + { + "epoch": 0.7611120645272009, + "grad_norm": 3.9404823558506603, + "learning_rate": 2.846878136473472e-06, + "loss": 1.5432, + "step": 4435 + }, + { + "epoch": 0.7612836794233739, + "grad_norm": 3.813111508049283, + "learning_rate": 2.8429950223457903e-06, + "loss": 1.4226, + "step": 4436 + }, + { + "epoch": 0.761455294319547, + "grad_norm": 4.784497204958719, + "learning_rate": 2.8391141194115933e-06, + "loss": 1.3172, + "step": 4437 + }, + { + "epoch": 0.76162690921572, + "grad_norm": 4.32480829849716, + "learning_rate": 2.83523542886991e-06, + "loss": 1.5136, + "step": 4438 + }, + { + "epoch": 0.7617985241118929, + "grad_norm": 5.151200374053781, + "learning_rate": 2.831358951919084e-06, + "loss": 1.7607, + "step": 4439 + }, + { + "epoch": 0.7619701390080659, + "grad_norm": 3.7860110227396246, + "learning_rate": 2.827484689756772e-06, + "loss": 1.4209, + "step": 4440 + }, + { + "epoch": 0.7621417539042389, + "grad_norm": 5.223063833393038, + "learning_rate": 2.8236126435799492e-06, + "loss": 1.4223, + "step": 4441 + }, + { + "epoch": 0.7623133688004119, + "grad_norm": 3.6918572937442473, + "learning_rate": 2.8197428145849037e-06, + "loss": 1.617, + "step": 4442 + }, + { + "epoch": 0.7624849836965849, + "grad_norm": 5.373172443424373, + "learning_rate": 2.8158752039672332e-06, + "loss": 1.7911, + "step": 4443 + }, + { + "epoch": 0.7626565985927578, + "grad_norm": 4.186438387440009, + "learning_rate": 2.812009812921861e-06, + "loss": 1.5638, + "step": 4444 + }, + { + "epoch": 0.7628282134889308, + "grad_norm": 3.423061255454521, + "learning_rate": 2.808146642643018e-06, + "loss": 1.385, + "step": 4445 + }, + { + "epoch": 0.7629998283851038, + "grad_norm": 4.380412032029947, + "learning_rate": 2.8042856943242548e-06, + "loss": 1.5834, + "step": 4446 + }, + { + "epoch": 0.7631714432812768, + "grad_norm": 
3.4097907038655815, + "learning_rate": 2.800426969158425e-06, + "loss": 1.5104, + "step": 4447 + }, + { + "epoch": 0.7633430581774499, + "grad_norm": 4.465513251242325, + "learning_rate": 2.796570468337707e-06, + "loss": 1.7648, + "step": 4448 + }, + { + "epoch": 0.7635146730736228, + "grad_norm": 3.474228806745961, + "learning_rate": 2.7927161930535805e-06, + "loss": 1.4709, + "step": 4449 + }, + { + "epoch": 0.7636862879697958, + "grad_norm": 4.658185522178163, + "learning_rate": 2.788864144496848e-06, + "loss": 1.5629, + "step": 4450 + }, + { + "epoch": 0.7638579028659688, + "grad_norm": 3.6260411182022976, + "learning_rate": 2.78501432385762e-06, + "loss": 1.5911, + "step": 4451 + }, + { + "epoch": 0.7640295177621418, + "grad_norm": 3.835475896891879, + "learning_rate": 2.7811667323253155e-06, + "loss": 1.5582, + "step": 4452 + }, + { + "epoch": 0.7642011326583148, + "grad_norm": 4.185508149101832, + "learning_rate": 2.7773213710886693e-06, + "loss": 1.3685, + "step": 4453 + }, + { + "epoch": 0.7643727475544877, + "grad_norm": 3.7827348196473, + "learning_rate": 2.7734782413357274e-06, + "loss": 1.5666, + "step": 4454 + }, + { + "epoch": 0.7645443624506607, + "grad_norm": 3.9627325602002093, + "learning_rate": 2.7696373442538406e-06, + "loss": 1.3867, + "step": 4455 + }, + { + "epoch": 0.7647159773468337, + "grad_norm": 4.017457544356005, + "learning_rate": 2.765798681029679e-06, + "loss": 1.5754, + "step": 4456 + }, + { + "epoch": 0.7648875922430067, + "grad_norm": 4.432502003093646, + "learning_rate": 2.7619622528492105e-06, + "loss": 1.6707, + "step": 4457 + }, + { + "epoch": 0.7650592071391796, + "grad_norm": 4.14798269207826, + "learning_rate": 2.7581280608977233e-06, + "loss": 1.6452, + "step": 4458 + }, + { + "epoch": 0.7652308220353526, + "grad_norm": 3.7581899694010876, + "learning_rate": 2.754296106359811e-06, + "loss": 1.4507, + "step": 4459 + }, + { + "epoch": 0.7654024369315257, + "grad_norm": 3.8054250694000737, + "learning_rate": 2.750466390419376e-06, + "loss": 1.4753, + "step": 4460 + }, + { + "epoch": 0.7655740518276987, + "grad_norm": 3.968374976960048, + "learning_rate": 2.7466389142596296e-06, + "loss": 1.7675, + "step": 4461 + }, + { + "epoch": 0.7657456667238717, + "grad_norm": 3.6452133254946535, + "learning_rate": 2.7428136790630853e-06, + "loss": 1.3846, + "step": 4462 + }, + { + "epoch": 0.7659172816200446, + "grad_norm": 4.323195481441205, + "learning_rate": 2.7389906860115724e-06, + "loss": 1.5019, + "step": 4463 + }, + { + "epoch": 0.7660888965162176, + "grad_norm": 4.163743686034393, + "learning_rate": 2.7351699362862237e-06, + "loss": 1.4608, + "step": 4464 + }, + { + "epoch": 0.7662605114123906, + "grad_norm": 3.268240556746148, + "learning_rate": 2.7313514310674826e-06, + "loss": 1.4187, + "step": 4465 + }, + { + "epoch": 0.7664321263085636, + "grad_norm": 4.316649118879265, + "learning_rate": 2.727535171535094e-06, + "loss": 1.6098, + "step": 4466 + }, + { + "epoch": 0.7666037412047366, + "grad_norm": 3.8992424533540464, + "learning_rate": 2.723721158868107e-06, + "loss": 1.6334, + "step": 4467 + }, + { + "epoch": 0.7667753561009095, + "grad_norm": 6.2022856005080875, + "learning_rate": 2.719909394244884e-06, + "loss": 1.7317, + "step": 4468 + }, + { + "epoch": 0.7669469709970825, + "grad_norm": 3.933953541630205, + "learning_rate": 2.7160998788430882e-06, + "loss": 1.3775, + "step": 4469 + }, + { + "epoch": 0.7671185858932555, + "grad_norm": 3.7712211752873164, + "learning_rate": 2.712292613839692e-06, + "loss": 1.4645, + "step": 4470 + }, + { + 
"epoch": 0.7672902007894286, + "grad_norm": 4.375401432276708, + "learning_rate": 2.7084876004109663e-06, + "loss": 1.4993, + "step": 4471 + }, + { + "epoch": 0.7674618156856015, + "grad_norm": 4.273813274309977, + "learning_rate": 2.704684839732492e-06, + "loss": 1.399, + "step": 4472 + }, + { + "epoch": 0.7676334305817745, + "grad_norm": 3.3765323403232625, + "learning_rate": 2.700884332979147e-06, + "loss": 1.4704, + "step": 4473 + }, + { + "epoch": 0.7678050454779475, + "grad_norm": 3.674698038908049, + "learning_rate": 2.69708608132512e-06, + "loss": 1.4316, + "step": 4474 + }, + { + "epoch": 0.7679766603741205, + "grad_norm": 3.8614394928487186, + "learning_rate": 2.6932900859439048e-06, + "loss": 1.4047, + "step": 4475 + }, + { + "epoch": 0.7681482752702935, + "grad_norm": 3.2861669001575526, + "learning_rate": 2.689496348008286e-06, + "loss": 1.3076, + "step": 4476 + }, + { + "epoch": 0.7683198901664664, + "grad_norm": 4.084108463315017, + "learning_rate": 2.6857048686903618e-06, + "loss": 1.5516, + "step": 4477 + }, + { + "epoch": 0.7684915050626394, + "grad_norm": 3.568100865348519, + "learning_rate": 2.681915649161534e-06, + "loss": 1.2627, + "step": 4478 + }, + { + "epoch": 0.7686631199588124, + "grad_norm": 3.4677180059123, + "learning_rate": 2.678128690592493e-06, + "loss": 1.391, + "step": 4479 + }, + { + "epoch": 0.7688347348549854, + "grad_norm": 3.6824598718817203, + "learning_rate": 2.6743439941532457e-06, + "loss": 1.401, + "step": 4480 + }, + { + "epoch": 0.7690063497511584, + "grad_norm": 3.8041152582511075, + "learning_rate": 2.6705615610130885e-06, + "loss": 1.488, + "step": 4481 + }, + { + "epoch": 0.7691779646473313, + "grad_norm": 3.705435111785703, + "learning_rate": 2.666781392340626e-06, + "loss": 1.64, + "step": 4482 + }, + { + "epoch": 0.7693495795435044, + "grad_norm": 4.24375975458298, + "learning_rate": 2.6630034893037617e-06, + "loss": 1.4309, + "step": 4483 + }, + { + "epoch": 0.7695211944396774, + "grad_norm": 3.8504794019221187, + "learning_rate": 2.659227853069699e-06, + "loss": 1.4305, + "step": 4484 + }, + { + "epoch": 0.7696928093358504, + "grad_norm": 3.75359595511843, + "learning_rate": 2.655454484804938e-06, + "loss": 1.5275, + "step": 4485 + }, + { + "epoch": 0.7698644242320234, + "grad_norm": 3.914110370683981, + "learning_rate": 2.651683385675279e-06, + "loss": 1.6047, + "step": 4486 + }, + { + "epoch": 0.7700360391281963, + "grad_norm": 3.840239834633645, + "learning_rate": 2.647914556845823e-06, + "loss": 1.7163, + "step": 4487 + }, + { + "epoch": 0.7702076540243693, + "grad_norm": 4.857834316263262, + "learning_rate": 2.6441479994809703e-06, + "loss": 1.5096, + "step": 4488 + }, + { + "epoch": 0.7703792689205423, + "grad_norm": 4.100397469726632, + "learning_rate": 2.640383714744421e-06, + "loss": 1.507, + "step": 4489 + }, + { + "epoch": 0.7705508838167153, + "grad_norm": 4.548457883960667, + "learning_rate": 2.636621703799167e-06, + "loss": 1.7107, + "step": 4490 + }, + { + "epoch": 0.7707224987128882, + "grad_norm": 3.67459602559332, + "learning_rate": 2.6328619678074986e-06, + "loss": 1.5633, + "step": 4491 + }, + { + "epoch": 0.7708941136090612, + "grad_norm": 4.101174492648497, + "learning_rate": 2.6291045079310085e-06, + "loss": 1.3496, + "step": 4492 + }, + { + "epoch": 0.7710657285052342, + "grad_norm": 4.056990905886543, + "learning_rate": 2.625349325330582e-06, + "loss": 1.5651, + "step": 4493 + }, + { + "epoch": 0.7712373434014073, + "grad_norm": 3.733589344347334, + "learning_rate": 2.621596421166407e-06, + "loss": 1.5497, + 
"step": 4494 + }, + { + "epoch": 0.7714089582975803, + "grad_norm": 3.783740714792206, + "learning_rate": 2.6178457965979543e-06, + "loss": 1.5104, + "step": 4495 + }, + { + "epoch": 0.7715805731937532, + "grad_norm": 3.7677226534491903, + "learning_rate": 2.614097452784006e-06, + "loss": 1.3819, + "step": 4496 + }, + { + "epoch": 0.7717521880899262, + "grad_norm": 4.423261448478865, + "learning_rate": 2.610351390882626e-06, + "loss": 1.4501, + "step": 4497 + }, + { + "epoch": 0.7719238029860992, + "grad_norm": 3.9400255608160673, + "learning_rate": 2.606607612051183e-06, + "loss": 1.6644, + "step": 4498 + }, + { + "epoch": 0.7720954178822722, + "grad_norm": 4.053939427153468, + "learning_rate": 2.602866117446339e-06, + "loss": 1.6112, + "step": 4499 + }, + { + "epoch": 0.7722670327784452, + "grad_norm": 5.174646121012897, + "learning_rate": 2.599126908224041e-06, + "loss": 1.4842, + "step": 4500 + }, + { + "epoch": 0.7724386476746181, + "grad_norm": 3.352561240146486, + "learning_rate": 2.5953899855395428e-06, + "loss": 1.296, + "step": 4501 + }, + { + "epoch": 0.7726102625707911, + "grad_norm": 4.188214515774507, + "learning_rate": 2.591655350547385e-06, + "loss": 1.792, + "step": 4502 + }, + { + "epoch": 0.7727818774669641, + "grad_norm": 4.6857347627633175, + "learning_rate": 2.5879230044014003e-06, + "loss": 1.4828, + "step": 4503 + }, + { + "epoch": 0.7729534923631372, + "grad_norm": 3.8344725531467905, + "learning_rate": 2.584192948254719e-06, + "loss": 1.3325, + "step": 4504 + }, + { + "epoch": 0.7731251072593102, + "grad_norm": 3.783532423223033, + "learning_rate": 2.580465183259757e-06, + "loss": 1.5223, + "step": 4505 + }, + { + "epoch": 0.7732967221554831, + "grad_norm": 3.98052990658642, + "learning_rate": 2.576739710568229e-06, + "loss": 1.5266, + "step": 4506 + }, + { + "epoch": 0.7734683370516561, + "grad_norm": 3.8682766203140537, + "learning_rate": 2.5730165313311396e-06, + "loss": 1.5732, + "step": 4507 + }, + { + "epoch": 0.7736399519478291, + "grad_norm": 4.123862917529191, + "learning_rate": 2.569295646698785e-06, + "loss": 1.591, + "step": 4508 + }, + { + "epoch": 0.7738115668440021, + "grad_norm": 4.098189577086668, + "learning_rate": 2.5655770578207505e-06, + "loss": 1.557, + "step": 4509 + }, + { + "epoch": 0.773983181740175, + "grad_norm": 6.481973805275883, + "learning_rate": 2.5618607658459105e-06, + "loss": 1.3156, + "step": 4510 + }, + { + "epoch": 0.774154796636348, + "grad_norm": 3.9290315116549888, + "learning_rate": 2.558146771922434e-06, + "loss": 1.704, + "step": 4511 + }, + { + "epoch": 0.774326411532521, + "grad_norm": 4.528762313524045, + "learning_rate": 2.55443507719778e-06, + "loss": 1.4495, + "step": 4512 + }, + { + "epoch": 0.774498026428694, + "grad_norm": 4.286421310674162, + "learning_rate": 2.5507256828186978e-06, + "loss": 1.5507, + "step": 4513 + }, + { + "epoch": 0.774669641324867, + "grad_norm": 3.7519312087952454, + "learning_rate": 2.5470185899312207e-06, + "loss": 1.4529, + "step": 4514 + }, + { + "epoch": 0.7748412562210399, + "grad_norm": 4.241760455792032, + "learning_rate": 2.5433137996806723e-06, + "loss": 1.4337, + "step": 4515 + }, + { + "epoch": 0.775012871117213, + "grad_norm": 3.7873020720678174, + "learning_rate": 2.5396113132116685e-06, + "loss": 1.2707, + "step": 4516 + }, + { + "epoch": 0.775184486013386, + "grad_norm": 4.1714384224692616, + "learning_rate": 2.5359111316681116e-06, + "loss": 1.5916, + "step": 4517 + }, + { + "epoch": 0.775356100909559, + "grad_norm": 3.652784792387227, + "learning_rate": 
2.5322132561931946e-06, + "loss": 1.5952, + "step": 4518 + }, + { + "epoch": 0.775527715805732, + "grad_norm": 4.953929865625447, + "learning_rate": 2.5285176879293903e-06, + "loss": 1.5933, + "step": 4519 + }, + { + "epoch": 0.7756993307019049, + "grad_norm": 5.110724693408642, + "learning_rate": 2.5248244280184688e-06, + "loss": 1.5387, + "step": 4520 + }, + { + "epoch": 0.7758709455980779, + "grad_norm": 4.646757485427627, + "learning_rate": 2.5211334776014752e-06, + "loss": 1.6548, + "step": 4521 + }, + { + "epoch": 0.7760425604942509, + "grad_norm": 3.950822310675609, + "learning_rate": 2.517444837818751e-06, + "loss": 1.6421, + "step": 4522 + }, + { + "epoch": 0.7762141753904239, + "grad_norm": 3.664188226279325, + "learning_rate": 2.5137585098099237e-06, + "loss": 1.5203, + "step": 4523 + }, + { + "epoch": 0.7763857902865968, + "grad_norm": 3.7627221089697738, + "learning_rate": 2.5100744947138967e-06, + "loss": 1.4911, + "step": 4524 + }, + { + "epoch": 0.7765574051827698, + "grad_norm": 3.7254328235045406, + "learning_rate": 2.506392793668869e-06, + "loss": 1.4806, + "step": 4525 + }, + { + "epoch": 0.7767290200789428, + "grad_norm": 4.350941848753395, + "learning_rate": 2.502713407812324e-06, + "loss": 1.4898, + "step": 4526 + }, + { + "epoch": 0.7769006349751159, + "grad_norm": 3.869375522392363, + "learning_rate": 2.4990363382810214e-06, + "loss": 1.3624, + "step": 4527 + }, + { + "epoch": 0.7770722498712889, + "grad_norm": 4.128531803296474, + "learning_rate": 2.4953615862110146e-06, + "loss": 1.5084, + "step": 4528 + }, + { + "epoch": 0.7772438647674618, + "grad_norm": 3.3730510984713513, + "learning_rate": 2.4916891527376342e-06, + "loss": 1.5021, + "step": 4529 + }, + { + "epoch": 0.7774154796636348, + "grad_norm": 3.501713048941487, + "learning_rate": 2.488019038995497e-06, + "loss": 1.5368, + "step": 4530 + }, + { + "epoch": 0.7775870945598078, + "grad_norm": 4.037353076532261, + "learning_rate": 2.484351246118507e-06, + "loss": 1.5601, + "step": 4531 + }, + { + "epoch": 0.7777587094559808, + "grad_norm": 8.097110207268901, + "learning_rate": 2.4806857752398484e-06, + "loss": 1.5914, + "step": 4532 + }, + { + "epoch": 0.7779303243521538, + "grad_norm": 3.536299649948467, + "learning_rate": 2.477022627491985e-06, + "loss": 1.4101, + "step": 4533 + }, + { + "epoch": 0.7781019392483267, + "grad_norm": 3.2000364500466705, + "learning_rate": 2.473361804006662e-06, + "loss": 1.3659, + "step": 4534 + }, + { + "epoch": 0.7782735541444997, + "grad_norm": 5.089967887166785, + "learning_rate": 2.469703305914913e-06, + "loss": 1.6735, + "step": 4535 + }, + { + "epoch": 0.7784451690406727, + "grad_norm": 4.007247000947681, + "learning_rate": 2.466047134347049e-06, + "loss": 1.6003, + "step": 4536 + }, + { + "epoch": 0.7786167839368457, + "grad_norm": 4.204910629456333, + "learning_rate": 2.4623932904326665e-06, + "loss": 1.3892, + "step": 4537 + }, + { + "epoch": 0.7787883988330188, + "grad_norm": 3.987292947688818, + "learning_rate": 2.458741775300637e-06, + "loss": 1.433, + "step": 4538 + }, + { + "epoch": 0.7789600137291917, + "grad_norm": 4.360384539749673, + "learning_rate": 2.45509259007911e-06, + "loss": 1.406, + "step": 4539 + }, + { + "epoch": 0.7791316286253647, + "grad_norm": 4.522888518202757, + "learning_rate": 2.4514457358955255e-06, + "loss": 1.4886, + "step": 4540 + }, + { + "epoch": 0.7793032435215377, + "grad_norm": 4.020788045386832, + "learning_rate": 2.4478012138765962e-06, + "loss": 1.3645, + "step": 4541 + }, + { + "epoch": 0.7794748584177107, + "grad_norm": 
3.5694199406070832, + "learning_rate": 2.4441590251483194e-06, + "loss": 1.3175, + "step": 4542 + }, + { + "epoch": 0.7796464733138836, + "grad_norm": 4.019233931035732, + "learning_rate": 2.4405191708359623e-06, + "loss": 1.4375, + "step": 4543 + }, + { + "epoch": 0.7798180882100566, + "grad_norm": 4.091775692677808, + "learning_rate": 2.4368816520640813e-06, + "loss": 1.4375, + "step": 4544 + }, + { + "epoch": 0.7799897031062296, + "grad_norm": 4.270409007492003, + "learning_rate": 2.4332464699565015e-06, + "loss": 1.6578, + "step": 4545 + }, + { + "epoch": 0.7801613180024026, + "grad_norm": 4.134044993120238, + "learning_rate": 2.4296136256363345e-06, + "loss": 1.6776, + "step": 4546 + }, + { + "epoch": 0.7803329328985756, + "grad_norm": 3.975188865992825, + "learning_rate": 2.4259831202259697e-06, + "loss": 1.5708, + "step": 4547 + }, + { + "epoch": 0.7805045477947485, + "grad_norm": 3.8247261437705933, + "learning_rate": 2.4223549548470628e-06, + "loss": 1.5583, + "step": 4548 + }, + { + "epoch": 0.7806761626909215, + "grad_norm": 3.9786166538535, + "learning_rate": 2.4187291306205573e-06, + "loss": 1.3402, + "step": 4549 + }, + { + "epoch": 0.7808477775870946, + "grad_norm": 3.844275444933562, + "learning_rate": 2.4151056486666748e-06, + "loss": 1.6355, + "step": 4550 + }, + { + "epoch": 0.7810193924832676, + "grad_norm": 4.273336345579903, + "learning_rate": 2.4114845101049013e-06, + "loss": 1.8647, + "step": 4551 + }, + { + "epoch": 0.7811910073794406, + "grad_norm": 4.087677119858582, + "learning_rate": 2.407865716054013e-06, + "loss": 1.4818, + "step": 4552 + }, + { + "epoch": 0.7813626222756135, + "grad_norm": 4.182573259912535, + "learning_rate": 2.4042492676320482e-06, + "loss": 1.3974, + "step": 4553 + }, + { + "epoch": 0.7815342371717865, + "grad_norm": 4.125214870502783, + "learning_rate": 2.400635165956331e-06, + "loss": 1.6131, + "step": 4554 + }, + { + "epoch": 0.7817058520679595, + "grad_norm": 3.661800085485567, + "learning_rate": 2.3970234121434555e-06, + "loss": 1.5482, + "step": 4555 + }, + { + "epoch": 0.7818774669641325, + "grad_norm": 4.145476093215818, + "learning_rate": 2.393414007309296e-06, + "loss": 1.7584, + "step": 4556 + }, + { + "epoch": 0.7820490818603055, + "grad_norm": 3.8974989480822524, + "learning_rate": 2.3898069525689925e-06, + "loss": 1.5632, + "step": 4557 + }, + { + "epoch": 0.7822206967564784, + "grad_norm": 4.605563261774743, + "learning_rate": 2.386202249036961e-06, + "loss": 1.4757, + "step": 4558 + }, + { + "epoch": 0.7823923116526514, + "grad_norm": 4.066695993715914, + "learning_rate": 2.3825998978268962e-06, + "loss": 1.6209, + "step": 4559 + }, + { + "epoch": 0.7825639265488245, + "grad_norm": 4.514725466414961, + "learning_rate": 2.3789999000517627e-06, + "loss": 1.5252, + "step": 4560 + }, + { + "epoch": 0.7827355414449975, + "grad_norm": 3.8552784172237367, + "learning_rate": 2.3754022568238e-06, + "loss": 1.4558, + "step": 4561 + }, + { + "epoch": 0.7829071563411704, + "grad_norm": 3.669796767190878, + "learning_rate": 2.3718069692545187e-06, + "loss": 1.6467, + "step": 4562 + }, + { + "epoch": 0.7830787712373434, + "grad_norm": 3.9705014700658086, + "learning_rate": 2.368214038454695e-06, + "loss": 1.4688, + "step": 4563 + }, + { + "epoch": 0.7832503861335164, + "grad_norm": 4.039811873462365, + "learning_rate": 2.3646234655343894e-06, + "loss": 1.4324, + "step": 4564 + }, + { + "epoch": 0.7834220010296894, + "grad_norm": 4.046197720990133, + "learning_rate": 2.3610352516029244e-06, + "loss": 1.417, + "step": 4565 + }, + { + 
"epoch": 0.7835936159258624, + "grad_norm": 4.414451727332304, + "learning_rate": 2.3574493977689026e-06, + "loss": 1.391, + "step": 4566 + }, + { + "epoch": 0.7837652308220353, + "grad_norm": 3.5724871672587692, + "learning_rate": 2.353865905140187e-06, + "loss": 1.2887, + "step": 4567 + }, + { + "epoch": 0.7839368457182083, + "grad_norm": 4.118855242545041, + "learning_rate": 2.350284774823919e-06, + "loss": 1.3697, + "step": 4568 + }, + { + "epoch": 0.7841084606143813, + "grad_norm": 5.06417793779705, + "learning_rate": 2.3467060079265026e-06, + "loss": 1.5435, + "step": 4569 + }, + { + "epoch": 0.7842800755105543, + "grad_norm": 3.6181555666804472, + "learning_rate": 2.34312960555362e-06, + "loss": 1.3425, + "step": 4570 + }, + { + "epoch": 0.7844516904067274, + "grad_norm": 4.451075665002024, + "learning_rate": 2.339555568810221e-06, + "loss": 1.3767, + "step": 4571 + }, + { + "epoch": 0.7846233053029003, + "grad_norm": 4.134181542467542, + "learning_rate": 2.3359838988005177e-06, + "loss": 1.3222, + "step": 4572 + }, + { + "epoch": 0.7847949201990733, + "grad_norm": 4.073668045984023, + "learning_rate": 2.3324145966279978e-06, + "loss": 1.519, + "step": 4573 + }, + { + "epoch": 0.7849665350952463, + "grad_norm": 4.530114921473998, + "learning_rate": 2.3288476633954194e-06, + "loss": 1.7735, + "step": 4574 + }, + { + "epoch": 0.7851381499914193, + "grad_norm": 4.121616005573902, + "learning_rate": 2.3252831002047983e-06, + "loss": 1.4971, + "step": 4575 + }, + { + "epoch": 0.7853097648875922, + "grad_norm": 4.323196090661877, + "learning_rate": 2.321720908157431e-06, + "loss": 1.5966, + "step": 4576 + }, + { + "epoch": 0.7854813797837652, + "grad_norm": 3.6960822152095045, + "learning_rate": 2.3181610883538687e-06, + "loss": 1.5503, + "step": 4577 + }, + { + "epoch": 0.7856529946799382, + "grad_norm": 4.422497123418933, + "learning_rate": 2.314603641893939e-06, + "loss": 1.2902, + "step": 4578 + }, + { + "epoch": 0.7858246095761112, + "grad_norm": 4.000794942195877, + "learning_rate": 2.3110485698767336e-06, + "loss": 1.4545, + "step": 4579 + }, + { + "epoch": 0.7859962244722842, + "grad_norm": 3.714867332806828, + "learning_rate": 2.3074958734006136e-06, + "loss": 1.5196, + "step": 4580 + }, + { + "epoch": 0.7861678393684571, + "grad_norm": 4.122749732512584, + "learning_rate": 2.3039455535631995e-06, + "loss": 1.6527, + "step": 4581 + }, + { + "epoch": 0.7863394542646301, + "grad_norm": 3.5470760552100566, + "learning_rate": 2.300397611461378e-06, + "loss": 1.3618, + "step": 4582 + }, + { + "epoch": 0.7865110691608032, + "grad_norm": 3.660475328826211, + "learning_rate": 2.296852048191306e-06, + "loss": 1.548, + "step": 4583 + }, + { + "epoch": 0.7866826840569762, + "grad_norm": 4.379460706114087, + "learning_rate": 2.293308864848405e-06, + "loss": 1.3725, + "step": 4584 + }, + { + "epoch": 0.7868542989531492, + "grad_norm": 4.110023104077812, + "learning_rate": 2.2897680625273623e-06, + "loss": 1.4684, + "step": 4585 + }, + { + "epoch": 0.7870259138493221, + "grad_norm": 3.8559896212443614, + "learning_rate": 2.2862296423221232e-06, + "loss": 1.3079, + "step": 4586 + }, + { + "epoch": 0.7871975287454951, + "grad_norm": 4.456887935980542, + "learning_rate": 2.2826936053258986e-06, + "loss": 1.5977, + "step": 4587 + }, + { + "epoch": 0.7873691436416681, + "grad_norm": 4.3797378698923595, + "learning_rate": 2.279159952631168e-06, + "loss": 1.6759, + "step": 4588 + }, + { + "epoch": 0.7875407585378411, + "grad_norm": 4.895653407277305, + "learning_rate": 2.2756286853296716e-06, + 
"loss": 1.5717, + "step": 4589 + }, + { + "epoch": 0.7877123734340141, + "grad_norm": 4.167115803492636, + "learning_rate": 2.2720998045124155e-06, + "loss": 1.447, + "step": 4590 + }, + { + "epoch": 0.787883988330187, + "grad_norm": 4.174668458199423, + "learning_rate": 2.2685733112696606e-06, + "loss": 1.5835, + "step": 4591 + }, + { + "epoch": 0.78805560322636, + "grad_norm": 4.9347806304797714, + "learning_rate": 2.265049206690939e-06, + "loss": 1.5882, + "step": 4592 + }, + { + "epoch": 0.788227218122533, + "grad_norm": 4.545062690187829, + "learning_rate": 2.2615274918650376e-06, + "loss": 1.57, + "step": 4593 + }, + { + "epoch": 0.7883988330187061, + "grad_norm": 4.7718352818444325, + "learning_rate": 2.258008167880009e-06, + "loss": 1.3868, + "step": 4594 + }, + { + "epoch": 0.788570447914879, + "grad_norm": 4.144523611947049, + "learning_rate": 2.2544912358231706e-06, + "loss": 1.3997, + "step": 4595 + }, + { + "epoch": 0.788742062811052, + "grad_norm": 4.534395590692036, + "learning_rate": 2.2509766967810896e-06, + "loss": 1.6358, + "step": 4596 + }, + { + "epoch": 0.788913677707225, + "grad_norm": 3.398494400720974, + "learning_rate": 2.2474645518396065e-06, + "loss": 1.5059, + "step": 4597 + }, + { + "epoch": 0.789085292603398, + "grad_norm": 4.805576840139967, + "learning_rate": 2.2439548020838166e-06, + "loss": 1.7907, + "step": 4598 + }, + { + "epoch": 0.789256907499571, + "grad_norm": 3.950006902277852, + "learning_rate": 2.24044744859807e-06, + "loss": 1.5026, + "step": 4599 + }, + { + "epoch": 0.7894285223957439, + "grad_norm": 5.691220905255036, + "learning_rate": 2.2369424924659877e-06, + "loss": 1.5733, + "step": 4600 + }, + { + "epoch": 0.7896001372919169, + "grad_norm": 4.143064291643852, + "learning_rate": 2.233439934770438e-06, + "loss": 1.4565, + "step": 4601 + }, + { + "epoch": 0.7897717521880899, + "grad_norm": 4.1962305872311605, + "learning_rate": 2.2299397765935582e-06, + "loss": 1.6948, + "step": 4602 + }, + { + "epoch": 0.7899433670842629, + "grad_norm": 4.136587175410183, + "learning_rate": 2.226442019016739e-06, + "loss": 1.5132, + "step": 4603 + }, + { + "epoch": 0.790114981980436, + "grad_norm": 3.676086791613163, + "learning_rate": 2.222946663120633e-06, + "loss": 1.4024, + "step": 4604 + }, + { + "epoch": 0.7902865968766088, + "grad_norm": 3.744341637558946, + "learning_rate": 2.2194537099851465e-06, + "loss": 1.4473, + "step": 4605 + }, + { + "epoch": 0.7904582117727819, + "grad_norm": 4.4355070229087605, + "learning_rate": 2.2159631606894427e-06, + "loss": 1.4113, + "step": 4606 + }, + { + "epoch": 0.7906298266689549, + "grad_norm": 3.5095623292619353, + "learning_rate": 2.2124750163119457e-06, + "loss": 1.4368, + "step": 4607 + }, + { + "epoch": 0.7908014415651279, + "grad_norm": 3.7407516024751404, + "learning_rate": 2.208989277930338e-06, + "loss": 1.4771, + "step": 4608 + }, + { + "epoch": 0.7909730564613009, + "grad_norm": 4.007317668639387, + "learning_rate": 2.205505946621558e-06, + "loss": 1.5252, + "step": 4609 + }, + { + "epoch": 0.7911446713574738, + "grad_norm": 3.7571818491464426, + "learning_rate": 2.2020250234617957e-06, + "loss": 1.5841, + "step": 4610 + }, + { + "epoch": 0.7913162862536468, + "grad_norm": 4.612550439977165, + "learning_rate": 2.198546509526498e-06, + "loss": 1.5319, + "step": 4611 + }, + { + "epoch": 0.7914879011498198, + "grad_norm": 4.005278833794611, + "learning_rate": 2.1950704058903727e-06, + "loss": 1.5433, + "step": 4612 + }, + { + "epoch": 0.7916595160459928, + "grad_norm": 3.8769935412426224, + 
"learning_rate": 2.1915967136273797e-06, + "loss": 1.5392, + "step": 4613 + }, + { + "epoch": 0.7918311309421657, + "grad_norm": 4.090312146460333, + "learning_rate": 2.188125433810736e-06, + "loss": 1.5156, + "step": 4614 + }, + { + "epoch": 0.7920027458383387, + "grad_norm": 3.5700381420604255, + "learning_rate": 2.1846565675129074e-06, + "loss": 1.2586, + "step": 4615 + }, + { + "epoch": 0.7921743607345118, + "grad_norm": 3.7294634405637885, + "learning_rate": 2.1811901158056213e-06, + "loss": 1.3121, + "step": 4616 + }, + { + "epoch": 0.7923459756306848, + "grad_norm": 3.2927047845992194, + "learning_rate": 2.1777260797598523e-06, + "loss": 1.5085, + "step": 4617 + }, + { + "epoch": 0.7925175905268578, + "grad_norm": 3.6449002749128354, + "learning_rate": 2.174264460445834e-06, + "loss": 1.385, + "step": 4618 + }, + { + "epoch": 0.7926892054230307, + "grad_norm": 3.7236303330209855, + "learning_rate": 2.1708052589330553e-06, + "loss": 1.6128, + "step": 4619 + }, + { + "epoch": 0.7928608203192037, + "grad_norm": 4.125968023663776, + "learning_rate": 2.1673484762902473e-06, + "loss": 1.6141, + "step": 4620 + }, + { + "epoch": 0.7930324352153767, + "grad_norm": 3.517979458271183, + "learning_rate": 2.1638941135854042e-06, + "loss": 1.259, + "step": 4621 + }, + { + "epoch": 0.7932040501115497, + "grad_norm": 4.18192251590275, + "learning_rate": 2.1604421718857714e-06, + "loss": 1.4995, + "step": 4622 + }, + { + "epoch": 0.7933756650077227, + "grad_norm": 3.799625812860469, + "learning_rate": 2.156992652257839e-06, + "loss": 1.6501, + "step": 4623 + }, + { + "epoch": 0.7935472799038956, + "grad_norm": 4.184488734604103, + "learning_rate": 2.153545555767359e-06, + "loss": 1.5952, + "step": 4624 + }, + { + "epoch": 0.7937188948000686, + "grad_norm": 3.724464336165782, + "learning_rate": 2.1501008834793257e-06, + "loss": 1.4416, + "step": 4625 + }, + { + "epoch": 0.7938905096962416, + "grad_norm": 3.9124088421508563, + "learning_rate": 2.146658636457989e-06, + "loss": 1.498, + "step": 4626 + }, + { + "epoch": 0.7940621245924147, + "grad_norm": 4.035782081627623, + "learning_rate": 2.143218815766849e-06, + "loss": 1.3016, + "step": 4627 + }, + { + "epoch": 0.7942337394885877, + "grad_norm": 4.47954400839062, + "learning_rate": 2.13978142246866e-06, + "loss": 1.4967, + "step": 4628 + }, + { + "epoch": 0.7944053543847606, + "grad_norm": 3.803009861546744, + "learning_rate": 2.136346457625418e-06, + "loss": 1.613, + "step": 4629 + }, + { + "epoch": 0.7945769692809336, + "grad_norm": 4.004745018724956, + "learning_rate": 2.132913922298372e-06, + "loss": 1.5802, + "step": 4630 + }, + { + "epoch": 0.7947485841771066, + "grad_norm": 6.184376835111989, + "learning_rate": 2.1294838175480237e-06, + "loss": 1.6301, + "step": 4631 + }, + { + "epoch": 0.7949201990732796, + "grad_norm": 4.585837532680891, + "learning_rate": 2.12605614443412e-06, + "loss": 1.489, + "step": 4632 + }, + { + "epoch": 0.7950918139694525, + "grad_norm": 3.9897481821223018, + "learning_rate": 2.1226309040156634e-06, + "loss": 1.5182, + "step": 4633 + }, + { + "epoch": 0.7952634288656255, + "grad_norm": 4.662142462027689, + "learning_rate": 2.1192080973508944e-06, + "loss": 1.6099, + "step": 4634 + }, + { + "epoch": 0.7954350437617985, + "grad_norm": 3.9027942977554955, + "learning_rate": 2.1157877254973057e-06, + "loss": 1.4579, + "step": 4635 + }, + { + "epoch": 0.7956066586579715, + "grad_norm": 3.500944414217306, + "learning_rate": 2.1123697895116412e-06, + "loss": 1.4072, + "step": 4636 + }, + { + "epoch": 0.7957782735541445, + 
"grad_norm": 3.9063468670755426, + "learning_rate": 2.1089542904498893e-06, + "loss": 1.5136, + "step": 4637 + }, + { + "epoch": 0.7959498884503174, + "grad_norm": 3.51807468341857, + "learning_rate": 2.1055412293672883e-06, + "loss": 1.3795, + "step": 4638 + }, + { + "epoch": 0.7961215033464905, + "grad_norm": 3.7092350093674487, + "learning_rate": 2.1021306073183166e-06, + "loss": 1.6764, + "step": 4639 + }, + { + "epoch": 0.7962931182426635, + "grad_norm": 4.355211550983136, + "learning_rate": 2.0987224253567096e-06, + "loss": 1.5256, + "step": 4640 + }, + { + "epoch": 0.7964647331388365, + "grad_norm": 4.084868416364256, + "learning_rate": 2.0953166845354346e-06, + "loss": 1.6207, + "step": 4641 + }, + { + "epoch": 0.7966363480350095, + "grad_norm": 3.4407769552269603, + "learning_rate": 2.0919133859067174e-06, + "loss": 1.449, + "step": 4642 + }, + { + "epoch": 0.7968079629311824, + "grad_norm": 4.834590952221245, + "learning_rate": 2.0885125305220267e-06, + "loss": 1.5994, + "step": 4643 + }, + { + "epoch": 0.7969795778273554, + "grad_norm": 3.5615286063482854, + "learning_rate": 2.0851141194320688e-06, + "loss": 1.4866, + "step": 4644 + }, + { + "epoch": 0.7971511927235284, + "grad_norm": 4.019456861440628, + "learning_rate": 2.0817181536868035e-06, + "loss": 1.3091, + "step": 4645 + }, + { + "epoch": 0.7973228076197014, + "grad_norm": 3.59124948231215, + "learning_rate": 2.0783246343354334e-06, + "loss": 1.49, + "step": 4646 + }, + { + "epoch": 0.7974944225158743, + "grad_norm": 6.121992832398803, + "learning_rate": 2.0749335624264e-06, + "loss": 1.5963, + "step": 4647 + }, + { + "epoch": 0.7976660374120473, + "grad_norm": 4.5099743319393415, + "learning_rate": 2.071544939007397e-06, + "loss": 1.5077, + "step": 4648 + }, + { + "epoch": 0.7978376523082203, + "grad_norm": 3.673607780839353, + "learning_rate": 2.068158765125352e-06, + "loss": 1.3995, + "step": 4649 + }, + { + "epoch": 0.7980092672043934, + "grad_norm": 4.133335034633506, + "learning_rate": 2.064775041826442e-06, + "loss": 1.4355, + "step": 4650 + }, + { + "epoch": 0.7981808821005664, + "grad_norm": 3.9052805724034685, + "learning_rate": 2.061393770156088e-06, + "loss": 1.7653, + "step": 4651 + }, + { + "epoch": 0.7983524969967393, + "grad_norm": 4.322315352817901, + "learning_rate": 2.058014951158952e-06, + "loss": 1.6881, + "step": 4652 + }, + { + "epoch": 0.7985241118929123, + "grad_norm": 3.0129296469013007, + "learning_rate": 2.0546385858789363e-06, + "loss": 1.1219, + "step": 4653 + }, + { + "epoch": 0.7986957267890853, + "grad_norm": 5.431230361859624, + "learning_rate": 2.0512646753591827e-06, + "loss": 1.6602, + "step": 4654 + }, + { + "epoch": 0.7988673416852583, + "grad_norm": 3.5238135179744012, + "learning_rate": 2.047893220642081e-06, + "loss": 1.4516, + "step": 4655 + }, + { + "epoch": 0.7990389565814313, + "grad_norm": 4.051884364875317, + "learning_rate": 2.0445242227692597e-06, + "loss": 1.4125, + "step": 4656 + }, + { + "epoch": 0.7992105714776042, + "grad_norm": 4.0383029252118385, + "learning_rate": 2.0411576827815904e-06, + "loss": 1.7096, + "step": 4657 + }, + { + "epoch": 0.7993821863737772, + "grad_norm": 4.187493705477895, + "learning_rate": 2.0377936017191803e-06, + "loss": 1.492, + "step": 4658 + }, + { + "epoch": 0.7995538012699502, + "grad_norm": 4.456936084030553, + "learning_rate": 2.0344319806213774e-06, + "loss": 1.5869, + "step": 4659 + }, + { + "epoch": 0.7997254161661232, + "grad_norm": 4.246077510198194, + "learning_rate": 2.031072820526775e-06, + "loss": 1.3872, + "step": 4660 + 
}, + { + "epoch": 0.7998970310622963, + "grad_norm": 3.4948357368580245, + "learning_rate": 2.0277161224732012e-06, + "loss": 1.4101, + "step": 4661 + }, + { + "epoch": 0.8000686459584692, + "grad_norm": 4.2907802548054335, + "learning_rate": 2.024361887497728e-06, + "loss": 1.4822, + "step": 4662 + }, + { + "epoch": 0.8002402608546422, + "grad_norm": 4.0944230094276275, + "learning_rate": 2.02101011663666e-06, + "loss": 1.5002, + "step": 4663 + }, + { + "epoch": 0.8004118757508152, + "grad_norm": 3.90006943843002, + "learning_rate": 2.017660810925548e-06, + "loss": 1.6627, + "step": 4664 + }, + { + "epoch": 0.8005834906469882, + "grad_norm": 4.357170239633029, + "learning_rate": 2.014313971399171e-06, + "loss": 1.7669, + "step": 4665 + }, + { + "epoch": 0.8007551055431611, + "grad_norm": 3.982979722074304, + "learning_rate": 2.010969599091557e-06, + "loss": 1.4318, + "step": 4666 + }, + { + "epoch": 0.8009267204393341, + "grad_norm": 3.768009542140195, + "learning_rate": 2.0076276950359687e-06, + "loss": 1.5569, + "step": 4667 + }, + { + "epoch": 0.8010983353355071, + "grad_norm": 4.126049283758045, + "learning_rate": 2.0042882602648996e-06, + "loss": 1.6959, + "step": 4668 + }, + { + "epoch": 0.8012699502316801, + "grad_norm": 4.634676631020037, + "learning_rate": 2.0009512958100862e-06, + "loss": 1.7168, + "step": 4669 + }, + { + "epoch": 0.8014415651278531, + "grad_norm": 3.6905406160230276, + "learning_rate": 1.9976168027025066e-06, + "loss": 1.4887, + "step": 4670 + }, + { + "epoch": 0.801613180024026, + "grad_norm": 4.991378128678878, + "learning_rate": 1.994284781972361e-06, + "loss": 1.6904, + "step": 4671 + }, + { + "epoch": 0.801784794920199, + "grad_norm": 3.8939800226272063, + "learning_rate": 1.990955234649102e-06, + "loss": 1.4813, + "step": 4672 + }, + { + "epoch": 0.8019564098163721, + "grad_norm": 4.640334195680962, + "learning_rate": 1.9876281617614045e-06, + "loss": 1.536, + "step": 4673 + }, + { + "epoch": 0.8021280247125451, + "grad_norm": 5.02677592844051, + "learning_rate": 1.9843035643371865e-06, + "loss": 1.5338, + "step": 4674 + }, + { + "epoch": 0.8022996396087181, + "grad_norm": 3.88387810281538, + "learning_rate": 1.9809814434036e-06, + "loss": 1.4312, + "step": 4675 + }, + { + "epoch": 0.802471254504891, + "grad_norm": 5.085624113110938, + "learning_rate": 1.977661799987035e-06, + "loss": 1.5967, + "step": 4676 + }, + { + "epoch": 0.802642869401064, + "grad_norm": 4.10904531677117, + "learning_rate": 1.974344635113108e-06, + "loss": 1.6195, + "step": 4677 + }, + { + "epoch": 0.802814484297237, + "grad_norm": 4.139654311513187, + "learning_rate": 1.971029949806673e-06, + "loss": 1.4278, + "step": 4678 + }, + { + "epoch": 0.80298609919341, + "grad_norm": 4.133107737175155, + "learning_rate": 1.9677177450918206e-06, + "loss": 1.5947, + "step": 4679 + }, + { + "epoch": 0.803157714089583, + "grad_norm": 4.337713520500158, + "learning_rate": 1.964408021991874e-06, + "loss": 1.6304, + "step": 4680 + }, + { + "epoch": 0.8033293289857559, + "grad_norm": 4.482565791909271, + "learning_rate": 1.9611007815293926e-06, + "loss": 1.4749, + "step": 4681 + }, + { + "epoch": 0.8035009438819289, + "grad_norm": 5.0380616373243985, + "learning_rate": 1.9577960247261606e-06, + "loss": 1.6686, + "step": 4682 + }, + { + "epoch": 0.803672558778102, + "grad_norm": 3.8579526563832633, + "learning_rate": 1.954493752603199e-06, + "loss": 1.491, + "step": 4683 + }, + { + "epoch": 0.803844173674275, + "grad_norm": 4.5603977193272875, + "learning_rate": 1.951193966180763e-06, + "loss": 
1.614, + "step": 4684 + }, + { + "epoch": 0.8040157885704479, + "grad_norm": 4.913323652369876, + "learning_rate": 1.9478966664783407e-06, + "loss": 1.486, + "step": 4685 + }, + { + "epoch": 0.8041874034666209, + "grad_norm": 4.069079882651872, + "learning_rate": 1.9446018545146493e-06, + "loss": 1.5907, + "step": 4686 + }, + { + "epoch": 0.8043590183627939, + "grad_norm": 3.7424499704879, + "learning_rate": 1.941309531307636e-06, + "loss": 1.4458, + "step": 4687 + }, + { + "epoch": 0.8045306332589669, + "grad_norm": 4.598478927516427, + "learning_rate": 1.9380196978744824e-06, + "loss": 1.7173, + "step": 4688 + }, + { + "epoch": 0.8047022481551399, + "grad_norm": 3.6103840107552085, + "learning_rate": 1.934732355231598e-06, + "loss": 1.4013, + "step": 4689 + }, + { + "epoch": 0.8048738630513128, + "grad_norm": 4.34924179381626, + "learning_rate": 1.9314475043946244e-06, + "loss": 1.5084, + "step": 4690 + }, + { + "epoch": 0.8050454779474858, + "grad_norm": 4.15588545836338, + "learning_rate": 1.928165146378437e-06, + "loss": 1.5732, + "step": 4691 + }, + { + "epoch": 0.8052170928436588, + "grad_norm": 3.692125388138388, + "learning_rate": 1.924885282197132e-06, + "loss": 1.3896, + "step": 4692 + }, + { + "epoch": 0.8053887077398318, + "grad_norm": 4.074850141367472, + "learning_rate": 1.921607912864042e-06, + "loss": 1.7314, + "step": 4693 + }, + { + "epoch": 0.8055603226360049, + "grad_norm": 3.229449380375219, + "learning_rate": 1.91833303939173e-06, + "loss": 1.4361, + "step": 4694 + }, + { + "epoch": 0.8057319375321778, + "grad_norm": 4.390553008453175, + "learning_rate": 1.91506066279198e-06, + "loss": 1.6182, + "step": 4695 + }, + { + "epoch": 0.8059035524283508, + "grad_norm": 4.661397986958235, + "learning_rate": 1.911790784075814e-06, + "loss": 1.4064, + "step": 4696 + }, + { + "epoch": 0.8060751673245238, + "grad_norm": 4.973239896643652, + "learning_rate": 1.908523404253474e-06, + "loss": 1.3861, + "step": 4697 + }, + { + "epoch": 0.8062467822206968, + "grad_norm": 4.3405868232545295, + "learning_rate": 1.9052585243344356e-06, + "loss": 1.5612, + "step": 4698 + }, + { + "epoch": 0.8064183971168697, + "grad_norm": 6.351442865826272, + "learning_rate": 1.9019961453274005e-06, + "loss": 1.5969, + "step": 4699 + }, + { + "epoch": 0.8065900120130427, + "grad_norm": 4.471195740534678, + "learning_rate": 1.8987362682402998e-06, + "loss": 1.5163, + "step": 4700 + }, + { + "epoch": 0.8067616269092157, + "grad_norm": 3.9708576317752065, + "learning_rate": 1.895478894080287e-06, + "loss": 1.4619, + "step": 4701 + }, + { + "epoch": 0.8069332418053887, + "grad_norm": 4.302066986283456, + "learning_rate": 1.8922240238537426e-06, + "loss": 1.506, + "step": 4702 + }, + { + "epoch": 0.8071048567015617, + "grad_norm": 4.100824876716006, + "learning_rate": 1.8889716585662766e-06, + "loss": 1.545, + "step": 4703 + }, + { + "epoch": 0.8072764715977346, + "grad_norm": 4.686480876882765, + "learning_rate": 1.885721799222725e-06, + "loss": 1.6029, + "step": 4704 + }, + { + "epoch": 0.8074480864939076, + "grad_norm": 3.9824909649244393, + "learning_rate": 1.8824744468271506e-06, + "loss": 1.4758, + "step": 4705 + }, + { + "epoch": 0.8076197013900807, + "grad_norm": 4.357171334647611, + "learning_rate": 1.879229602382837e-06, + "loss": 1.6839, + "step": 4706 + }, + { + "epoch": 0.8077913162862537, + "grad_norm": 3.955053416751871, + "learning_rate": 1.875987266892294e-06, + "loss": 1.5745, + "step": 4707 + }, + { + "epoch": 0.8079629311824267, + "grad_norm": 4.724695923305437, + "learning_rate": 
1.872747441357259e-06, + "loss": 1.5799, + "step": 4708 + }, + { + "epoch": 0.8081345460785996, + "grad_norm": 4.232976906985312, + "learning_rate": 1.869510126778694e-06, + "loss": 1.5525, + "step": 4709 + }, + { + "epoch": 0.8083061609747726, + "grad_norm": 5.048713374355663, + "learning_rate": 1.8662753241567855e-06, + "loss": 1.5795, + "step": 4710 + }, + { + "epoch": 0.8084777758709456, + "grad_norm": 3.725181124131211, + "learning_rate": 1.8630430344909378e-06, + "loss": 1.6428, + "step": 4711 + }, + { + "epoch": 0.8086493907671186, + "grad_norm": 4.565425202474584, + "learning_rate": 1.8598132587797891e-06, + "loss": 1.5042, + "step": 4712 + }, + { + "epoch": 0.8088210056632916, + "grad_norm": 4.1232125966384165, + "learning_rate": 1.8565859980211898e-06, + "loss": 1.4514, + "step": 4713 + }, + { + "epoch": 0.8089926205594645, + "grad_norm": 4.727431207816188, + "learning_rate": 1.8533612532122204e-06, + "loss": 1.6301, + "step": 4714 + }, + { + "epoch": 0.8091642354556375, + "grad_norm": 4.875678069537726, + "learning_rate": 1.8501390253491858e-06, + "loss": 1.7241, + "step": 4715 + }, + { + "epoch": 0.8093358503518105, + "grad_norm": 4.4394776014894335, + "learning_rate": 1.8469193154276033e-06, + "loss": 1.6361, + "step": 4716 + }, + { + "epoch": 0.8095074652479836, + "grad_norm": 3.9581792061584764, + "learning_rate": 1.8437021244422237e-06, + "loss": 1.4852, + "step": 4717 + }, + { + "epoch": 0.8096790801441565, + "grad_norm": 4.5138373874870545, + "learning_rate": 1.8404874533870165e-06, + "loss": 1.6123, + "step": 4718 + }, + { + "epoch": 0.8098506950403295, + "grad_norm": 3.9602782813737236, + "learning_rate": 1.837275303255165e-06, + "loss": 1.6562, + "step": 4719 + }, + { + "epoch": 0.8100223099365025, + "grad_norm": 4.6732817448838935, + "learning_rate": 1.834065675039084e-06, + "loss": 1.544, + "step": 4720 + }, + { + "epoch": 0.8101939248326755, + "grad_norm": 3.9327035262853967, + "learning_rate": 1.8308585697304015e-06, + "loss": 1.5526, + "step": 4721 + }, + { + "epoch": 0.8103655397288485, + "grad_norm": 4.416909666806564, + "learning_rate": 1.8276539883199706e-06, + "loss": 1.5307, + "step": 4722 + }, + { + "epoch": 0.8105371546250214, + "grad_norm": 4.253970280231939, + "learning_rate": 1.8244519317978638e-06, + "loss": 1.6755, + "step": 4723 + }, + { + "epoch": 0.8107087695211944, + "grad_norm": 4.217879330674126, + "learning_rate": 1.8212524011533738e-06, + "loss": 1.5802, + "step": 4724 + }, + { + "epoch": 0.8108803844173674, + "grad_norm": 4.031020695680582, + "learning_rate": 1.8180553973750125e-06, + "loss": 1.4769, + "step": 4725 + }, + { + "epoch": 0.8110519993135404, + "grad_norm": 4.257534302144836, + "learning_rate": 1.8148609214505053e-06, + "loss": 1.5336, + "step": 4726 + }, + { + "epoch": 0.8112236142097135, + "grad_norm": 3.7624650914966487, + "learning_rate": 1.8116689743668047e-06, + "loss": 1.6382, + "step": 4727 + }, + { + "epoch": 0.8113952291058864, + "grad_norm": 4.483923293459603, + "learning_rate": 1.808479557110081e-06, + "loss": 1.6449, + "step": 4728 + }, + { + "epoch": 0.8115668440020594, + "grad_norm": 4.462424705003552, + "learning_rate": 1.8052926706657226e-06, + "loss": 1.4666, + "step": 4729 + }, + { + "epoch": 0.8117384588982324, + "grad_norm": 5.586791444242014, + "learning_rate": 1.8021083160183317e-06, + "loss": 1.6004, + "step": 4730 + }, + { + "epoch": 0.8119100737944054, + "grad_norm": 5.2472121785558, + "learning_rate": 1.7989264941517292e-06, + "loss": 1.5153, + "step": 4731 + }, + { + "epoch": 0.8120816886905784, + 
"grad_norm": 4.033707504392723, + "learning_rate": 1.7957472060489568e-06, + "loss": 1.3814, + "step": 4732 + }, + { + "epoch": 0.8122533035867513, + "grad_norm": 4.5260967878582745, + "learning_rate": 1.7925704526922728e-06, + "loss": 1.6383, + "step": 4733 + }, + { + "epoch": 0.8124249184829243, + "grad_norm": 4.129105455195443, + "learning_rate": 1.7893962350631543e-06, + "loss": 1.5586, + "step": 4734 + }, + { + "epoch": 0.8125965333790973, + "grad_norm": 3.376448378450376, + "learning_rate": 1.786224554142285e-06, + "loss": 1.3723, + "step": 4735 + }, + { + "epoch": 0.8127681482752703, + "grad_norm": 4.13608464373614, + "learning_rate": 1.78305541090958e-06, + "loss": 1.5269, + "step": 4736 + }, + { + "epoch": 0.8129397631714432, + "grad_norm": 4.216916110100886, + "learning_rate": 1.7798888063441556e-06, + "loss": 1.4988, + "step": 4737 + }, + { + "epoch": 0.8131113780676162, + "grad_norm": 4.021696832185047, + "learning_rate": 1.776724741424354e-06, + "loss": 1.323, + "step": 4738 + }, + { + "epoch": 0.8132829929637893, + "grad_norm": 3.4524435224767487, + "learning_rate": 1.7735632171277295e-06, + "loss": 1.3778, + "step": 4739 + }, + { + "epoch": 0.8134546078599623, + "grad_norm": 3.8863716758862914, + "learning_rate": 1.770404234431049e-06, + "loss": 1.5976, + "step": 4740 + }, + { + "epoch": 0.8136262227561353, + "grad_norm": 4.906590054353688, + "learning_rate": 1.7672477943102984e-06, + "loss": 1.57, + "step": 4741 + }, + { + "epoch": 0.8137978376523082, + "grad_norm": 4.204491219090956, + "learning_rate": 1.7640938977406786e-06, + "loss": 1.5763, + "step": 4742 + }, + { + "epoch": 0.8139694525484812, + "grad_norm": 4.640025319060085, + "learning_rate": 1.7609425456965957e-06, + "loss": 1.5754, + "step": 4743 + }, + { + "epoch": 0.8141410674446542, + "grad_norm": 3.970601891040093, + "learning_rate": 1.757793739151683e-06, + "loss": 1.7266, + "step": 4744 + }, + { + "epoch": 0.8143126823408272, + "grad_norm": 4.182392166077962, + "learning_rate": 1.7546474790787737e-06, + "loss": 1.4951, + "step": 4745 + }, + { + "epoch": 0.8144842972370002, + "grad_norm": 3.7449454443494186, + "learning_rate": 1.7515037664499257e-06, + "loss": 1.3469, + "step": 4746 + }, + { + "epoch": 0.8146559121331731, + "grad_norm": 3.8282159102359445, + "learning_rate": 1.748362602236403e-06, + "loss": 1.4556, + "step": 4747 + }, + { + "epoch": 0.8148275270293461, + "grad_norm": 3.815868293526736, + "learning_rate": 1.7452239874086884e-06, + "loss": 1.2008, + "step": 4748 + }, + { + "epoch": 0.8149991419255191, + "grad_norm": 4.333051862056449, + "learning_rate": 1.7420879229364695e-06, + "loss": 1.6515, + "step": 4749 + }, + { + "epoch": 0.8151707568216922, + "grad_norm": 3.1891068396424935, + "learning_rate": 1.738954409788648e-06, + "loss": 1.2357, + "step": 4750 + }, + { + "epoch": 0.815342371717865, + "grad_norm": 4.017193360766739, + "learning_rate": 1.7358234489333415e-06, + "loss": 1.5298, + "step": 4751 + }, + { + "epoch": 0.8155139866140381, + "grad_norm": 3.964710874867819, + "learning_rate": 1.7326950413378752e-06, + "loss": 1.2864, + "step": 4752 + }, + { + "epoch": 0.8156856015102111, + "grad_norm": 4.362086560697036, + "learning_rate": 1.7295691879687904e-06, + "loss": 1.755, + "step": 4753 + }, + { + "epoch": 0.8158572164063841, + "grad_norm": 3.8706716414158744, + "learning_rate": 1.7264458897918335e-06, + "loss": 1.443, + "step": 4754 + }, + { + "epoch": 0.8160288313025571, + "grad_norm": 4.1645769703681506, + "learning_rate": 1.7233251477719593e-06, + "loss": 1.6262, + "step": 4755 + 
}, + { + "epoch": 0.81620044619873, + "grad_norm": 4.740649624803317, + "learning_rate": 1.7202069628733398e-06, + "loss": 1.7114, + "step": 4756 + }, + { + "epoch": 0.816372061094903, + "grad_norm": 4.422538843552403, + "learning_rate": 1.7170913360593566e-06, + "loss": 1.4931, + "step": 4757 + }, + { + "epoch": 0.816543675991076, + "grad_norm": 3.7528528981885745, + "learning_rate": 1.7139782682925988e-06, + "loss": 1.5633, + "step": 4758 + }, + { + "epoch": 0.816715290887249, + "grad_norm": 3.7275410790321786, + "learning_rate": 1.71086776053486e-06, + "loss": 1.5592, + "step": 4759 + }, + { + "epoch": 0.816886905783422, + "grad_norm": 4.2392713519734535, + "learning_rate": 1.707759813747153e-06, + "loss": 1.3791, + "step": 4760 + }, + { + "epoch": 0.817058520679595, + "grad_norm": 4.133337514841396, + "learning_rate": 1.7046544288896893e-06, + "loss": 1.3988, + "step": 4761 + }, + { + "epoch": 0.817230135575768, + "grad_norm": 4.0069726982922385, + "learning_rate": 1.7015516069218962e-06, + "loss": 1.5614, + "step": 4762 + }, + { + "epoch": 0.817401750471941, + "grad_norm": 3.7808619741729825, + "learning_rate": 1.6984513488024068e-06, + "loss": 1.3589, + "step": 4763 + }, + { + "epoch": 0.817573365368114, + "grad_norm": 3.6219637454769065, + "learning_rate": 1.6953536554890593e-06, + "loss": 1.4129, + "step": 4764 + }, + { + "epoch": 0.817744980264287, + "grad_norm": 6.2183632778325855, + "learning_rate": 1.6922585279389037e-06, + "loss": 1.6128, + "step": 4765 + }, + { + "epoch": 0.8179165951604599, + "grad_norm": 4.232690515583152, + "learning_rate": 1.6891659671081983e-06, + "loss": 1.4493, + "step": 4766 + }, + { + "epoch": 0.8180882100566329, + "grad_norm": 4.328589401136215, + "learning_rate": 1.6860759739524003e-06, + "loss": 1.6701, + "step": 4767 + }, + { + "epoch": 0.8182598249528059, + "grad_norm": 3.515764342831475, + "learning_rate": 1.6829885494261832e-06, + "loss": 1.2537, + "step": 4768 + }, + { + "epoch": 0.8184314398489789, + "grad_norm": 4.523942785750915, + "learning_rate": 1.6799036944834191e-06, + "loss": 1.3621, + "step": 4769 + }, + { + "epoch": 0.8186030547451518, + "grad_norm": 4.123277429371624, + "learning_rate": 1.6768214100771917e-06, + "loss": 1.4811, + "step": 4770 + }, + { + "epoch": 0.8187746696413248, + "grad_norm": 4.110616187162248, + "learning_rate": 1.6737416971597876e-06, + "loss": 1.547, + "step": 4771 + }, + { + "epoch": 0.8189462845374978, + "grad_norm": 3.859634526065418, + "learning_rate": 1.670664556682705e-06, + "loss": 1.6499, + "step": 4772 + }, + { + "epoch": 0.8191178994336709, + "grad_norm": 3.869249966716569, + "learning_rate": 1.6675899895966374e-06, + "loss": 1.3126, + "step": 4773 + }, + { + "epoch": 0.8192895143298439, + "grad_norm": 3.946414039882373, + "learning_rate": 1.6645179968514858e-06, + "loss": 1.4108, + "step": 4774 + }, + { + "epoch": 0.8194611292260168, + "grad_norm": 4.381632964889389, + "learning_rate": 1.6614485793963619e-06, + "loss": 1.5608, + "step": 4775 + }, + { + "epoch": 0.8196327441221898, + "grad_norm": 3.692932448305564, + "learning_rate": 1.6583817381795775e-06, + "loss": 1.5078, + "step": 4776 + }, + { + "epoch": 0.8198043590183628, + "grad_norm": 3.5047680662497878, + "learning_rate": 1.655317474148652e-06, + "loss": 1.3324, + "step": 4777 + }, + { + "epoch": 0.8199759739145358, + "grad_norm": 4.581145451349372, + "learning_rate": 1.652255788250301e-06, + "loss": 1.7512, + "step": 4778 + }, + { + "epoch": 0.8201475888107088, + "grad_norm": 4.000911757978101, + "learning_rate": 1.6491966814304483e-06, 
+ "loss": 1.4089, + "step": 4779 + }, + { + "epoch": 0.8203192037068817, + "grad_norm": 4.5360554149582395, + "learning_rate": 1.6461401546342214e-06, + "loss": 1.5827, + "step": 4780 + }, + { + "epoch": 0.8204908186030547, + "grad_norm": 4.479289803549678, + "learning_rate": 1.6430862088059497e-06, + "loss": 1.5964, + "step": 4781 + }, + { + "epoch": 0.8206624334992277, + "grad_norm": 4.987443943667715, + "learning_rate": 1.64003484488917e-06, + "loss": 1.5473, + "step": 4782 + }, + { + "epoch": 0.8208340483954008, + "grad_norm": 4.239196834024588, + "learning_rate": 1.63698606382661e-06, + "loss": 1.6608, + "step": 4783 + }, + { + "epoch": 0.8210056632915738, + "grad_norm": 3.909332322679399, + "learning_rate": 1.6339398665602112e-06, + "loss": 1.7064, + "step": 4784 + }, + { + "epoch": 0.8211772781877467, + "grad_norm": 4.0157885244486256, + "learning_rate": 1.630896254031108e-06, + "loss": 1.4625, + "step": 4785 + }, + { + "epoch": 0.8213488930839197, + "grad_norm": 4.699746618327083, + "learning_rate": 1.6278552271796422e-06, + "loss": 1.5505, + "step": 4786 + }, + { + "epoch": 0.8215205079800927, + "grad_norm": 4.451636242883355, + "learning_rate": 1.624816786945358e-06, + "loss": 1.3828, + "step": 4787 + }, + { + "epoch": 0.8216921228762657, + "grad_norm": 4.109800815390824, + "learning_rate": 1.6217809342669898e-06, + "loss": 1.4333, + "step": 4788 + }, + { + "epoch": 0.8218637377724386, + "grad_norm": 3.6873793313267527, + "learning_rate": 1.6187476700824855e-06, + "loss": 1.4101, + "step": 4789 + }, + { + "epoch": 0.8220353526686116, + "grad_norm": 3.985709483580345, + "learning_rate": 1.6157169953289876e-06, + "loss": 1.5512, + "step": 4790 + }, + { + "epoch": 0.8222069675647846, + "grad_norm": 4.202289954357266, + "learning_rate": 1.6126889109428345e-06, + "loss": 1.5665, + "step": 4791 + }, + { + "epoch": 0.8223785824609576, + "grad_norm": 4.612864097344822, + "learning_rate": 1.609663417859575e-06, + "loss": 1.3557, + "step": 4792 + }, + { + "epoch": 0.8225501973571306, + "grad_norm": 3.8126135954779325, + "learning_rate": 1.6066405170139431e-06, + "loss": 1.2275, + "step": 4793 + }, + { + "epoch": 0.8227218122533035, + "grad_norm": 4.203206346147437, + "learning_rate": 1.6036202093398833e-06, + "loss": 1.6516, + "step": 4794 + }, + { + "epoch": 0.8228934271494766, + "grad_norm": 4.69713864144923, + "learning_rate": 1.6006024957705357e-06, + "loss": 1.6989, + "step": 4795 + }, + { + "epoch": 0.8230650420456496, + "grad_norm": 4.15576583508913, + "learning_rate": 1.5975873772382399e-06, + "loss": 1.5273, + "step": 4796 + }, + { + "epoch": 0.8232366569418226, + "grad_norm": 3.7490580552955493, + "learning_rate": 1.594574854674531e-06, + "loss": 1.3211, + "step": 4797 + }, + { + "epoch": 0.8234082718379956, + "grad_norm": 4.282943635534798, + "learning_rate": 1.59156492901014e-06, + "loss": 1.3939, + "step": 4798 + }, + { + "epoch": 0.8235798867341685, + "grad_norm": 4.76895852168925, + "learning_rate": 1.5885576011750004e-06, + "loss": 1.4788, + "step": 4799 + }, + { + "epoch": 0.8237515016303415, + "grad_norm": 3.8300849614430077, + "learning_rate": 1.5855528720982438e-06, + "loss": 1.4255, + "step": 4800 + }, + { + "epoch": 0.8239231165265145, + "grad_norm": 5.016029711279405, + "learning_rate": 1.5825507427081976e-06, + "loss": 1.4969, + "step": 4801 + }, + { + "epoch": 0.8240947314226875, + "grad_norm": 3.6640126225191882, + "learning_rate": 1.5795512139323832e-06, + "loss": 1.4868, + "step": 4802 + }, + { + "epoch": 0.8242663463188605, + "grad_norm": 4.36041446119268, + 
"learning_rate": 1.5765542866975181e-06, + "loss": 1.7193, + "step": 4803 + }, + { + "epoch": 0.8244379612150334, + "grad_norm": 4.12413500391701, + "learning_rate": 1.5735599619295206e-06, + "loss": 1.5277, + "step": 4804 + }, + { + "epoch": 0.8246095761112064, + "grad_norm": 3.475721673600634, + "learning_rate": 1.5705682405535028e-06, + "loss": 1.3113, + "step": 4805 + }, + { + "epoch": 0.8247811910073795, + "grad_norm": 3.7523614754537133, + "learning_rate": 1.5675791234937753e-06, + "loss": 1.5003, + "step": 4806 + }, + { + "epoch": 0.8249528059035525, + "grad_norm": 4.476343290594001, + "learning_rate": 1.5645926116738353e-06, + "loss": 1.4724, + "step": 4807 + }, + { + "epoch": 0.8251244207997254, + "grad_norm": 3.658220569229897, + "learning_rate": 1.5616087060163865e-06, + "loss": 1.4263, + "step": 4808 + }, + { + "epoch": 0.8252960356958984, + "grad_norm": 4.016234509455902, + "learning_rate": 1.5586274074433171e-06, + "loss": 1.4076, + "step": 4809 + }, + { + "epoch": 0.8254676505920714, + "grad_norm": 4.508069012498651, + "learning_rate": 1.5556487168757161e-06, + "loss": 1.6508, + "step": 4810 + }, + { + "epoch": 0.8256392654882444, + "grad_norm": 3.976340598480248, + "learning_rate": 1.5526726352338695e-06, + "loss": 1.5211, + "step": 4811 + }, + { + "epoch": 0.8258108803844174, + "grad_norm": 4.101646086204409, + "learning_rate": 1.549699163437247e-06, + "loss": 1.4162, + "step": 4812 + }, + { + "epoch": 0.8259824952805903, + "grad_norm": 3.453704204465394, + "learning_rate": 1.54672830240452e-06, + "loss": 1.3222, + "step": 4813 + }, + { + "epoch": 0.8261541101767633, + "grad_norm": 3.3381820210281647, + "learning_rate": 1.5437600530535545e-06, + "loss": 1.2713, + "step": 4814 + }, + { + "epoch": 0.8263257250729363, + "grad_norm": 4.318833631122905, + "learning_rate": 1.540794416301402e-06, + "loss": 1.5501, + "step": 4815 + }, + { + "epoch": 0.8264973399691093, + "grad_norm": 4.051409876299266, + "learning_rate": 1.5378313930643141e-06, + "loss": 1.5782, + "step": 4816 + }, + { + "epoch": 0.8266689548652824, + "grad_norm": 3.892167570481565, + "learning_rate": 1.5348709842577302e-06, + "loss": 1.5381, + "step": 4817 + }, + { + "epoch": 0.8268405697614553, + "grad_norm": 4.539698615082214, + "learning_rate": 1.5319131907962837e-06, + "loss": 1.4202, + "step": 4818 + }, + { + "epoch": 0.8270121846576283, + "grad_norm": 4.287637444952206, + "learning_rate": 1.528958013593801e-06, + "loss": 1.4394, + "step": 4819 + }, + { + "epoch": 0.8271837995538013, + "grad_norm": 5.100675825487883, + "learning_rate": 1.5260054535633018e-06, + "loss": 1.4594, + "step": 4820 + }, + { + "epoch": 0.8273554144499743, + "grad_norm": 3.6114765169500624, + "learning_rate": 1.523055511616992e-06, + "loss": 1.4732, + "step": 4821 + }, + { + "epoch": 0.8275270293461472, + "grad_norm": 4.5860065102868015, + "learning_rate": 1.5201081886662695e-06, + "loss": 1.5918, + "step": 4822 + }, + { + "epoch": 0.8276986442423202, + "grad_norm": 4.490787211977917, + "learning_rate": 1.5171634856217265e-06, + "loss": 1.5841, + "step": 4823 + }, + { + "epoch": 0.8278702591384932, + "grad_norm": 4.267019101678434, + "learning_rate": 1.5142214033931447e-06, + "loss": 1.4969, + "step": 4824 + }, + { + "epoch": 0.8280418740346662, + "grad_norm": 3.842204597012564, + "learning_rate": 1.5112819428894976e-06, + "loss": 1.4329, + "step": 4825 + }, + { + "epoch": 0.8282134889308392, + "grad_norm": 4.497900159979859, + "learning_rate": 1.5083451050189446e-06, + "loss": 1.3571, + "step": 4826 + }, + { + "epoch": 
0.8283851038270121, + "grad_norm": 4.387558894410822, + "learning_rate": 1.5054108906888342e-06, + "loss": 1.5545, + "step": 4827 + }, + { + "epoch": 0.8285567187231851, + "grad_norm": 4.432256994511522, + "learning_rate": 1.5024793008057093e-06, + "loss": 1.5436, + "step": 4828 + }, + { + "epoch": 0.8287283336193582, + "grad_norm": 4.223784179180186, + "learning_rate": 1.4995503362753006e-06, + "loss": 1.6514, + "step": 4829 + }, + { + "epoch": 0.8288999485155312, + "grad_norm": 4.565491457944558, + "learning_rate": 1.4966239980025278e-06, + "loss": 1.4604, + "step": 4830 + }, + { + "epoch": 0.8290715634117042, + "grad_norm": 4.046117453594091, + "learning_rate": 1.493700286891494e-06, + "loss": 1.3684, + "step": 4831 + }, + { + "epoch": 0.8292431783078771, + "grad_norm": 3.900408343149284, + "learning_rate": 1.4907792038454994e-06, + "loss": 1.5458, + "step": 4832 + }, + { + "epoch": 0.8294147932040501, + "grad_norm": 5.6475835022018614, + "learning_rate": 1.4878607497670229e-06, + "loss": 1.6353, + "step": 4833 + }, + { + "epoch": 0.8295864081002231, + "grad_norm": 4.502249600480854, + "learning_rate": 1.4849449255577386e-06, + "loss": 1.5623, + "step": 4834 + }, + { + "epoch": 0.8297580229963961, + "grad_norm": 3.297642927712247, + "learning_rate": 1.4820317321185074e-06, + "loss": 1.1199, + "step": 4835 + }, + { + "epoch": 0.8299296378925691, + "grad_norm": 3.469177220456171, + "learning_rate": 1.4791211703493713e-06, + "loss": 1.4682, + "step": 4836 + }, + { + "epoch": 0.830101252788742, + "grad_norm": 3.687979980120009, + "learning_rate": 1.4762132411495644e-06, + "loss": 1.4484, + "step": 4837 + }, + { + "epoch": 0.830272867684915, + "grad_norm": 4.028501921288012, + "learning_rate": 1.4733079454175102e-06, + "loss": 1.4705, + "step": 4838 + }, + { + "epoch": 0.830444482581088, + "grad_norm": 4.243605256930422, + "learning_rate": 1.4704052840508087e-06, + "loss": 1.4964, + "step": 4839 + }, + { + "epoch": 0.8306160974772611, + "grad_norm": 4.513967087383656, + "learning_rate": 1.4675052579462578e-06, + "loss": 1.5293, + "step": 4840 + }, + { + "epoch": 0.830787712373434, + "grad_norm": 4.109978109307644, + "learning_rate": 1.464607867999831e-06, + "loss": 1.4468, + "step": 4841 + }, + { + "epoch": 0.830959327269607, + "grad_norm": 3.780967028429485, + "learning_rate": 1.4617131151066933e-06, + "loss": 1.4257, + "step": 4842 + }, + { + "epoch": 0.83113094216578, + "grad_norm": 4.9374045544993255, + "learning_rate": 1.4588210001611923e-06, + "loss": 1.4617, + "step": 4843 + }, + { + "epoch": 0.831302557061953, + "grad_norm": 4.334631495387098, + "learning_rate": 1.4559315240568672e-06, + "loss": 1.5331, + "step": 4844 + }, + { + "epoch": 0.831474171958126, + "grad_norm": 4.182110435102105, + "learning_rate": 1.4530446876864324e-06, + "loss": 1.5767, + "step": 4845 + }, + { + "epoch": 0.8316457868542989, + "grad_norm": 4.413169523866981, + "learning_rate": 1.4501604919417878e-06, + "loss": 1.7041, + "step": 4846 + }, + { + "epoch": 0.8318174017504719, + "grad_norm": 4.363013721531667, + "learning_rate": 1.447278937714024e-06, + "loss": 1.5366, + "step": 4847 + }, + { + "epoch": 0.8319890166466449, + "grad_norm": 4.63214397270743, + "learning_rate": 1.444400025893412e-06, + "loss": 1.6583, + "step": 4848 + }, + { + "epoch": 0.8321606315428179, + "grad_norm": 3.958468464272295, + "learning_rate": 1.4415237573694086e-06, + "loss": 1.5197, + "step": 4849 + }, + { + "epoch": 0.832332246438991, + "grad_norm": 3.558106307386517, + "learning_rate": 1.438650133030648e-06, + "loss": 1.5807, + 
"step": 4850 + }, + { + "epoch": 0.8325038613351639, + "grad_norm": 4.445468644247436, + "learning_rate": 1.4357791537649512e-06, + "loss": 1.6686, + "step": 4851 + }, + { + "epoch": 0.8326754762313369, + "grad_norm": 4.2650448005872805, + "learning_rate": 1.4329108204593235e-06, + "loss": 1.6139, + "step": 4852 + }, + { + "epoch": 0.8328470911275099, + "grad_norm": 4.5603046536703244, + "learning_rate": 1.43004513399995e-06, + "loss": 1.5786, + "step": 4853 + }, + { + "epoch": 0.8330187060236829, + "grad_norm": 3.9397265358772655, + "learning_rate": 1.4271820952722038e-06, + "loss": 1.5033, + "step": 4854 + }, + { + "epoch": 0.8331903209198559, + "grad_norm": 4.146378090835399, + "learning_rate": 1.4243217051606285e-06, + "loss": 1.4237, + "step": 4855 + }, + { + "epoch": 0.8333619358160288, + "grad_norm": 3.4503975903945916, + "learning_rate": 1.4214639645489625e-06, + "loss": 1.3647, + "step": 4856 + }, + { + "epoch": 0.8335335507122018, + "grad_norm": 3.9253066351318475, + "learning_rate": 1.4186088743201144e-06, + "loss": 1.8014, + "step": 4857 + }, + { + "epoch": 0.8337051656083748, + "grad_norm": 4.089072348629632, + "learning_rate": 1.4157564353561815e-06, + "loss": 1.4447, + "step": 4858 + }, + { + "epoch": 0.8338767805045478, + "grad_norm": 5.051106196694771, + "learning_rate": 1.4129066485384413e-06, + "loss": 1.5858, + "step": 4859 + }, + { + "epoch": 0.8340483954007207, + "grad_norm": 5.351557048739141, + "learning_rate": 1.410059514747345e-06, + "loss": 1.6448, + "step": 4860 + }, + { + "epoch": 0.8342200102968937, + "grad_norm": 4.247637833490804, + "learning_rate": 1.4072150348625336e-06, + "loss": 1.5563, + "step": 4861 + }, + { + "epoch": 0.8343916251930668, + "grad_norm": 3.9093476089540244, + "learning_rate": 1.4043732097628239e-06, + "loss": 1.3501, + "step": 4862 + }, + { + "epoch": 0.8345632400892398, + "grad_norm": 3.712540712754464, + "learning_rate": 1.401534040326209e-06, + "loss": 1.4207, + "step": 4863 + }, + { + "epoch": 0.8347348549854128, + "grad_norm": 3.785723576531704, + "learning_rate": 1.3986975274298687e-06, + "loss": 1.4789, + "step": 4864 + }, + { + "epoch": 0.8349064698815857, + "grad_norm": 3.7824480604787802, + "learning_rate": 1.395863671950155e-06, + "loss": 1.5411, + "step": 4865 + }, + { + "epoch": 0.8350780847777587, + "grad_norm": 4.009935612798529, + "learning_rate": 1.3930324747626034e-06, + "loss": 1.4992, + "step": 4866 + }, + { + "epoch": 0.8352496996739317, + "grad_norm": 4.213824742038228, + "learning_rate": 1.3902039367419262e-06, + "loss": 1.5721, + "step": 4867 + }, + { + "epoch": 0.8354213145701047, + "grad_norm": 5.197594748157084, + "learning_rate": 1.3873780587620178e-06, + "loss": 1.5087, + "step": 4868 + }, + { + "epoch": 0.8355929294662777, + "grad_norm": 4.160118945048529, + "learning_rate": 1.3845548416959464e-06, + "loss": 1.4403, + "step": 4869 + }, + { + "epoch": 0.8357645443624506, + "grad_norm": 3.6151630356188655, + "learning_rate": 1.3817342864159555e-06, + "loss": 1.4555, + "step": 4870 + }, + { + "epoch": 0.8359361592586236, + "grad_norm": 3.7273819753883157, + "learning_rate": 1.3789163937934735e-06, + "loss": 1.4141, + "step": 4871 + }, + { + "epoch": 0.8361077741547966, + "grad_norm": 4.351352865307784, + "learning_rate": 1.3761011646991019e-06, + "loss": 1.5309, + "step": 4872 + }, + { + "epoch": 0.8362793890509697, + "grad_norm": 5.068828150582419, + "learning_rate": 1.3732886000026235e-06, + "loss": 1.5127, + "step": 4873 + }, + { + "epoch": 0.8364510039471426, + "grad_norm": 4.903164839468747, + 
"learning_rate": 1.3704787005729914e-06, + "loss": 1.4871, + "step": 4874 + }, + { + "epoch": 0.8366226188433156, + "grad_norm": 4.000762469224886, + "learning_rate": 1.3676714672783376e-06, + "loss": 1.7044, + "step": 4875 + }, + { + "epoch": 0.8367942337394886, + "grad_norm": 4.224156600248199, + "learning_rate": 1.3648669009859716e-06, + "loss": 1.292, + "step": 4876 + }, + { + "epoch": 0.8369658486356616, + "grad_norm": 3.849538820719491, + "learning_rate": 1.3620650025623805e-06, + "loss": 1.4644, + "step": 4877 + }, + { + "epoch": 0.8371374635318346, + "grad_norm": 4.2560570899830905, + "learning_rate": 1.3592657728732262e-06, + "loss": 1.8338, + "step": 4878 + }, + { + "epoch": 0.8373090784280075, + "grad_norm": 3.586444096943704, + "learning_rate": 1.35646921278334e-06, + "loss": 1.4924, + "step": 4879 + }, + { + "epoch": 0.8374806933241805, + "grad_norm": 4.701462251937326, + "learning_rate": 1.35367532315674e-06, + "loss": 1.5888, + "step": 4880 + }, + { + "epoch": 0.8376523082203535, + "grad_norm": 4.039342999633316, + "learning_rate": 1.350884104856608e-06, + "loss": 1.5599, + "step": 4881 + }, + { + "epoch": 0.8378239231165265, + "grad_norm": 5.105211189598625, + "learning_rate": 1.348095558745306e-06, + "loss": 1.5338, + "step": 4882 + }, + { + "epoch": 0.8379955380126995, + "grad_norm": 4.124177488479943, + "learning_rate": 1.3453096856843716e-06, + "loss": 1.5136, + "step": 4883 + }, + { + "epoch": 0.8381671529088724, + "grad_norm": 4.008081851658497, + "learning_rate": 1.3425264865345134e-06, + "loss": 1.2128, + "step": 4884 + }, + { + "epoch": 0.8383387678050455, + "grad_norm": 4.953653803735307, + "learning_rate": 1.339745962155613e-06, + "loss": 1.5761, + "step": 4885 + }, + { + "epoch": 0.8385103827012185, + "grad_norm": 3.6739364663311913, + "learning_rate": 1.3369681134067314e-06, + "loss": 1.5181, + "step": 4886 + }, + { + "epoch": 0.8386819975973915, + "grad_norm": 4.060577047166199, + "learning_rate": 1.3341929411460986e-06, + "loss": 1.4968, + "step": 4887 + }, + { + "epoch": 0.8388536124935645, + "grad_norm": 4.229375995298623, + "learning_rate": 1.3314204462311186e-06, + "loss": 1.3645, + "step": 4888 + }, + { + "epoch": 0.8390252273897374, + "grad_norm": 4.906805488778325, + "learning_rate": 1.3286506295183643e-06, + "loss": 1.4489, + "step": 4889 + }, + { + "epoch": 0.8391968422859104, + "grad_norm": 4.63708358627252, + "learning_rate": 1.3258834918635865e-06, + "loss": 1.6983, + "step": 4890 + }, + { + "epoch": 0.8393684571820834, + "grad_norm": 3.884785921572161, + "learning_rate": 1.3231190341217081e-06, + "loss": 1.4352, + "step": 4891 + }, + { + "epoch": 0.8395400720782564, + "grad_norm": 4.8258368195583765, + "learning_rate": 1.3203572571468238e-06, + "loss": 1.4187, + "step": 4892 + }, + { + "epoch": 0.8397116869744293, + "grad_norm": 4.848819391784695, + "learning_rate": 1.3175981617921974e-06, + "loss": 1.5742, + "step": 4893 + }, + { + "epoch": 0.8398833018706023, + "grad_norm": 5.020448839612587, + "learning_rate": 1.3148417489102628e-06, + "loss": 1.654, + "step": 4894 + }, + { + "epoch": 0.8400549167667754, + "grad_norm": 5.224156094790471, + "learning_rate": 1.3120880193526297e-06, + "loss": 1.6418, + "step": 4895 + }, + { + "epoch": 0.8402265316629484, + "grad_norm": 4.867361771294044, + "learning_rate": 1.3093369739700768e-06, + "loss": 1.4601, + "step": 4896 + }, + { + "epoch": 0.8403981465591214, + "grad_norm": 4.696972094899303, + "learning_rate": 1.306588613612557e-06, + "loss": 1.5774, + "step": 4897 + }, + { + "epoch": 
0.8405697614552943, + "grad_norm": 4.833801867960432, + "learning_rate": 1.3038429391291862e-06, + "loss": 1.6262, + "step": 4898 + }, + { + "epoch": 0.8407413763514673, + "grad_norm": 5.050016630207017, + "learning_rate": 1.3010999513682586e-06, + "loss": 1.4824, + "step": 4899 + }, + { + "epoch": 0.8409129912476403, + "grad_norm": 4.516460164717836, + "learning_rate": 1.298359651177229e-06, + "loss": 1.5337, + "step": 4900 + }, + { + "epoch": 0.8410846061438133, + "grad_norm": 4.131405454190068, + "learning_rate": 1.2956220394027309e-06, + "loss": 1.4123, + "step": 4901 + }, + { + "epoch": 0.8412562210399863, + "grad_norm": 4.619958398703142, + "learning_rate": 1.2928871168905643e-06, + "loss": 1.8099, + "step": 4902 + }, + { + "epoch": 0.8414278359361592, + "grad_norm": 4.142511540821578, + "learning_rate": 1.2901548844856948e-06, + "loss": 1.5955, + "step": 4903 + }, + { + "epoch": 0.8415994508323322, + "grad_norm": 4.157974422323813, + "learning_rate": 1.2874253430322604e-06, + "loss": 1.5653, + "step": 4904 + }, + { + "epoch": 0.8417710657285052, + "grad_norm": 4.3324850075218, + "learning_rate": 1.2846984933735696e-06, + "loss": 1.6068, + "step": 4905 + }, + { + "epoch": 0.8419426806246783, + "grad_norm": 3.551207823278202, + "learning_rate": 1.281974336352092e-06, + "loss": 1.3378, + "step": 4906 + }, + { + "epoch": 0.8421142955208513, + "grad_norm": 4.0570514944507305, + "learning_rate": 1.2792528728094756e-06, + "loss": 1.375, + "step": 4907 + }, + { + "epoch": 0.8422859104170242, + "grad_norm": 4.149422610434124, + "learning_rate": 1.2765341035865253e-06, + "loss": 1.5898, + "step": 4908 + }, + { + "epoch": 0.8424575253131972, + "grad_norm": 4.00378279437493, + "learning_rate": 1.2738180295232206e-06, + "loss": 1.5508, + "step": 4909 + }, + { + "epoch": 0.8426291402093702, + "grad_norm": 3.874874270494233, + "learning_rate": 1.2711046514587067e-06, + "loss": 1.5235, + "step": 4910 + }, + { + "epoch": 0.8428007551055432, + "grad_norm": 4.733481077862293, + "learning_rate": 1.2683939702312986e-06, + "loss": 1.6161, + "step": 4911 + }, + { + "epoch": 0.8429723700017161, + "grad_norm": 3.905714841185302, + "learning_rate": 1.2656859866784721e-06, + "loss": 1.4473, + "step": 4912 + }, + { + "epoch": 0.8431439848978891, + "grad_norm": 4.001001903573795, + "learning_rate": 1.2629807016368712e-06, + "loss": 1.4721, + "step": 4913 + }, + { + "epoch": 0.8433155997940621, + "grad_norm": 4.717438227131749, + "learning_rate": 1.2602781159423095e-06, + "loss": 1.6382, + "step": 4914 + }, + { + "epoch": 0.8434872146902351, + "grad_norm": 4.286179263292802, + "learning_rate": 1.2575782304297647e-06, + "loss": 1.5085, + "step": 4915 + }, + { + "epoch": 0.8436588295864081, + "grad_norm": 4.080322141344338, + "learning_rate": 1.2548810459333816e-06, + "loss": 1.6588, + "step": 4916 + }, + { + "epoch": 0.843830444482581, + "grad_norm": 5.021944623988521, + "learning_rate": 1.2521865632864694e-06, + "loss": 1.5803, + "step": 4917 + }, + { + "epoch": 0.844002059378754, + "grad_norm": 4.175139683115828, + "learning_rate": 1.2494947833214976e-06, + "loss": 1.2728, + "step": 4918 + }, + { + "epoch": 0.8441736742749271, + "grad_norm": 4.364286458913085, + "learning_rate": 1.24680570687011e-06, + "loss": 1.3974, + "step": 4919 + }, + { + "epoch": 0.8443452891711001, + "grad_norm": 4.732550637583035, + "learning_rate": 1.2441193347631086e-06, + "loss": 1.3219, + "step": 4920 + }, + { + "epoch": 0.8445169040672731, + "grad_norm": 4.433737436142455, + "learning_rate": 1.2414356678304652e-06, + "loss": 
1.5013, + "step": 4921 + }, + { + "epoch": 0.844688518963446, + "grad_norm": 5.3651236085945575, + "learning_rate": 1.2387547069013073e-06, + "loss": 1.4414, + "step": 4922 + }, + { + "epoch": 0.844860133859619, + "grad_norm": 5.52513695681018, + "learning_rate": 1.236076452803937e-06, + "loss": 1.6948, + "step": 4923 + }, + { + "epoch": 0.845031748755792, + "grad_norm": 3.537381656573495, + "learning_rate": 1.2334009063658103e-06, + "loss": 1.4957, + "step": 4924 + }, + { + "epoch": 0.845203363651965, + "grad_norm": 4.492606462761248, + "learning_rate": 1.2307280684135514e-06, + "loss": 1.3089, + "step": 4925 + }, + { + "epoch": 0.8453749785481379, + "grad_norm": 4.3620071337405895, + "learning_rate": 1.2280579397729507e-06, + "loss": 1.5648, + "step": 4926 + }, + { + "epoch": 0.8455465934443109, + "grad_norm": 4.169530162235752, + "learning_rate": 1.2253905212689554e-06, + "loss": 1.628, + "step": 4927 + }, + { + "epoch": 0.845718208340484, + "grad_norm": 4.030526324953615, + "learning_rate": 1.2227258137256782e-06, + "loss": 1.3884, + "step": 4928 + }, + { + "epoch": 0.845889823236657, + "grad_norm": 4.409947364191878, + "learning_rate": 1.2200638179663959e-06, + "loss": 1.3175, + "step": 4929 + }, + { + "epoch": 0.84606143813283, + "grad_norm": 4.162913692704332, + "learning_rate": 1.2174045348135433e-06, + "loss": 1.4428, + "step": 4930 + }, + { + "epoch": 0.8462330530290029, + "grad_norm": 3.6697022807315673, + "learning_rate": 1.214747965088723e-06, + "loss": 1.5566, + "step": 4931 + }, + { + "epoch": 0.8464046679251759, + "grad_norm": 3.9670220174527615, + "learning_rate": 1.2120941096126925e-06, + "loss": 1.4143, + "step": 4932 + }, + { + "epoch": 0.8465762828213489, + "grad_norm": 4.13642767727758, + "learning_rate": 1.2094429692053745e-06, + "loss": 1.611, + "step": 4933 + }, + { + "epoch": 0.8467478977175219, + "grad_norm": 4.2563311703541995, + "learning_rate": 1.2067945446858542e-06, + "loss": 1.5385, + "step": 4934 + }, + { + "epoch": 0.8469195126136949, + "grad_norm": 3.8465462998280637, + "learning_rate": 1.2041488368723763e-06, + "loss": 1.5377, + "step": 4935 + }, + { + "epoch": 0.8470911275098678, + "grad_norm": 3.7779903681907374, + "learning_rate": 1.2015058465823458e-06, + "loss": 1.4495, + "step": 4936 + }, + { + "epoch": 0.8472627424060408, + "grad_norm": 4.153909268677651, + "learning_rate": 1.1988655746323242e-06, + "loss": 1.6724, + "step": 4937 + }, + { + "epoch": 0.8474343573022138, + "grad_norm": 3.820918303074717, + "learning_rate": 1.1962280218380406e-06, + "loss": 1.513, + "step": 4938 + }, + { + "epoch": 0.8476059721983868, + "grad_norm": 4.3417443080505, + "learning_rate": 1.1935931890143793e-06, + "loss": 1.5835, + "step": 4939 + }, + { + "epoch": 0.8477775870945599, + "grad_norm": 4.267216106440781, + "learning_rate": 1.1909610769753887e-06, + "loss": 1.3173, + "step": 4940 + }, + { + "epoch": 0.8479492019907328, + "grad_norm": 4.285624146019127, + "learning_rate": 1.188331686534271e-06, + "loss": 1.5256, + "step": 4941 + }, + { + "epoch": 0.8481208168869058, + "grad_norm": 3.887066518498248, + "learning_rate": 1.185705018503387e-06, + "loss": 1.4094, + "step": 4942 + }, + { + "epoch": 0.8482924317830788, + "grad_norm": 4.103010896551944, + "learning_rate": 1.183081073694261e-06, + "loss": 1.3719, + "step": 4943 + }, + { + "epoch": 0.8484640466792518, + "grad_norm": 4.526756374001578, + "learning_rate": 1.180459852917576e-06, + "loss": 1.4711, + "step": 4944 + }, + { + "epoch": 0.8486356615754247, + "grad_norm": 3.609446816689786, + "learning_rate": 
1.1778413569831726e-06, + "loss": 1.4199, + "step": 4945 + }, + { + "epoch": 0.8488072764715977, + "grad_norm": 5.953425237138315, + "learning_rate": 1.175225586700045e-06, + "loss": 1.6101, + "step": 4946 + }, + { + "epoch": 0.8489788913677707, + "grad_norm": 4.882132753359956, + "learning_rate": 1.1726125428763523e-06, + "loss": 1.4541, + "step": 4947 + }, + { + "epoch": 0.8491505062639437, + "grad_norm": 5.112609916836839, + "learning_rate": 1.1700022263194043e-06, + "loss": 1.579, + "step": 4948 + }, + { + "epoch": 0.8493221211601167, + "grad_norm": 4.361799921627873, + "learning_rate": 1.1673946378356738e-06, + "loss": 1.5055, + "step": 4949 + }, + { + "epoch": 0.8494937360562896, + "grad_norm": 4.39054833895474, + "learning_rate": 1.1647897782307894e-06, + "loss": 1.4197, + "step": 4950 + }, + { + "epoch": 0.8496653509524627, + "grad_norm": 4.622661432081524, + "learning_rate": 1.162187648309534e-06, + "loss": 1.6335, + "step": 4951 + }, + { + "epoch": 0.8498369658486357, + "grad_norm": 4.596682299517377, + "learning_rate": 1.1595882488758492e-06, + "loss": 1.607, + "step": 4952 + }, + { + "epoch": 0.8500085807448087, + "grad_norm": 3.653893226363181, + "learning_rate": 1.1569915807328358e-06, + "loss": 1.2539, + "step": 4953 + }, + { + "epoch": 0.8501801956409817, + "grad_norm": 4.791232507713193, + "learning_rate": 1.1543976446827444e-06, + "loss": 1.3017, + "step": 4954 + }, + { + "epoch": 0.8503518105371546, + "grad_norm": 3.961604458265738, + "learning_rate": 1.151806441526987e-06, + "loss": 1.6042, + "step": 4955 + }, + { + "epoch": 0.8505234254333276, + "grad_norm": 4.63276277217101, + "learning_rate": 1.1492179720661267e-06, + "loss": 1.4828, + "step": 4956 + }, + { + "epoch": 0.8506950403295006, + "grad_norm": 4.785406970637545, + "learning_rate": 1.146632237099885e-06, + "loss": 1.4945, + "step": 4957 + }, + { + "epoch": 0.8508666552256736, + "grad_norm": 4.572383268847962, + "learning_rate": 1.1440492374271394e-06, + "loss": 1.6562, + "step": 4958 + }, + { + "epoch": 0.8510382701218466, + "grad_norm": 4.518244031891752, + "learning_rate": 1.141468973845923e-06, + "loss": 1.8368, + "step": 4959 + }, + { + "epoch": 0.8512098850180195, + "grad_norm": 4.444030206365939, + "learning_rate": 1.1388914471534184e-06, + "loss": 1.6599, + "step": 4960 + }, + { + "epoch": 0.8513814999141925, + "grad_norm": 4.855465196737093, + "learning_rate": 1.136316658145964e-06, + "loss": 1.5328, + "step": 4961 + }, + { + "epoch": 0.8515531148103656, + "grad_norm": 4.65434714192929, + "learning_rate": 1.1337446076190561e-06, + "loss": 1.4113, + "step": 4962 + }, + { + "epoch": 0.8517247297065386, + "grad_norm": 3.802153942979901, + "learning_rate": 1.1311752963673438e-06, + "loss": 1.4784, + "step": 4963 + }, + { + "epoch": 0.8518963446027115, + "grad_norm": 4.532892654549196, + "learning_rate": 1.12860872518463e-06, + "loss": 1.6059, + "step": 4964 + }, + { + "epoch": 0.8520679594988845, + "grad_norm": 4.107953320866706, + "learning_rate": 1.1260448948638691e-06, + "loss": 1.5517, + "step": 4965 + }, + { + "epoch": 0.8522395743950575, + "grad_norm": 3.810765754322222, + "learning_rate": 1.1234838061971675e-06, + "loss": 1.371, + "step": 4966 + }, + { + "epoch": 0.8524111892912305, + "grad_norm": 4.140868989260617, + "learning_rate": 1.1209254599757869e-06, + "loss": 1.4632, + "step": 4967 + }, + { + "epoch": 0.8525828041874035, + "grad_norm": 3.8428772865038643, + "learning_rate": 1.1183698569901435e-06, + "loss": 1.3862, + "step": 4968 + }, + { + "epoch": 0.8527544190835764, + "grad_norm": 
4.890515004008787, + "learning_rate": 1.115816998029805e-06, + "loss": 1.5759, + "step": 4969 + }, + { + "epoch": 0.8529260339797494, + "grad_norm": 3.795053919538234, + "learning_rate": 1.1132668838834882e-06, + "loss": 1.5987, + "step": 4970 + }, + { + "epoch": 0.8530976488759224, + "grad_norm": 4.1313104099208315, + "learning_rate": 1.110719515339065e-06, + "loss": 1.5364, + "step": 4971 + }, + { + "epoch": 0.8532692637720954, + "grad_norm": 7.544968788076173, + "learning_rate": 1.1081748931835556e-06, + "loss": 1.4705, + "step": 4972 + }, + { + "epoch": 0.8534408786682685, + "grad_norm": 4.5034509544070325, + "learning_rate": 1.105633018203136e-06, + "loss": 1.6429, + "step": 4973 + }, + { + "epoch": 0.8536124935644414, + "grad_norm": 4.654519761720039, + "learning_rate": 1.1030938911831334e-06, + "loss": 1.4978, + "step": 4974 + }, + { + "epoch": 0.8537841084606144, + "grad_norm": 3.808434468753352, + "learning_rate": 1.1005575129080203e-06, + "loss": 1.5589, + "step": 4975 + }, + { + "epoch": 0.8539557233567874, + "grad_norm": 3.9555730128612754, + "learning_rate": 1.0980238841614266e-06, + "loss": 1.6177, + "step": 4976 + }, + { + "epoch": 0.8541273382529604, + "grad_norm": 4.1284567248545985, + "learning_rate": 1.0954930057261303e-06, + "loss": 1.4358, + "step": 4977 + }, + { + "epoch": 0.8542989531491334, + "grad_norm": 3.807985608381048, + "learning_rate": 1.0929648783840562e-06, + "loss": 1.3752, + "step": 4978 + }, + { + "epoch": 0.8544705680453063, + "grad_norm": 5.248060134853908, + "learning_rate": 1.0904395029162863e-06, + "loss": 1.5525, + "step": 4979 + }, + { + "epoch": 0.8546421829414793, + "grad_norm": 4.8533334945416176, + "learning_rate": 1.0879168801030437e-06, + "loss": 1.3577, + "step": 4980 + }, + { + "epoch": 0.8548137978376523, + "grad_norm": 4.015377988435641, + "learning_rate": 1.085397010723709e-06, + "loss": 1.431, + "step": 4981 + }, + { + "epoch": 0.8549854127338253, + "grad_norm": 3.921053711843901, + "learning_rate": 1.0828798955568065e-06, + "loss": 1.5875, + "step": 4982 + }, + { + "epoch": 0.8551570276299982, + "grad_norm": 4.569632444159621, + "learning_rate": 1.080365535380017e-06, + "loss": 1.2945, + "step": 4983 + }, + { + "epoch": 0.8553286425261712, + "grad_norm": 4.345905303645349, + "learning_rate": 1.0778539309701609e-06, + "loss": 1.5331, + "step": 4984 + }, + { + "epoch": 0.8555002574223443, + "grad_norm": 3.996629916715755, + "learning_rate": 1.0753450831032087e-06, + "loss": 1.4953, + "step": 4985 + }, + { + "epoch": 0.8556718723185173, + "grad_norm": 4.238271019743073, + "learning_rate": 1.072838992554286e-06, + "loss": 1.6939, + "step": 4986 + }, + { + "epoch": 0.8558434872146903, + "grad_norm": 4.765757086947947, + "learning_rate": 1.0703356600976611e-06, + "loss": 1.4338, + "step": 4987 + }, + { + "epoch": 0.8560151021108632, + "grad_norm": 3.800372706873989, + "learning_rate": 1.0678350865067533e-06, + "loss": 1.4469, + "step": 4988 + }, + { + "epoch": 0.8561867170070362, + "grad_norm": 4.568999646801855, + "learning_rate": 1.0653372725541267e-06, + "loss": 1.7575, + "step": 4989 + }, + { + "epoch": 0.8563583319032092, + "grad_norm": 4.482562117263046, + "learning_rate": 1.0628422190114895e-06, + "loss": 1.3495, + "step": 4990 + }, + { + "epoch": 0.8565299467993822, + "grad_norm": 4.213145510294035, + "learning_rate": 1.0603499266497063e-06, + "loss": 1.493, + "step": 4991 + }, + { + "epoch": 0.8567015616955552, + "grad_norm": 4.1229405156760235, + "learning_rate": 1.0578603962387813e-06, + "loss": 1.4297, + "step": 4992 + }, + { + 
"epoch": 0.8568731765917281, + "grad_norm": 4.279019490313811, + "learning_rate": 1.055373628547871e-06, + "loss": 1.379, + "step": 4993 + }, + { + "epoch": 0.8570447914879011, + "grad_norm": 3.929186299149795, + "learning_rate": 1.0528896243452713e-06, + "loss": 1.329, + "step": 4994 + }, + { + "epoch": 0.8572164063840741, + "grad_norm": 3.902466070417452, + "learning_rate": 1.0504083843984304e-06, + "loss": 1.2828, + "step": 4995 + }, + { + "epoch": 0.8573880212802472, + "grad_norm": 4.030466353548272, + "learning_rate": 1.0479299094739381e-06, + "loss": 1.435, + "step": 4996 + }, + { + "epoch": 0.8575596361764201, + "grad_norm": 4.445195079184545, + "learning_rate": 1.0454542003375323e-06, + "loss": 1.6691, + "step": 4997 + }, + { + "epoch": 0.8577312510725931, + "grad_norm": 5.1572071007171925, + "learning_rate": 1.0429812577540987e-06, + "loss": 1.2057, + "step": 4998 + }, + { + "epoch": 0.8579028659687661, + "grad_norm": 3.8568960779106245, + "learning_rate": 1.0405110824876619e-06, + "loss": 1.4172, + "step": 4999 + }, + { + "epoch": 0.8580744808649391, + "grad_norm": 4.248273821501593, + "learning_rate": 1.0380436753013978e-06, + "loss": 1.4256, + "step": 5000 + }, + { + "epoch": 0.8582460957611121, + "grad_norm": 5.1034806290838, + "learning_rate": 1.0355790369576257e-06, + "loss": 1.6606, + "step": 5001 + }, + { + "epoch": 0.858417710657285, + "grad_norm": 4.392581789796283, + "learning_rate": 1.0331171682178043e-06, + "loss": 1.5253, + "step": 5002 + }, + { + "epoch": 0.858589325553458, + "grad_norm": 3.947293078550276, + "learning_rate": 1.0306580698425439e-06, + "loss": 1.6636, + "step": 5003 + }, + { + "epoch": 0.858760940449631, + "grad_norm": 3.9923118216889217, + "learning_rate": 1.0282017425915936e-06, + "loss": 1.691, + "step": 5004 + }, + { + "epoch": 0.858932555345804, + "grad_norm": 4.672225209413681, + "learning_rate": 1.0257481872238483e-06, + "loss": 1.5384, + "step": 5005 + }, + { + "epoch": 0.859104170241977, + "grad_norm": 4.075767945124762, + "learning_rate": 1.0232974044973476e-06, + "loss": 1.5039, + "step": 5006 + }, + { + "epoch": 0.85927578513815, + "grad_norm": 4.695985161143493, + "learning_rate": 1.0208493951692755e-06, + "loss": 1.5919, + "step": 5007 + }, + { + "epoch": 0.859447400034323, + "grad_norm": 4.646407671431997, + "learning_rate": 1.018404159995955e-06, + "loss": 1.5713, + "step": 5008 + }, + { + "epoch": 0.859619014930496, + "grad_norm": 5.681333667663091, + "learning_rate": 1.0159616997328514e-06, + "loss": 1.5, + "step": 5009 + }, + { + "epoch": 0.859790629826669, + "grad_norm": 4.211669878975965, + "learning_rate": 1.0135220151345781e-06, + "loss": 1.3252, + "step": 5010 + }, + { + "epoch": 0.859962244722842, + "grad_norm": 5.012749115569514, + "learning_rate": 1.0110851069548887e-06, + "loss": 1.3362, + "step": 5011 + }, + { + "epoch": 0.8601338596190149, + "grad_norm": 4.799166594280435, + "learning_rate": 1.0086509759466789e-06, + "loss": 1.5699, + "step": 5012 + }, + { + "epoch": 0.8603054745151879, + "grad_norm": 4.11167738160439, + "learning_rate": 1.0062196228619848e-06, + "loss": 1.2304, + "step": 5013 + }, + { + "epoch": 0.8604770894113609, + "grad_norm": 3.85426264548864, + "learning_rate": 1.003791048451983e-06, + "loss": 1.3575, + "step": 5014 + }, + { + "epoch": 0.8606487043075339, + "grad_norm": 4.932616436040077, + "learning_rate": 1.0013652534669982e-06, + "loss": 1.6809, + "step": 5015 + }, + { + "epoch": 0.8608203192037068, + "grad_norm": 4.072350979329002, + "learning_rate": 9.989422386564895e-07, + "loss": 1.5079, + 
"step": 5016 + }, + { + "epoch": 0.8609919340998798, + "grad_norm": 4.183295675641887, + "learning_rate": 9.965220047690626e-07, + "loss": 1.5894, + "step": 5017 + }, + { + "epoch": 0.8611635489960529, + "grad_norm": 4.699226910725941, + "learning_rate": 9.941045525524573e-07, + "loss": 1.4409, + "step": 5018 + }, + { + "epoch": 0.8613351638922259, + "grad_norm": 5.677388033360187, + "learning_rate": 9.916898827535625e-07, + "loss": 1.629, + "step": 5019 + }, + { + "epoch": 0.8615067787883989, + "grad_norm": 5.537331101016582, + "learning_rate": 9.892779961183985e-07, + "loss": 1.5652, + "step": 5020 + }, + { + "epoch": 0.8616783936845718, + "grad_norm": 3.904377098747771, + "learning_rate": 9.868688933921322e-07, + "loss": 1.4424, + "step": 5021 + }, + { + "epoch": 0.8618500085807448, + "grad_norm": 4.1536312005977125, + "learning_rate": 9.844625753190696e-07, + "loss": 1.4112, + "step": 5022 + }, + { + "epoch": 0.8620216234769178, + "grad_norm": 5.393470848636458, + "learning_rate": 9.820590426426523e-07, + "loss": 1.3961, + "step": 5023 + }, + { + "epoch": 0.8621932383730908, + "grad_norm": 4.294607425487386, + "learning_rate": 9.796582961054645e-07, + "loss": 1.4801, + "step": 5024 + }, + { + "epoch": 0.8623648532692638, + "grad_norm": 4.24026990525951, + "learning_rate": 9.772603364492316e-07, + "loss": 1.5248, + "step": 5025 + }, + { + "epoch": 0.8625364681654367, + "grad_norm": 4.235663044342673, + "learning_rate": 9.748651644148132e-07, + "loss": 1.2612, + "step": 5026 + }, + { + "epoch": 0.8627080830616097, + "grad_norm": 4.108277109157669, + "learning_rate": 9.72472780742212e-07, + "loss": 1.4762, + "step": 5027 + }, + { + "epoch": 0.8628796979577827, + "grad_norm": 4.4272745917650385, + "learning_rate": 9.700831861705639e-07, + "loss": 1.4225, + "step": 5028 + }, + { + "epoch": 0.8630513128539558, + "grad_norm": 3.7793833088170388, + "learning_rate": 9.67696381438149e-07, + "loss": 1.4758, + "step": 5029 + }, + { + "epoch": 0.8632229277501288, + "grad_norm": 5.045069406961064, + "learning_rate": 9.653123672823817e-07, + "loss": 1.4896, + "step": 5030 + }, + { + "epoch": 0.8633945426463017, + "grad_norm": 4.22460255620109, + "learning_rate": 9.629311444398193e-07, + "loss": 1.4656, + "step": 5031 + }, + { + "epoch": 0.8635661575424747, + "grad_norm": 3.774385570847925, + "learning_rate": 9.60552713646149e-07, + "loss": 1.3369, + "step": 5032 + }, + { + "epoch": 0.8637377724386477, + "grad_norm": 4.2488624472773635, + "learning_rate": 9.581770756361975e-07, + "loss": 1.3296, + "step": 5033 + }, + { + "epoch": 0.8639093873348207, + "grad_norm": 4.1280459677980526, + "learning_rate": 9.558042311439331e-07, + "loss": 1.446, + "step": 5034 + }, + { + "epoch": 0.8640810022309936, + "grad_norm": 4.343120172305175, + "learning_rate": 9.534341809024583e-07, + "loss": 1.6202, + "step": 5035 + }, + { + "epoch": 0.8642526171271666, + "grad_norm": 4.372806833612556, + "learning_rate": 9.510669256440142e-07, + "loss": 1.5609, + "step": 5036 + }, + { + "epoch": 0.8644242320233396, + "grad_norm": 4.423582235138371, + "learning_rate": 9.487024660999733e-07, + "loss": 1.7528, + "step": 5037 + }, + { + "epoch": 0.8645958469195126, + "grad_norm": 4.31289752392683, + "learning_rate": 9.46340803000847e-07, + "loss": 1.5075, + "step": 5038 + }, + { + "epoch": 0.8647674618156856, + "grad_norm": 4.66911123731338, + "learning_rate": 9.439819370762848e-07, + "loss": 1.4164, + "step": 5039 + }, + { + "epoch": 0.8649390767118585, + "grad_norm": 4.370870713636476, + "learning_rate": 9.416258690550706e-07, + 
"loss": 1.7085, + "step": 5040 + }, + { + "epoch": 0.8651106916080316, + "grad_norm": 4.168612314843888, + "learning_rate": 9.392725996651253e-07, + "loss": 1.3594, + "step": 5041 + }, + { + "epoch": 0.8652823065042046, + "grad_norm": 4.336387567101559, + "learning_rate": 9.369221296335007e-07, + "loss": 1.4699, + "step": 5042 + }, + { + "epoch": 0.8654539214003776, + "grad_norm": 4.915770565892056, + "learning_rate": 9.345744596863892e-07, + "loss": 1.3862, + "step": 5043 + }, + { + "epoch": 0.8656255362965506, + "grad_norm": 4.011183926503289, + "learning_rate": 9.322295905491119e-07, + "loss": 1.3517, + "step": 5044 + }, + { + "epoch": 0.8657971511927235, + "grad_norm": 4.4582046103936674, + "learning_rate": 9.298875229461301e-07, + "loss": 1.6414, + "step": 5045 + }, + { + "epoch": 0.8659687660888965, + "grad_norm": 4.534494007187588, + "learning_rate": 9.275482576010409e-07, + "loss": 1.5888, + "step": 5046 + }, + { + "epoch": 0.8661403809850695, + "grad_norm": 4.49282827575606, + "learning_rate": 9.252117952365669e-07, + "loss": 1.3449, + "step": 5047 + }, + { + "epoch": 0.8663119958812425, + "grad_norm": 4.875803471072843, + "learning_rate": 9.228781365745721e-07, + "loss": 1.6039, + "step": 5048 + }, + { + "epoch": 0.8664836107774154, + "grad_norm": 4.599963016744397, + "learning_rate": 9.20547282336055e-07, + "loss": 1.5185, + "step": 5049 + }, + { + "epoch": 0.8666552256735884, + "grad_norm": 4.743972922196142, + "learning_rate": 9.18219233241141e-07, + "loss": 1.7756, + "step": 5050 + }, + { + "epoch": 0.8668268405697614, + "grad_norm": 5.507502229554712, + "learning_rate": 9.158939900090968e-07, + "loss": 1.4671, + "step": 5051 + }, + { + "epoch": 0.8669984554659345, + "grad_norm": 4.854221079348115, + "learning_rate": 9.135715533583134e-07, + "loss": 1.5826, + "step": 5052 + }, + { + "epoch": 0.8671700703621075, + "grad_norm": 4.546582928269538, + "learning_rate": 9.112519240063234e-07, + "loss": 1.4795, + "step": 5053 + }, + { + "epoch": 0.8673416852582804, + "grad_norm": 4.404939154781044, + "learning_rate": 9.089351026697868e-07, + "loss": 1.4465, + "step": 5054 + }, + { + "epoch": 0.8675133001544534, + "grad_norm": 5.352269181698289, + "learning_rate": 9.066210900644989e-07, + "loss": 1.471, + "step": 5055 + }, + { + "epoch": 0.8676849150506264, + "grad_norm": 4.629490754087409, + "learning_rate": 9.043098869053846e-07, + "loss": 1.6782, + "step": 5056 + }, + { + "epoch": 0.8678565299467994, + "grad_norm": 4.479723296306851, + "learning_rate": 9.020014939065014e-07, + "loss": 1.462, + "step": 5057 + }, + { + "epoch": 0.8680281448429724, + "grad_norm": 4.414508127403711, + "learning_rate": 8.996959117810388e-07, + "loss": 1.4556, + "step": 5058 + }, + { + "epoch": 0.8681997597391453, + "grad_norm": 4.249810920876259, + "learning_rate": 8.973931412413195e-07, + "loss": 1.514, + "step": 5059 + }, + { + "epoch": 0.8683713746353183, + "grad_norm": 4.041377062003575, + "learning_rate": 8.950931829987985e-07, + "loss": 1.5281, + "step": 5060 + }, + { + "epoch": 0.8685429895314913, + "grad_norm": 3.9156421493984714, + "learning_rate": 8.927960377640565e-07, + "loss": 1.3325, + "step": 5061 + }, + { + "epoch": 0.8687146044276643, + "grad_norm": 4.652944484996688, + "learning_rate": 8.90501706246808e-07, + "loss": 1.6896, + "step": 5062 + }, + { + "epoch": 0.8688862193238374, + "grad_norm": 4.370289293022309, + "learning_rate": 8.882101891558992e-07, + "loss": 1.6389, + "step": 5063 + }, + { + "epoch": 0.8690578342200103, + "grad_norm": 5.355449990024723, + "learning_rate": 
8.859214871993071e-07, + "loss": 1.4631, + "step": 5064 + }, + { + "epoch": 0.8692294491161833, + "grad_norm": 3.645103949548479, + "learning_rate": 8.836356010841385e-07, + "loss": 1.4555, + "step": 5065 + }, + { + "epoch": 0.8694010640123563, + "grad_norm": 4.609444744786666, + "learning_rate": 8.813525315166271e-07, + "loss": 1.5836, + "step": 5066 + }, + { + "epoch": 0.8695726789085293, + "grad_norm": 4.41118749061462, + "learning_rate": 8.790722792021422e-07, + "loss": 1.403, + "step": 5067 + }, + { + "epoch": 0.8697442938047022, + "grad_norm": 6.118946041906018, + "learning_rate": 8.767948448451758e-07, + "loss": 1.3973, + "step": 5068 + }, + { + "epoch": 0.8699159087008752, + "grad_norm": 3.881324435908571, + "learning_rate": 8.745202291493548e-07, + "loss": 1.282, + "step": 5069 + }, + { + "epoch": 0.8700875235970482, + "grad_norm": 4.657596366879938, + "learning_rate": 8.722484328174364e-07, + "loss": 1.4645, + "step": 5070 + }, + { + "epoch": 0.8702591384932212, + "grad_norm": 4.333360027057549, + "learning_rate": 8.699794565512976e-07, + "loss": 1.4134, + "step": 5071 + }, + { + "epoch": 0.8704307533893942, + "grad_norm": 4.349218612199456, + "learning_rate": 8.677133010519545e-07, + "loss": 1.469, + "step": 5072 + }, + { + "epoch": 0.8706023682855671, + "grad_norm": 4.22087238577858, + "learning_rate": 8.654499670195482e-07, + "loss": 1.661, + "step": 5073 + }, + { + "epoch": 0.8707739831817402, + "grad_norm": 4.2643783383128895, + "learning_rate": 8.631894551533437e-07, + "loss": 1.4659, + "step": 5074 + }, + { + "epoch": 0.8709455980779132, + "grad_norm": 3.7801301587071237, + "learning_rate": 8.609317661517413e-07, + "loss": 1.1628, + "step": 5075 + }, + { + "epoch": 0.8711172129740862, + "grad_norm": 4.807454155631998, + "learning_rate": 8.586769007122619e-07, + "loss": 1.5086, + "step": 5076 + }, + { + "epoch": 0.8712888278702592, + "grad_norm": 4.626253892623588, + "learning_rate": 8.564248595315616e-07, + "loss": 1.5777, + "step": 5077 + }, + { + "epoch": 0.8714604427664321, + "grad_norm": 4.163467749778554, + "learning_rate": 8.541756433054172e-07, + "loss": 1.261, + "step": 5078 + }, + { + "epoch": 0.8716320576626051, + "grad_norm": 4.206738252252562, + "learning_rate": 8.519292527287393e-07, + "loss": 1.6554, + "step": 5079 + }, + { + "epoch": 0.8718036725587781, + "grad_norm": 3.9233257823993584, + "learning_rate": 8.496856884955585e-07, + "loss": 1.3324, + "step": 5080 + }, + { + "epoch": 0.8719752874549511, + "grad_norm": 4.352104845463383, + "learning_rate": 8.474449512990357e-07, + "loss": 1.6384, + "step": 5081 + }, + { + "epoch": 0.8721469023511241, + "grad_norm": 4.432618058077031, + "learning_rate": 8.452070418314584e-07, + "loss": 1.4949, + "step": 5082 + }, + { + "epoch": 0.872318517247297, + "grad_norm": 3.9214483831480806, + "learning_rate": 8.429719607842413e-07, + "loss": 1.397, + "step": 5083 + }, + { + "epoch": 0.87249013214347, + "grad_norm": 5.359178294980305, + "learning_rate": 8.40739708847923e-07, + "loss": 1.6092, + "step": 5084 + }, + { + "epoch": 0.872661747039643, + "grad_norm": 3.8919782825954194, + "learning_rate": 8.385102867121731e-07, + "loss": 1.3887, + "step": 5085 + }, + { + "epoch": 0.8728333619358161, + "grad_norm": 4.68876473689935, + "learning_rate": 8.362836950657771e-07, + "loss": 1.4126, + "step": 5086 + }, + { + "epoch": 0.873004976831989, + "grad_norm": 5.023640914761555, + "learning_rate": 8.340599345966538e-07, + "loss": 1.5846, + "step": 5087 + }, + { + "epoch": 0.873176591728162, + "grad_norm": 4.336703494860674, + 
"learning_rate": 8.318390059918468e-07, + "loss": 1.5998, + "step": 5088 + }, + { + "epoch": 0.873348206624335, + "grad_norm": 4.295241706006463, + "learning_rate": 8.296209099375252e-07, + "loss": 1.3604, + "step": 5089 + }, + { + "epoch": 0.873519821520508, + "grad_norm": 4.055022805214807, + "learning_rate": 8.274056471189762e-07, + "loss": 1.5656, + "step": 5090 + }, + { + "epoch": 0.873691436416681, + "grad_norm": 5.265318877741098, + "learning_rate": 8.251932182206213e-07, + "loss": 1.6882, + "step": 5091 + }, + { + "epoch": 0.8738630513128539, + "grad_norm": 4.213946294428783, + "learning_rate": 8.229836239259981e-07, + "loss": 1.3338, + "step": 5092 + }, + { + "epoch": 0.8740346662090269, + "grad_norm": 5.298311740209048, + "learning_rate": 8.207768649177738e-07, + "loss": 1.643, + "step": 5093 + }, + { + "epoch": 0.8742062811051999, + "grad_norm": 3.861289093619137, + "learning_rate": 8.185729418777388e-07, + "loss": 1.2493, + "step": 5094 + }, + { + "epoch": 0.874377896001373, + "grad_norm": 4.479952892249776, + "learning_rate": 8.16371855486805e-07, + "loss": 1.5119, + "step": 5095 + }, + { + "epoch": 0.874549510897546, + "grad_norm": 4.756410735988385, + "learning_rate": 8.141736064250094e-07, + "loss": 1.3881, + "step": 5096 + }, + { + "epoch": 0.8747211257937189, + "grad_norm": 4.387309480149174, + "learning_rate": 8.119781953715145e-07, + "loss": 1.4072, + "step": 5097 + }, + { + "epoch": 0.8748927406898919, + "grad_norm": 4.019570052494385, + "learning_rate": 8.097856230046009e-07, + "loss": 1.4999, + "step": 5098 + }, + { + "epoch": 0.8750643555860649, + "grad_norm": 4.407039523302897, + "learning_rate": 8.075958900016778e-07, + "loss": 1.3824, + "step": 5099 + }, + { + "epoch": 0.8752359704822379, + "grad_norm": 4.037211823213187, + "learning_rate": 8.05408997039272e-07, + "loss": 1.3855, + "step": 5100 + }, + { + "epoch": 0.8754075853784108, + "grad_norm": 4.01127887262451, + "learning_rate": 8.032249447930362e-07, + "loss": 1.655, + "step": 5101 + }, + { + "epoch": 0.8755792002745838, + "grad_norm": 4.017426422896338, + "learning_rate": 8.010437339377441e-07, + "loss": 1.2587, + "step": 5102 + }, + { + "epoch": 0.8757508151707568, + "grad_norm": 4.092556992762357, + "learning_rate": 7.988653651472956e-07, + "loss": 1.4627, + "step": 5103 + }, + { + "epoch": 0.8759224300669298, + "grad_norm": 4.914417080451019, + "learning_rate": 7.966898390947053e-07, + "loss": 1.4925, + "step": 5104 + }, + { + "epoch": 0.8760940449631028, + "grad_norm": 3.842738580606235, + "learning_rate": 7.945171564521126e-07, + "loss": 1.3254, + "step": 5105 + }, + { + "epoch": 0.8762656598592757, + "grad_norm": 4.958024354974342, + "learning_rate": 7.923473178907803e-07, + "loss": 1.5749, + "step": 5106 + }, + { + "epoch": 0.8764372747554487, + "grad_norm": 4.024535446446941, + "learning_rate": 7.901803240810901e-07, + "loss": 1.2354, + "step": 5107 + }, + { + "epoch": 0.8766088896516218, + "grad_norm": 4.744298179863797, + "learning_rate": 7.880161756925487e-07, + "loss": 1.5598, + "step": 5108 + }, + { + "epoch": 0.8767805045477948, + "grad_norm": 4.406646055381877, + "learning_rate": 7.85854873393781e-07, + "loss": 1.5606, + "step": 5109 + }, + { + "epoch": 0.8769521194439678, + "grad_norm": 3.889735812049366, + "learning_rate": 7.836964178525275e-07, + "loss": 1.5221, + "step": 5110 + }, + { + "epoch": 0.8771237343401407, + "grad_norm": 4.072053408410405, + "learning_rate": 7.81540809735658e-07, + "loss": 1.4832, + "step": 5111 + }, + { + "epoch": 0.8772953492363137, + "grad_norm": 
4.653716773949536, + "learning_rate": 7.793880497091578e-07, + "loss": 1.4553, + "step": 5112 + }, + { + "epoch": 0.8774669641324867, + "grad_norm": 4.129292446127243, + "learning_rate": 7.772381384381323e-07, + "loss": 1.3184, + "step": 5113 + }, + { + "epoch": 0.8776385790286597, + "grad_norm": 4.147999022478401, + "learning_rate": 7.750910765868113e-07, + "loss": 1.496, + "step": 5114 + }, + { + "epoch": 0.8778101939248327, + "grad_norm": 4.498973180218164, + "learning_rate": 7.729468648185379e-07, + "loss": 1.491, + "step": 5115 + }, + { + "epoch": 0.8779818088210056, + "grad_norm": 3.918602235664639, + "learning_rate": 7.708055037957751e-07, + "loss": 1.5765, + "step": 5116 + }, + { + "epoch": 0.8781534237171786, + "grad_norm": 4.0886953095453045, + "learning_rate": 7.686669941801106e-07, + "loss": 1.41, + "step": 5117 + }, + { + "epoch": 0.8783250386133516, + "grad_norm": 4.028126965015301, + "learning_rate": 7.665313366322458e-07, + "loss": 1.4777, + "step": 5118 + }, + { + "epoch": 0.8784966535095247, + "grad_norm": 4.591336670412107, + "learning_rate": 7.643985318120073e-07, + "loss": 1.5425, + "step": 5119 + }, + { + "epoch": 0.8786682684056976, + "grad_norm": 4.544103967482204, + "learning_rate": 7.622685803783303e-07, + "loss": 1.5423, + "step": 5120 + }, + { + "epoch": 0.8788398833018706, + "grad_norm": 4.347906453781017, + "learning_rate": 7.601414829892795e-07, + "loss": 1.553, + "step": 5121 + }, + { + "epoch": 0.8790114981980436, + "grad_norm": 4.719296307510465, + "learning_rate": 7.580172403020291e-07, + "loss": 1.676, + "step": 5122 + }, + { + "epoch": 0.8791831130942166, + "grad_norm": 4.5931446291254545, + "learning_rate": 7.558958529728766e-07, + "loss": 1.4506, + "step": 5123 + }, + { + "epoch": 0.8793547279903896, + "grad_norm": 3.875195108008688, + "learning_rate": 7.537773216572331e-07, + "loss": 1.5131, + "step": 5124 + }, + { + "epoch": 0.8795263428865625, + "grad_norm": 3.8027806816798533, + "learning_rate": 7.516616470096317e-07, + "loss": 1.4165, + "step": 5125 + }, + { + "epoch": 0.8796979577827355, + "grad_norm": 4.2045088396829335, + "learning_rate": 7.495488296837205e-07, + "loss": 1.4846, + "step": 5126 + }, + { + "epoch": 0.8798695726789085, + "grad_norm": 4.356659460779504, + "learning_rate": 7.474388703322677e-07, + "loss": 1.28, + "step": 5127 + }, + { + "epoch": 0.8800411875750815, + "grad_norm": 3.924157987508082, + "learning_rate": 7.453317696071538e-07, + "loss": 1.5187, + "step": 5128 + }, + { + "epoch": 0.8802128024712546, + "grad_norm": 4.2471735256036185, + "learning_rate": 7.43227528159376e-07, + "loss": 1.4178, + "step": 5129 + }, + { + "epoch": 0.8803844173674275, + "grad_norm": 3.814645226451688, + "learning_rate": 7.41126146639054e-07, + "loss": 1.2214, + "step": 5130 + }, + { + "epoch": 0.8805560322636005, + "grad_norm": 4.50307800406103, + "learning_rate": 7.390276256954188e-07, + "loss": 1.4051, + "step": 5131 + }, + { + "epoch": 0.8807276471597735, + "grad_norm": 4.22926123412834, + "learning_rate": 7.369319659768204e-07, + "loss": 1.6091, + "step": 5132 + }, + { + "epoch": 0.8808992620559465, + "grad_norm": 4.541228544104218, + "learning_rate": 7.348391681307254e-07, + "loss": 1.3364, + "step": 5133 + }, + { + "epoch": 0.8810708769521195, + "grad_norm": 4.781747943997346, + "learning_rate": 7.3274923280371e-07, + "loss": 1.7184, + "step": 5134 + }, + { + "epoch": 0.8812424918482924, + "grad_norm": 4.763222307588499, + "learning_rate": 7.306621606414721e-07, + "loss": 1.4953, + "step": 5135 + }, + { + "epoch": 0.8814141067444654, + 
"grad_norm": 4.066786952295638, + "learning_rate": 7.285779522888237e-07, + "loss": 1.3728, + "step": 5136 + }, + { + "epoch": 0.8815857216406384, + "grad_norm": 4.225379332033343, + "learning_rate": 7.264966083896918e-07, + "loss": 1.6345, + "step": 5137 + }, + { + "epoch": 0.8817573365368114, + "grad_norm": 4.184227822797963, + "learning_rate": 7.2441812958712e-07, + "loss": 1.4949, + "step": 5138 + }, + { + "epoch": 0.8819289514329843, + "grad_norm": 4.473972291437994, + "learning_rate": 7.223425165232645e-07, + "loss": 1.5525, + "step": 5139 + }, + { + "epoch": 0.8821005663291573, + "grad_norm": 4.272178325788325, + "learning_rate": 7.202697698393923e-07, + "loss": 1.504, + "step": 5140 + }, + { + "epoch": 0.8822721812253304, + "grad_norm": 4.532353998495571, + "learning_rate": 7.181998901758936e-07, + "loss": 1.6841, + "step": 5141 + }, + { + "epoch": 0.8824437961215034, + "grad_norm": 4.27306585291111, + "learning_rate": 7.161328781722665e-07, + "loss": 1.5299, + "step": 5142 + }, + { + "epoch": 0.8826154110176764, + "grad_norm": 4.360330423878417, + "learning_rate": 7.140687344671282e-07, + "loss": 1.4884, + "step": 5143 + }, + { + "epoch": 0.8827870259138493, + "grad_norm": 3.9986389081246374, + "learning_rate": 7.120074596982008e-07, + "loss": 1.5262, + "step": 5144 + }, + { + "epoch": 0.8829586408100223, + "grad_norm": 4.611848913050924, + "learning_rate": 7.099490545023313e-07, + "loss": 1.509, + "step": 5145 + }, + { + "epoch": 0.8831302557061953, + "grad_norm": 4.901857884901667, + "learning_rate": 7.078935195154712e-07, + "loss": 1.4917, + "step": 5146 + }, + { + "epoch": 0.8833018706023683, + "grad_norm": 5.805379132913955, + "learning_rate": 7.058408553726881e-07, + "loss": 1.6059, + "step": 5147 + }, + { + "epoch": 0.8834734854985413, + "grad_norm": 6.410889962180712, + "learning_rate": 7.037910627081678e-07, + "loss": 1.5172, + "step": 5148 + }, + { + "epoch": 0.8836451003947142, + "grad_norm": 4.732516099682142, + "learning_rate": 7.017441421551985e-07, + "loss": 1.437, + "step": 5149 + }, + { + "epoch": 0.8838167152908872, + "grad_norm": 3.773438903238695, + "learning_rate": 6.997000943461895e-07, + "loss": 1.4421, + "step": 5150 + }, + { + "epoch": 0.8839883301870602, + "grad_norm": 4.356289653010965, + "learning_rate": 6.976589199126615e-07, + "loss": 1.3342, + "step": 5151 + }, + { + "epoch": 0.8841599450832333, + "grad_norm": 4.355173533103943, + "learning_rate": 6.956206194852422e-07, + "loss": 1.4601, + "step": 5152 + }, + { + "epoch": 0.8843315599794063, + "grad_norm": 4.178629863475834, + "learning_rate": 6.935851936936788e-07, + "loss": 1.4507, + "step": 5153 + }, + { + "epoch": 0.8845031748755792, + "grad_norm": 4.336461272694183, + "learning_rate": 6.91552643166823e-07, + "loss": 1.3607, + "step": 5154 + }, + { + "epoch": 0.8846747897717522, + "grad_norm": 4.026678360353491, + "learning_rate": 6.895229685326443e-07, + "loss": 1.507, + "step": 5155 + }, + { + "epoch": 0.8848464046679252, + "grad_norm": 5.873799972532674, + "learning_rate": 6.874961704182193e-07, + "loss": 1.7137, + "step": 5156 + }, + { + "epoch": 0.8850180195640982, + "grad_norm": 4.18488910121758, + "learning_rate": 6.854722494497424e-07, + "loss": 1.5954, + "step": 5157 + }, + { + "epoch": 0.8851896344602711, + "grad_norm": 4.802236049468994, + "learning_rate": 6.834512062525067e-07, + "loss": 1.4101, + "step": 5158 + }, + { + "epoch": 0.8853612493564441, + "grad_norm": 4.344564222834242, + "learning_rate": 6.814330414509285e-07, + "loss": 1.5013, + "step": 5159 + }, + { + "epoch": 
0.8855328642526171, + "grad_norm": 3.8215241716912165, + "learning_rate": 6.794177556685288e-07, + "loss": 1.2992, + "step": 5160 + }, + { + "epoch": 0.8857044791487901, + "grad_norm": 5.286115055593522, + "learning_rate": 6.77405349527942e-07, + "loss": 1.5744, + "step": 5161 + }, + { + "epoch": 0.8858760940449631, + "grad_norm": 4.690246953547851, + "learning_rate": 6.753958236509117e-07, + "loss": 1.6899, + "step": 5162 + }, + { + "epoch": 0.886047708941136, + "grad_norm": 4.507851126536749, + "learning_rate": 6.733891786582902e-07, + "loss": 1.3506, + "step": 5163 + }, + { + "epoch": 0.8862193238373091, + "grad_norm": 3.741963759002263, + "learning_rate": 6.713854151700383e-07, + "loss": 1.3187, + "step": 5164 + }, + { + "epoch": 0.8863909387334821, + "grad_norm": 4.108044182202845, + "learning_rate": 6.693845338052329e-07, + "loss": 1.4622, + "step": 5165 + }, + { + "epoch": 0.8865625536296551, + "grad_norm": 4.21407652674191, + "learning_rate": 6.673865351820541e-07, + "loss": 1.3663, + "step": 5166 + }, + { + "epoch": 0.8867341685258281, + "grad_norm": 4.250956023727706, + "learning_rate": 6.653914199177968e-07, + "loss": 1.5219, + "step": 5167 + }, + { + "epoch": 0.886905783422001, + "grad_norm": 4.554237603698288, + "learning_rate": 6.633991886288582e-07, + "loss": 1.5059, + "step": 5168 + }, + { + "epoch": 0.887077398318174, + "grad_norm": 4.695083667332852, + "learning_rate": 6.614098419307525e-07, + "loss": 1.5965, + "step": 5169 + }, + { + "epoch": 0.887249013214347, + "grad_norm": 4.8651393462264565, + "learning_rate": 6.594233804380945e-07, + "loss": 1.4644, + "step": 5170 + }, + { + "epoch": 0.88742062811052, + "grad_norm": 4.417496382127124, + "learning_rate": 6.574398047646146e-07, + "loss": 1.484, + "step": 5171 + }, + { + "epoch": 0.8875922430066929, + "grad_norm": 5.293671134677378, + "learning_rate": 6.554591155231482e-07, + "loss": 1.4422, + "step": 5172 + }, + { + "epoch": 0.8877638579028659, + "grad_norm": 4.210464648271912, + "learning_rate": 6.53481313325638e-07, + "loss": 1.5188, + "step": 5173 + }, + { + "epoch": 0.887935472799039, + "grad_norm": 3.9454332618832777, + "learning_rate": 6.515063987831372e-07, + "loss": 1.478, + "step": 5174 + }, + { + "epoch": 0.888107087695212, + "grad_norm": 4.175447852579143, + "learning_rate": 6.495343725058067e-07, + "loss": 1.5994, + "step": 5175 + }, + { + "epoch": 0.888278702591385, + "grad_norm": 4.702515197029811, + "learning_rate": 6.475652351029116e-07, + "loss": 1.5272, + "step": 5176 + }, + { + "epoch": 0.8884503174875579, + "grad_norm": 4.255356582993296, + "learning_rate": 6.455989871828305e-07, + "loss": 1.5315, + "step": 5177 + }, + { + "epoch": 0.8886219323837309, + "grad_norm": 4.766788121069817, + "learning_rate": 6.436356293530433e-07, + "loss": 1.411, + "step": 5178 + }, + { + "epoch": 0.8887935472799039, + "grad_norm": 4.794631815797826, + "learning_rate": 6.416751622201389e-07, + "loss": 1.5831, + "step": 5179 + }, + { + "epoch": 0.8889651621760769, + "grad_norm": 4.29972368777464, + "learning_rate": 6.397175863898153e-07, + "loss": 1.663, + "step": 5180 + }, + { + "epoch": 0.8891367770722499, + "grad_norm": 3.7789557598739827, + "learning_rate": 6.377629024668774e-07, + "loss": 1.2915, + "step": 5181 + }, + { + "epoch": 0.8893083919684228, + "grad_norm": 3.9090569848790118, + "learning_rate": 6.358111110552324e-07, + "loss": 1.3081, + "step": 5182 + }, + { + "epoch": 0.8894800068645958, + "grad_norm": 4.519018907240786, + "learning_rate": 6.338622127578942e-07, + "loss": 1.4948, + "step": 5183 + }, + { + 
"epoch": 0.8896516217607688, + "grad_norm": 3.985418638820712, + "learning_rate": 6.31916208176988e-07, + "loss": 1.4789, + "step": 5184 + }, + { + "epoch": 0.8898232366569419, + "grad_norm": 4.391914961738118, + "learning_rate": 6.299730979137419e-07, + "loss": 1.3071, + "step": 5185 + }, + { + "epoch": 0.8899948515531149, + "grad_norm": 4.142436826140557, + "learning_rate": 6.2803288256849e-07, + "loss": 1.4889, + "step": 5186 + }, + { + "epoch": 0.8901664664492878, + "grad_norm": 4.299723211469974, + "learning_rate": 6.260955627406706e-07, + "loss": 1.2638, + "step": 5187 + }, + { + "epoch": 0.8903380813454608, + "grad_norm": 3.5712943736822593, + "learning_rate": 6.241611390288283e-07, + "loss": 1.3432, + "step": 5188 + }, + { + "epoch": 0.8905096962416338, + "grad_norm": 4.171748995329112, + "learning_rate": 6.222296120306137e-07, + "loss": 1.6976, + "step": 5189 + }, + { + "epoch": 0.8906813111378068, + "grad_norm": 4.138958074288165, + "learning_rate": 6.20300982342783e-07, + "loss": 1.4986, + "step": 5190 + }, + { + "epoch": 0.8908529260339797, + "grad_norm": 4.374146664687719, + "learning_rate": 6.183752505611962e-07, + "loss": 1.501, + "step": 5191 + }, + { + "epoch": 0.8910245409301527, + "grad_norm": 4.106357547216749, + "learning_rate": 6.164524172808162e-07, + "loss": 1.5257, + "step": 5192 + }, + { + "epoch": 0.8911961558263257, + "grad_norm": 5.846407060589214, + "learning_rate": 6.145324830957144e-07, + "loss": 1.5573, + "step": 5193 + }, + { + "epoch": 0.8913677707224987, + "grad_norm": 4.5186496629414545, + "learning_rate": 6.12615448599062e-07, + "loss": 1.6232, + "step": 5194 + }, + { + "epoch": 0.8915393856186717, + "grad_norm": 5.0965856982743505, + "learning_rate": 6.107013143831375e-07, + "loss": 1.4859, + "step": 5195 + }, + { + "epoch": 0.8917110005148446, + "grad_norm": 4.744328446437492, + "learning_rate": 6.087900810393254e-07, + "loss": 1.688, + "step": 5196 + }, + { + "epoch": 0.8918826154110177, + "grad_norm": 4.909702260880713, + "learning_rate": 6.068817491581069e-07, + "loss": 1.4724, + "step": 5197 + }, + { + "epoch": 0.8920542303071907, + "grad_norm": 3.7809838046607274, + "learning_rate": 6.049763193290725e-07, + "loss": 1.3656, + "step": 5198 + }, + { + "epoch": 0.8922258452033637, + "grad_norm": 4.387571464690832, + "learning_rate": 6.030737921409169e-07, + "loss": 1.4564, + "step": 5199 + }, + { + "epoch": 0.8923974600995367, + "grad_norm": 4.215459866078354, + "learning_rate": 6.011741681814309e-07, + "loss": 1.4627, + "step": 5200 + }, + { + "epoch": 0.8925690749957096, + "grad_norm": 4.912869724481879, + "learning_rate": 5.992774480375185e-07, + "loss": 1.4891, + "step": 5201 + }, + { + "epoch": 0.8927406898918826, + "grad_norm": 4.463581542784137, + "learning_rate": 5.973836322951765e-07, + "loss": 1.4826, + "step": 5202 + }, + { + "epoch": 0.8929123047880556, + "grad_norm": 3.6769585976356787, + "learning_rate": 5.954927215395101e-07, + "loss": 1.2443, + "step": 5203 + }, + { + "epoch": 0.8930839196842286, + "grad_norm": 4.895161395407363, + "learning_rate": 5.936047163547276e-07, + "loss": 1.4917, + "step": 5204 + }, + { + "epoch": 0.8932555345804016, + "grad_norm": 4.040043100111145, + "learning_rate": 5.91719617324138e-07, + "loss": 1.585, + "step": 5205 + }, + { + "epoch": 0.8934271494765745, + "grad_norm": 4.247969886257527, + "learning_rate": 5.898374250301508e-07, + "loss": 1.5476, + "step": 5206 + }, + { + "epoch": 0.8935987643727475, + "grad_norm": 4.298199607245423, + "learning_rate": 5.879581400542788e-07, + "loss": 1.4175, + 
"step": 5207 + }, + { + "epoch": 0.8937703792689206, + "grad_norm": 4.224879587969809, + "learning_rate": 5.860817629771376e-07, + "loss": 1.5978, + "step": 5208 + }, + { + "epoch": 0.8939419941650936, + "grad_norm": 4.886504545556072, + "learning_rate": 5.842082943784433e-07, + "loss": 1.5513, + "step": 5209 + }, + { + "epoch": 0.8941136090612665, + "grad_norm": 4.787234713050884, + "learning_rate": 5.823377348370152e-07, + "loss": 1.737, + "step": 5210 + }, + { + "epoch": 0.8942852239574395, + "grad_norm": 4.526442662219888, + "learning_rate": 5.804700849307699e-07, + "loss": 1.3806, + "step": 5211 + }, + { + "epoch": 0.8944568388536125, + "grad_norm": 4.204401915207515, + "learning_rate": 5.78605345236728e-07, + "loss": 1.451, + "step": 5212 + }, + { + "epoch": 0.8946284537497855, + "grad_norm": 3.7932028280801005, + "learning_rate": 5.767435163310109e-07, + "loss": 1.45, + "step": 5213 + }, + { + "epoch": 0.8948000686459585, + "grad_norm": 4.813596301989631, + "learning_rate": 5.748845987888396e-07, + "loss": 1.5563, + "step": 5214 + }, + { + "epoch": 0.8949716835421314, + "grad_norm": 4.221783949309949, + "learning_rate": 5.730285931845381e-07, + "loss": 1.5451, + "step": 5215 + }, + { + "epoch": 0.8951432984383044, + "grad_norm": 4.509655846209165, + "learning_rate": 5.711755000915265e-07, + "loss": 1.4448, + "step": 5216 + }, + { + "epoch": 0.8953149133344774, + "grad_norm": 4.428722973126899, + "learning_rate": 5.693253200823301e-07, + "loss": 1.4418, + "step": 5217 + }, + { + "epoch": 0.8954865282306504, + "grad_norm": 4.4419654505271, + "learning_rate": 5.674780537285673e-07, + "loss": 1.5026, + "step": 5218 + }, + { + "epoch": 0.8956581431268235, + "grad_norm": 4.513905473356673, + "learning_rate": 5.65633701600965e-07, + "loss": 1.59, + "step": 5219 + }, + { + "epoch": 0.8958297580229964, + "grad_norm": 3.910233224502691, + "learning_rate": 5.637922642693439e-07, + "loss": 1.47, + "step": 5220 + }, + { + "epoch": 0.8960013729191694, + "grad_norm": 4.49935798951609, + "learning_rate": 5.619537423026234e-07, + "loss": 1.5129, + "step": 5221 + }, + { + "epoch": 0.8961729878153424, + "grad_norm": 4.038674296468667, + "learning_rate": 5.601181362688257e-07, + "loss": 1.438, + "step": 5222 + }, + { + "epoch": 0.8963446027115154, + "grad_norm": 3.7292276019146233, + "learning_rate": 5.582854467350729e-07, + "loss": 1.5849, + "step": 5223 + }, + { + "epoch": 0.8965162176076883, + "grad_norm": 4.3061995042493, + "learning_rate": 5.564556742675808e-07, + "loss": 1.715, + "step": 5224 + }, + { + "epoch": 0.8966878325038613, + "grad_norm": 4.128244697435798, + "learning_rate": 5.546288194316684e-07, + "loss": 1.4726, + "step": 5225 + }, + { + "epoch": 0.8968594474000343, + "grad_norm": 4.671448654437281, + "learning_rate": 5.52804882791752e-07, + "loss": 1.5278, + "step": 5226 + }, + { + "epoch": 0.8970310622962073, + "grad_norm": 3.9520327928409844, + "learning_rate": 5.509838649113442e-07, + "loss": 1.2045, + "step": 5227 + }, + { + "epoch": 0.8972026771923803, + "grad_norm": 4.03523079885433, + "learning_rate": 5.491657663530603e-07, + "loss": 1.5759, + "step": 5228 + }, + { + "epoch": 0.8973742920885532, + "grad_norm": 4.513092002116728, + "learning_rate": 5.473505876786123e-07, + "loss": 1.6297, + "step": 5229 + }, + { + "epoch": 0.8975459069847262, + "grad_norm": 4.988141289982714, + "learning_rate": 5.455383294488059e-07, + "loss": 1.4124, + "step": 5230 + }, + { + "epoch": 0.8977175218808993, + "grad_norm": 4.800390771221265, + "learning_rate": 5.437289922235489e-07, + "loss": 
1.5722, + "step": 5231 + }, + { + "epoch": 0.8978891367770723, + "grad_norm": 4.938742163744131, + "learning_rate": 5.419225765618442e-07, + "loss": 1.4789, + "step": 5232 + }, + { + "epoch": 0.8980607516732453, + "grad_norm": 4.325577003534197, + "learning_rate": 5.40119083021794e-07, + "loss": 1.4945, + "step": 5233 + }, + { + "epoch": 0.8982323665694182, + "grad_norm": 4.55228196129667, + "learning_rate": 5.383185121605994e-07, + "loss": 1.3432, + "step": 5234 + }, + { + "epoch": 0.8984039814655912, + "grad_norm": 4.47766500065033, + "learning_rate": 5.365208645345532e-07, + "loss": 1.4544, + "step": 5235 + }, + { + "epoch": 0.8985755963617642, + "grad_norm": 4.5823923539412394, + "learning_rate": 5.347261406990468e-07, + "loss": 1.3966, + "step": 5236 + }, + { + "epoch": 0.8987472112579372, + "grad_norm": 4.487230443599707, + "learning_rate": 5.329343412085708e-07, + "loss": 1.4559, + "step": 5237 + }, + { + "epoch": 0.8989188261541102, + "grad_norm": 5.201942990297392, + "learning_rate": 5.311454666167115e-07, + "loss": 1.6059, + "step": 5238 + }, + { + "epoch": 0.8990904410502831, + "grad_norm": 3.556085086748516, + "learning_rate": 5.293595174761523e-07, + "loss": 1.224, + "step": 5239 + }, + { + "epoch": 0.8992620559464561, + "grad_norm": 4.705590547794815, + "learning_rate": 5.275764943386674e-07, + "loss": 1.5347, + "step": 5240 + }, + { + "epoch": 0.8994336708426292, + "grad_norm": 3.975371850358197, + "learning_rate": 5.25796397755135e-07, + "loss": 1.4857, + "step": 5241 + }, + { + "epoch": 0.8996052857388022, + "grad_norm": 5.261206540896355, + "learning_rate": 5.240192282755219e-07, + "loss": 1.4698, + "step": 5242 + }, + { + "epoch": 0.8997769006349751, + "grad_norm": 4.311722203810764, + "learning_rate": 5.222449864488943e-07, + "loss": 1.6503, + "step": 5243 + }, + { + "epoch": 0.8999485155311481, + "grad_norm": 4.327256117526736, + "learning_rate": 5.204736728234161e-07, + "loss": 1.5411, + "step": 5244 + }, + { + "epoch": 0.9001201304273211, + "grad_norm": 3.8397599977089083, + "learning_rate": 5.187052879463394e-07, + "loss": 1.453, + "step": 5245 + }, + { + "epoch": 0.9002917453234941, + "grad_norm": 4.755520237202437, + "learning_rate": 5.169398323640196e-07, + "loss": 1.4264, + "step": 5246 + }, + { + "epoch": 0.9004633602196671, + "grad_norm": 4.460485017045077, + "learning_rate": 5.151773066219024e-07, + "loss": 1.4779, + "step": 5247 + }, + { + "epoch": 0.90063497511584, + "grad_norm": 4.646715853530595, + "learning_rate": 5.134177112645267e-07, + "loss": 1.4989, + "step": 5248 + }, + { + "epoch": 0.900806590012013, + "grad_norm": 4.008537925585396, + "learning_rate": 5.116610468355332e-07, + "loss": 1.4514, + "step": 5249 + }, + { + "epoch": 0.900978204908186, + "grad_norm": 3.887643337022098, + "learning_rate": 5.099073138776467e-07, + "loss": 1.3845, + "step": 5250 + }, + { + "epoch": 0.901149819804359, + "grad_norm": 3.9385917428235864, + "learning_rate": 5.08156512932696e-07, + "loss": 1.5364, + "step": 5251 + }, + { + "epoch": 0.901321434700532, + "grad_norm": 4.058467145465141, + "learning_rate": 5.064086445415983e-07, + "loss": 1.5497, + "step": 5252 + }, + { + "epoch": 0.901493049596705, + "grad_norm": 5.143232568164296, + "learning_rate": 5.046637092443696e-07, + "loss": 1.5667, + "step": 5253 + }, + { + "epoch": 0.901664664492878, + "grad_norm": 3.969519446132455, + "learning_rate": 5.029217075801141e-07, + "loss": 1.2721, + "step": 5254 + }, + { + "epoch": 0.901836279389051, + "grad_norm": 4.403318344872112, + "learning_rate": 5.011826400870301e-07, + 
"loss": 1.5335, + "step": 5255 + }, + { + "epoch": 0.902007894285224, + "grad_norm": 4.9982191788793475, + "learning_rate": 4.994465073024147e-07, + "loss": 1.8223, + "step": 5256 + }, + { + "epoch": 0.902179509181397, + "grad_norm": 3.776378028347021, + "learning_rate": 4.97713309762653e-07, + "loss": 1.4572, + "step": 5257 + }, + { + "epoch": 0.9023511240775699, + "grad_norm": 4.455696113057125, + "learning_rate": 4.95983048003228e-07, + "loss": 1.4217, + "step": 5258 + }, + { + "epoch": 0.9025227389737429, + "grad_norm": 4.165256101939233, + "learning_rate": 4.942557225587119e-07, + "loss": 1.5347, + "step": 5259 + }, + { + "epoch": 0.9026943538699159, + "grad_norm": 6.578797851625758, + "learning_rate": 4.925313339627679e-07, + "loss": 1.5379, + "step": 5260 + }, + { + "epoch": 0.9028659687660889, + "grad_norm": 3.8680726495336217, + "learning_rate": 4.908098827481578e-07, + "loss": 1.479, + "step": 5261 + }, + { + "epoch": 0.9030375836622618, + "grad_norm": 4.545699649524925, + "learning_rate": 4.890913694467325e-07, + "loss": 1.4346, + "step": 5262 + }, + { + "epoch": 0.9032091985584348, + "grad_norm": 4.181696160904031, + "learning_rate": 4.873757945894352e-07, + "loss": 1.3432, + "step": 5263 + }, + { + "epoch": 0.9033808134546079, + "grad_norm": 4.582981631392994, + "learning_rate": 4.85663158706301e-07, + "loss": 1.6212, + "step": 5264 + }, + { + "epoch": 0.9035524283507809, + "grad_norm": 4.8575917294020625, + "learning_rate": 4.839534623264586e-07, + "loss": 1.3309, + "step": 5265 + }, + { + "epoch": 0.9037240432469539, + "grad_norm": 5.494250578514054, + "learning_rate": 4.822467059781255e-07, + "loss": 1.6245, + "step": 5266 + }, + { + "epoch": 0.9038956581431268, + "grad_norm": 4.672265268035886, + "learning_rate": 4.805428901886145e-07, + "loss": 1.6497, + "step": 5267 + }, + { + "epoch": 0.9040672730392998, + "grad_norm": 5.219870244895374, + "learning_rate": 4.788420154843287e-07, + "loss": 1.5394, + "step": 5268 + }, + { + "epoch": 0.9042388879354728, + "grad_norm": 5.382312411966969, + "learning_rate": 4.771440823907603e-07, + "loss": 1.5024, + "step": 5269 + }, + { + "epoch": 0.9044105028316458, + "grad_norm": 4.363802487685458, + "learning_rate": 4.7544909143249605e-07, + "loss": 1.2821, + "step": 5270 + }, + { + "epoch": 0.9045821177278188, + "grad_norm": 4.038441773227084, + "learning_rate": 4.737570431332128e-07, + "loss": 1.2983, + "step": 5271 + }, + { + "epoch": 0.9047537326239917, + "grad_norm": 4.3903952681436476, + "learning_rate": 4.720679380156745e-07, + "loss": 1.6009, + "step": 5272 + }, + { + "epoch": 0.9049253475201647, + "grad_norm": 4.1774739609620335, + "learning_rate": 4.703817766017427e-07, + "loss": 1.4655, + "step": 5273 + }, + { + "epoch": 0.9050969624163377, + "grad_norm": 4.788134222975619, + "learning_rate": 4.6869855941236165e-07, + "loss": 1.4647, + "step": 5274 + }, + { + "epoch": 0.9052685773125108, + "grad_norm": 4.117806313593401, + "learning_rate": 4.6701828696757213e-07, + "loss": 1.68, + "step": 5275 + }, + { + "epoch": 0.9054401922086837, + "grad_norm": 3.643260914188979, + "learning_rate": 4.653409597865033e-07, + "loss": 1.4958, + "step": 5276 + }, + { + "epoch": 0.9056118071048567, + "grad_norm": 5.456473938336759, + "learning_rate": 4.6366657838737394e-07, + "loss": 1.4086, + "step": 5277 + }, + { + "epoch": 0.9057834220010297, + "grad_norm": 4.485563589001977, + "learning_rate": 4.619951432874936e-07, + "loss": 1.1558, + "step": 5278 + }, + { + "epoch": 0.9059550368972027, + "grad_norm": 3.949412510101276, + "learning_rate": 
4.6032665500325704e-07, + "loss": 1.5094, + "step": 5279 + }, + { + "epoch": 0.9061266517933757, + "grad_norm": 5.767455139586388, + "learning_rate": 4.586611140501551e-07, + "loss": 1.4061, + "step": 5280 + }, + { + "epoch": 0.9062982666895486, + "grad_norm": 4.065568408551128, + "learning_rate": 4.569985209427652e-07, + "loss": 1.4209, + "step": 5281 + }, + { + "epoch": 0.9064698815857216, + "grad_norm": 4.178978782132392, + "learning_rate": 4.5533887619475415e-07, + "loss": 1.3886, + "step": 5282 + }, + { + "epoch": 0.9066414964818946, + "grad_norm": 5.199621160985476, + "learning_rate": 4.5368218031887735e-07, + "loss": 1.4529, + "step": 5283 + }, + { + "epoch": 0.9068131113780676, + "grad_norm": 5.037264222852291, + "learning_rate": 4.520284338269787e-07, + "loss": 1.5575, + "step": 5284 + }, + { + "epoch": 0.9069847262742406, + "grad_norm": 3.7275594194059742, + "learning_rate": 4.5037763722999284e-07, + "loss": 1.4773, + "step": 5285 + }, + { + "epoch": 0.9071563411704135, + "grad_norm": 4.42312004462506, + "learning_rate": 4.487297910379407e-07, + "loss": 1.6172, + "step": 5286 + }, + { + "epoch": 0.9073279560665866, + "grad_norm": 4.201406125086892, + "learning_rate": 4.4708489575993496e-07, + "loss": 1.3437, + "step": 5287 + }, + { + "epoch": 0.9074995709627596, + "grad_norm": 3.998668600183201, + "learning_rate": 4.4544295190417253e-07, + "loss": 1.5552, + "step": 5288 + }, + { + "epoch": 0.9076711858589326, + "grad_norm": 3.961995186279598, + "learning_rate": 4.438039599779409e-07, + "loss": 1.4371, + "step": 5289 + }, + { + "epoch": 0.9078428007551056, + "grad_norm": 4.128514877165511, + "learning_rate": 4.42167920487615e-07, + "loss": 1.6085, + "step": 5290 + }, + { + "epoch": 0.9080144156512785, + "grad_norm": 5.047524015574739, + "learning_rate": 4.4053483393865724e-07, + "loss": 1.7038, + "step": 5291 + }, + { + "epoch": 0.9081860305474515, + "grad_norm": 4.673752678909029, + "learning_rate": 4.3890470083562066e-07, + "loss": 1.5429, + "step": 5292 + }, + { + "epoch": 0.9083576454436245, + "grad_norm": 4.77742482137629, + "learning_rate": 4.37277521682139e-07, + "loss": 1.6082, + "step": 5293 + }, + { + "epoch": 0.9085292603397975, + "grad_norm": 4.630445791702161, + "learning_rate": 4.356532969809402e-07, + "loss": 1.2921, + "step": 5294 + }, + { + "epoch": 0.9087008752359704, + "grad_norm": 4.139412099054128, + "learning_rate": 4.340320272338372e-07, + "loss": 1.5239, + "step": 5295 + }, + { + "epoch": 0.9088724901321434, + "grad_norm": 3.9404021236159936, + "learning_rate": 4.324137129417283e-07, + "loss": 1.5066, + "step": 5296 + }, + { + "epoch": 0.9090441050283165, + "grad_norm": 3.8511515844541666, + "learning_rate": 4.307983546046024e-07, + "loss": 1.5601, + "step": 5297 + }, + { + "epoch": 0.9092157199244895, + "grad_norm": 4.318761606690782, + "learning_rate": 4.2918595272152916e-07, + "loss": 1.4133, + "step": 5298 + }, + { + "epoch": 0.9093873348206625, + "grad_norm": 5.163062131663407, + "learning_rate": 4.2757650779067016e-07, + "loss": 1.7038, + "step": 5299 + }, + { + "epoch": 0.9095589497168354, + "grad_norm": 5.281425754772876, + "learning_rate": 4.25970020309272e-07, + "loss": 1.5196, + "step": 5300 + }, + { + "epoch": 0.9097305646130084, + "grad_norm": 4.202807507081207, + "learning_rate": 4.243664907736689e-07, + "loss": 1.3688, + "step": 5301 + }, + { + "epoch": 0.9099021795091814, + "grad_norm": 4.136558213734609, + "learning_rate": 4.2276591967927663e-07, + "loss": 1.1983, + "step": 5302 + }, + { + "epoch": 0.9100737944053544, + "grad_norm": 
4.1092169348474, + "learning_rate": 4.211683075206008e-07, + "loss": 1.6398, + "step": 5303 + }, + { + "epoch": 0.9102454093015274, + "grad_norm": 5.159421862381322, + "learning_rate": 4.19573654791231e-07, + "loss": 1.6633, + "step": 5304 + }, + { + "epoch": 0.9104170241977003, + "grad_norm": 4.620241065129417, + "learning_rate": 4.1798196198384545e-07, + "loss": 1.4688, + "step": 5305 + }, + { + "epoch": 0.9105886390938733, + "grad_norm": 5.464614299595415, + "learning_rate": 4.163932295902051e-07, + "loss": 1.4655, + "step": 5306 + }, + { + "epoch": 0.9107602539900463, + "grad_norm": 3.795028293352429, + "learning_rate": 4.148074581011574e-07, + "loss": 1.2971, + "step": 5307 + }, + { + "epoch": 0.9109318688862194, + "grad_norm": 4.27588811343906, + "learning_rate": 4.1322464800663265e-07, + "loss": 1.2765, + "step": 5308 + }, + { + "epoch": 0.9111034837823924, + "grad_norm": 4.919279668780527, + "learning_rate": 4.1164479979564853e-07, + "loss": 1.7586, + "step": 5309 + }, + { + "epoch": 0.9112750986785653, + "grad_norm": 4.65101060675833, + "learning_rate": 4.1006791395630906e-07, + "loss": 1.4632, + "step": 5310 + }, + { + "epoch": 0.9114467135747383, + "grad_norm": 4.701162057334624, + "learning_rate": 4.084939909758012e-07, + "loss": 1.5103, + "step": 5311 + }, + { + "epoch": 0.9116183284709113, + "grad_norm": 4.360328841283706, + "learning_rate": 4.069230313403949e-07, + "loss": 1.5297, + "step": 5312 + }, + { + "epoch": 0.9117899433670843, + "grad_norm": 4.638740863989734, + "learning_rate": 4.053550355354485e-07, + "loss": 1.7828, + "step": 5313 + }, + { + "epoch": 0.9119615582632572, + "grad_norm": 4.159565266083416, + "learning_rate": 4.03790004045399e-07, + "loss": 1.4288, + "step": 5314 + }, + { + "epoch": 0.9121331731594302, + "grad_norm": 4.232778824601497, + "learning_rate": 4.0222793735377295e-07, + "loss": 1.6523, + "step": 5315 + }, + { + "epoch": 0.9123047880556032, + "grad_norm": 5.449907376759737, + "learning_rate": 4.006688359431798e-07, + "loss": 1.4393, + "step": 5316 + }, + { + "epoch": 0.9124764029517762, + "grad_norm": 4.8068634680623425, + "learning_rate": 3.991127002953099e-07, + "loss": 1.521, + "step": 5317 + }, + { + "epoch": 0.9126480178479492, + "grad_norm": 4.981338601725879, + "learning_rate": 3.975595308909397e-07, + "loss": 1.3967, + "step": 5318 + }, + { + "epoch": 0.9128196327441221, + "grad_norm": 5.441131859753169, + "learning_rate": 3.96009328209932e-07, + "loss": 1.5284, + "step": 5319 + }, + { + "epoch": 0.9129912476402952, + "grad_norm": 4.659763391867403, + "learning_rate": 3.944620927312248e-07, + "loss": 1.5035, + "step": 5320 + }, + { + "epoch": 0.9131628625364682, + "grad_norm": 4.077267955695744, + "learning_rate": 3.9291782493284913e-07, + "loss": 1.2405, + "step": 5321 + }, + { + "epoch": 0.9133344774326412, + "grad_norm": 3.778531105247793, + "learning_rate": 3.9137652529191105e-07, + "loss": 1.3553, + "step": 5322 + }, + { + "epoch": 0.9135060923288142, + "grad_norm": 4.519886256364349, + "learning_rate": 3.8983819428460414e-07, + "loss": 1.472, + "step": 5323 + }, + { + "epoch": 0.9136777072249871, + "grad_norm": 5.036479945569544, + "learning_rate": 3.883028323862048e-07, + "loss": 1.6447, + "step": 5324 + }, + { + "epoch": 0.9138493221211601, + "grad_norm": 4.143650999507501, + "learning_rate": 3.867704400710703e-07, + "loss": 1.457, + "step": 5325 + }, + { + "epoch": 0.9140209370173331, + "grad_norm": 4.151242116647198, + "learning_rate": 3.852410178126409e-07, + "loss": 1.5368, + "step": 5326 + }, + { + "epoch": 
0.9141925519135061, + "grad_norm": 4.8733356611955765, + "learning_rate": 3.837145660834385e-07, + "loss": 1.6504, + "step": 5327 + }, + { + "epoch": 0.9143641668096791, + "grad_norm": 4.6171287558702705, + "learning_rate": 3.821910853550692e-07, + "loss": 1.68, + "step": 5328 + }, + { + "epoch": 0.914535781705852, + "grad_norm": 4.195227763871497, + "learning_rate": 3.80670576098221e-07, + "loss": 1.4752, + "step": 5329 + }, + { + "epoch": 0.914707396602025, + "grad_norm": 3.906091652760167, + "learning_rate": 3.7915303878266253e-07, + "loss": 1.2803, + "step": 5330 + }, + { + "epoch": 0.9148790114981981, + "grad_norm": 4.182155883621883, + "learning_rate": 3.776384738772443e-07, + "loss": 1.4749, + "step": 5331 + }, + { + "epoch": 0.9150506263943711, + "grad_norm": 4.718364924466981, + "learning_rate": 3.7612688184989865e-07, + "loss": 1.5893, + "step": 5332 + }, + { + "epoch": 0.915222241290544, + "grad_norm": 4.555852311365258, + "learning_rate": 3.7461826316763984e-07, + "loss": 1.4063, + "step": 5333 + }, + { + "epoch": 0.915393856186717, + "grad_norm": 4.757136065648205, + "learning_rate": 3.7311261829656276e-07, + "loss": 1.4902, + "step": 5334 + }, + { + "epoch": 0.91556547108289, + "grad_norm": 4.914420316871826, + "learning_rate": 3.716099477018475e-07, + "loss": 1.6006, + "step": 5335 + }, + { + "epoch": 0.915737085979063, + "grad_norm": 5.553957609853261, + "learning_rate": 3.701102518477473e-07, + "loss": 1.547, + "step": 5336 + }, + { + "epoch": 0.915908700875236, + "grad_norm": 4.256268050121203, + "learning_rate": 3.686135311976047e-07, + "loss": 1.612, + "step": 5337 + }, + { + "epoch": 0.9160803157714089, + "grad_norm": 4.245408389763915, + "learning_rate": 3.6711978621383647e-07, + "loss": 1.5116, + "step": 5338 + }, + { + "epoch": 0.9162519306675819, + "grad_norm": 5.081687820531221, + "learning_rate": 3.6562901735794464e-07, + "loss": 1.7774, + "step": 5339 + }, + { + "epoch": 0.9164235455637549, + "grad_norm": 5.424587713412453, + "learning_rate": 3.6414122509050963e-07, + "loss": 1.7561, + "step": 5340 + }, + { + "epoch": 0.916595160459928, + "grad_norm": 4.0159394527488, + "learning_rate": 3.626564098711904e-07, + "loss": 1.2071, + "step": 5341 + }, + { + "epoch": 0.916766775356101, + "grad_norm": 3.653732629353631, + "learning_rate": 3.6117457215873007e-07, + "loss": 1.31, + "step": 5342 + }, + { + "epoch": 0.9169383902522739, + "grad_norm": 4.417356671719247, + "learning_rate": 3.5969571241095125e-07, + "loss": 1.491, + "step": 5343 + }, + { + "epoch": 0.9171100051484469, + "grad_norm": 4.205177223904458, + "learning_rate": 3.582198310847529e-07, + "loss": 1.3586, + "step": 5344 + }, + { + "epoch": 0.9172816200446199, + "grad_norm": 4.9544216874469615, + "learning_rate": 3.56746928636118e-07, + "loss": 1.3357, + "step": 5345 + }, + { + "epoch": 0.9174532349407929, + "grad_norm": 4.545971443192365, + "learning_rate": 3.55277005520106e-07, + "loss": 1.4425, + "step": 5346 + }, + { + "epoch": 0.9176248498369658, + "grad_norm": 4.722418176836132, + "learning_rate": 3.538100621908569e-07, + "loss": 1.3921, + "step": 5347 + }, + { + "epoch": 0.9177964647331388, + "grad_norm": 4.057783399430007, + "learning_rate": 3.523460991015915e-07, + "loss": 1.6308, + "step": 5348 + }, + { + "epoch": 0.9179680796293118, + "grad_norm": 5.273019770925544, + "learning_rate": 3.50885116704609e-07, + "loss": 1.7232, + "step": 5349 + }, + { + "epoch": 0.9181396945254848, + "grad_norm": 4.42627069754146, + "learning_rate": 3.494271154512874e-07, + "loss": 1.5878, + "step": 5350 + }, + { + 
"epoch": 0.9183113094216578, + "grad_norm": 4.342110447707852, + "learning_rate": 3.4797209579208067e-07, + "loss": 1.5794, + "step": 5351 + }, + { + "epoch": 0.9184829243178307, + "grad_norm": 5.160650884382741, + "learning_rate": 3.4652005817652824e-07, + "loss": 1.434, + "step": 5352 + }, + { + "epoch": 0.9186545392140038, + "grad_norm": 4.672146926715929, + "learning_rate": 3.4507100305324337e-07, + "loss": 1.447, + "step": 5353 + }, + { + "epoch": 0.9188261541101768, + "grad_norm": 4.647768137125765, + "learning_rate": 3.436249308699202e-07, + "loss": 1.4473, + "step": 5354 + }, + { + "epoch": 0.9189977690063498, + "grad_norm": 4.3358851893594474, + "learning_rate": 3.4218184207332804e-07, + "loss": 1.4792, + "step": 5355 + }, + { + "epoch": 0.9191693839025228, + "grad_norm": 4.0404804565843975, + "learning_rate": 3.4074173710931804e-07, + "loss": 1.5571, + "step": 5356 + }, + { + "epoch": 0.9193409987986957, + "grad_norm": 3.917131038925715, + "learning_rate": 3.3930461642281644e-07, + "loss": 1.4437, + "step": 5357 + }, + { + "epoch": 0.9195126136948687, + "grad_norm": 5.9250294479410925, + "learning_rate": 3.3787048045783146e-07, + "loss": 1.6586, + "step": 5358 + }, + { + "epoch": 0.9196842285910417, + "grad_norm": 5.32193496516879, + "learning_rate": 3.364393296574453e-07, + "loss": 1.6385, + "step": 5359 + }, + { + "epoch": 0.9198558434872147, + "grad_norm": 3.6713072214805345, + "learning_rate": 3.3501116446381875e-07, + "loss": 1.4173, + "step": 5360 + }, + { + "epoch": 0.9200274583833877, + "grad_norm": 4.325247897984546, + "learning_rate": 3.335859853181922e-07, + "loss": 1.594, + "step": 5361 + }, + { + "epoch": 0.9201990732795606, + "grad_norm": 3.9556601346905746, + "learning_rate": 3.321637926608823e-07, + "loss": 1.362, + "step": 5362 + }, + { + "epoch": 0.9203706881757336, + "grad_norm": 4.0808374972855646, + "learning_rate": 3.307445869312809e-07, + "loss": 1.4276, + "step": 5363 + }, + { + "epoch": 0.9205423030719067, + "grad_norm": 4.428345514591795, + "learning_rate": 3.2932836856786164e-07, + "loss": 1.3601, + "step": 5364 + }, + { + "epoch": 0.9207139179680797, + "grad_norm": 5.326500874434499, + "learning_rate": 3.279151380081691e-07, + "loss": 1.5781, + "step": 5365 + }, + { + "epoch": 0.9208855328642526, + "grad_norm": 4.57421907352443, + "learning_rate": 3.265048956888306e-07, + "loss": 1.4663, + "step": 5366 + }, + { + "epoch": 0.9210571477604256, + "grad_norm": 4.613812049849862, + "learning_rate": 3.250976420455465e-07, + "loss": 1.6557, + "step": 5367 + }, + { + "epoch": 0.9212287626565986, + "grad_norm": 4.430156567713831, + "learning_rate": 3.236933775130968e-07, + "loss": 1.7358, + "step": 5368 + }, + { + "epoch": 0.9214003775527716, + "grad_norm": 5.04977908171309, + "learning_rate": 3.222921025253356e-07, + "loss": 1.4869, + "step": 5369 + }, + { + "epoch": 0.9215719924489446, + "grad_norm": 5.115593825488799, + "learning_rate": 3.20893817515191e-07, + "loss": 1.5958, + "step": 5370 + }, + { + "epoch": 0.9217436073451175, + "grad_norm": 4.088029261173349, + "learning_rate": 3.194985229146741e-07, + "loss": 1.2138, + "step": 5371 + }, + { + "epoch": 0.9219152222412905, + "grad_norm": 4.5759428070142505, + "learning_rate": 3.181062191548667e-07, + "loss": 1.2562, + "step": 5372 + }, + { + "epoch": 0.9220868371374635, + "grad_norm": 5.254876753182029, + "learning_rate": 3.1671690666592925e-07, + "loss": 1.3768, + "step": 5373 + }, + { + "epoch": 0.9222584520336365, + "grad_norm": 4.775672443241848, + "learning_rate": 3.1533058587709606e-07, + "loss": 
1.4375, + "step": 5374 + }, + { + "epoch": 0.9224300669298096, + "grad_norm": 4.162721060436665, + "learning_rate": 3.1394725721667794e-07, + "loss": 1.2617, + "step": 5375 + }, + { + "epoch": 0.9226016818259825, + "grad_norm": 5.306961707635675, + "learning_rate": 3.125669211120619e-07, + "loss": 1.6401, + "step": 5376 + }, + { + "epoch": 0.9227732967221555, + "grad_norm": 4.104382892199951, + "learning_rate": 3.1118957798970895e-07, + "loss": 1.2813, + "step": 5377 + }, + { + "epoch": 0.9229449116183285, + "grad_norm": 3.978223757181011, + "learning_rate": 3.0981522827515876e-07, + "loss": 1.4656, + "step": 5378 + }, + { + "epoch": 0.9231165265145015, + "grad_norm": 5.180532339256917, + "learning_rate": 3.084438723930205e-07, + "loss": 1.4536, + "step": 5379 + }, + { + "epoch": 0.9232881414106745, + "grad_norm": 5.061494779717264, + "learning_rate": 3.070755107669843e-07, + "loss": 1.5193, + "step": 5380 + }, + { + "epoch": 0.9234597563068474, + "grad_norm": 4.647210995365146, + "learning_rate": 3.057101438198107e-07, + "loss": 1.6764, + "step": 5381 + }, + { + "epoch": 0.9236313712030204, + "grad_norm": 4.596035808100414, + "learning_rate": 3.0434777197333785e-07, + "loss": 1.5186, + "step": 5382 + }, + { + "epoch": 0.9238029860991934, + "grad_norm": 4.246198579622432, + "learning_rate": 3.0298839564847804e-07, + "loss": 1.3931, + "step": 5383 + }, + { + "epoch": 0.9239746009953664, + "grad_norm": 5.472122145728372, + "learning_rate": 3.016320152652152e-07, + "loss": 1.4979, + "step": 5384 + }, + { + "epoch": 0.9241462158915393, + "grad_norm": 4.483257198830271, + "learning_rate": 3.0027863124261204e-07, + "loss": 1.6708, + "step": 5385 + }, + { + "epoch": 0.9243178307877123, + "grad_norm": 4.283443055866963, + "learning_rate": 2.989282439988039e-07, + "loss": 1.5622, + "step": 5386 + }, + { + "epoch": 0.9244894456838854, + "grad_norm": 5.108144941988228, + "learning_rate": 2.975808539509983e-07, + "loss": 1.4247, + "step": 5387 + }, + { + "epoch": 0.9246610605800584, + "grad_norm": 4.9080051075921345, + "learning_rate": 2.9623646151548004e-07, + "loss": 1.5395, + "step": 5388 + }, + { + "epoch": 0.9248326754762314, + "grad_norm": 3.9609765253317093, + "learning_rate": 2.948950671076034e-07, + "loss": 1.292, + "step": 5389 + }, + { + "epoch": 0.9250042903724043, + "grad_norm": 4.077791367965805, + "learning_rate": 2.935566711418014e-07, + "loss": 1.46, + "step": 5390 + }, + { + "epoch": 0.9251759052685773, + "grad_norm": 5.31914432062151, + "learning_rate": 2.9222127403157773e-07, + "loss": 1.5185, + "step": 5391 + }, + { + "epoch": 0.9253475201647503, + "grad_norm": 4.6330553464781685, + "learning_rate": 2.908888761895112e-07, + "loss": 1.518, + "step": 5392 + }, + { + "epoch": 0.9255191350609233, + "grad_norm": 4.125501160404574, + "learning_rate": 2.895594780272515e-07, + "loss": 1.4487, + "step": 5393 + }, + { + "epoch": 0.9256907499570963, + "grad_norm": 5.050516248080808, + "learning_rate": 2.882330799555233e-07, + "loss": 1.4311, + "step": 5394 + }, + { + "epoch": 0.9258623648532692, + "grad_norm": 4.276476782200002, + "learning_rate": 2.8690968238412444e-07, + "loss": 1.3596, + "step": 5395 + }, + { + "epoch": 0.9260339797494422, + "grad_norm": 4.455226772166471, + "learning_rate": 2.8558928572192444e-07, + "loss": 1.5735, + "step": 5396 + }, + { + "epoch": 0.9262055946456152, + "grad_norm": 4.452040685222897, + "learning_rate": 2.8427189037686933e-07, + "loss": 1.4951, + "step": 5397 + }, + { + "epoch": 0.9263772095417883, + "grad_norm": 4.883370990093987, + "learning_rate": 
2.829574967559723e-07, + "loss": 1.5208, + "step": 5398 + }, + { + "epoch": 0.9265488244379612, + "grad_norm": 4.10642429487041, + "learning_rate": 2.81646105265323e-07, + "loss": 1.3949, + "step": 5399 + }, + { + "epoch": 0.9267204393341342, + "grad_norm": 5.00072667775669, + "learning_rate": 2.803377163100818e-07, + "loss": 1.5265, + "step": 5400 + }, + { + "epoch": 0.9268920542303072, + "grad_norm": 4.845865399519903, + "learning_rate": 2.790323302944831e-07, + "loss": 1.4133, + "step": 5401 + }, + { + "epoch": 0.9270636691264802, + "grad_norm": 4.690431091503764, + "learning_rate": 2.7772994762183426e-07, + "loss": 1.5244, + "step": 5402 + }, + { + "epoch": 0.9272352840226532, + "grad_norm": 5.091401768899432, + "learning_rate": 2.7643056869451015e-07, + "loss": 1.3736, + "step": 5403 + }, + { + "epoch": 0.9274068989188261, + "grad_norm": 4.591526642457146, + "learning_rate": 2.751341939139618e-07, + "loss": 1.5156, + "step": 5404 + }, + { + "epoch": 0.9275785138149991, + "grad_norm": 5.37448574464333, + "learning_rate": 2.738408236807111e-07, + "loss": 1.5098, + "step": 5405 + }, + { + "epoch": 0.9277501287111721, + "grad_norm": 4.2669076798826, + "learning_rate": 2.7255045839435054e-07, + "loss": 1.2476, + "step": 5406 + }, + { + "epoch": 0.9279217436073451, + "grad_norm": 4.102832677100814, + "learning_rate": 2.7126309845354694e-07, + "loss": 1.4285, + "step": 5407 + }, + { + "epoch": 0.9280933585035182, + "grad_norm": 5.35685891960044, + "learning_rate": 2.699787442560342e-07, + "loss": 1.6172, + "step": 5408 + }, + { + "epoch": 0.928264973399691, + "grad_norm": 5.3607508756824505, + "learning_rate": 2.6869739619862165e-07, + "loss": 1.505, + "step": 5409 + }, + { + "epoch": 0.9284365882958641, + "grad_norm": 3.655402870444697, + "learning_rate": 2.674190546771904e-07, + "loss": 1.3444, + "step": 5410 + }, + { + "epoch": 0.9286082031920371, + "grad_norm": 4.080997386538315, + "learning_rate": 2.6614372008668675e-07, + "loss": 1.1752, + "step": 5411 + }, + { + "epoch": 0.9287798180882101, + "grad_norm": 5.681595602476748, + "learning_rate": 2.6487139282113437e-07, + "loss": 1.515, + "step": 5412 + }, + { + "epoch": 0.9289514329843831, + "grad_norm": 4.230696391350242, + "learning_rate": 2.636020732736233e-07, + "loss": 1.5766, + "step": 5413 + }, + { + "epoch": 0.929123047880556, + "grad_norm": 4.444666272055504, + "learning_rate": 2.623357618363176e-07, + "loss": 1.4772, + "step": 5414 + }, + { + "epoch": 0.929294662776729, + "grad_norm": 4.577090789386312, + "learning_rate": 2.6107245890045205e-07, + "loss": 1.627, + "step": 5415 + }, + { + "epoch": 0.929466277672902, + "grad_norm": 4.813376168898806, + "learning_rate": 2.59812164856329e-07, + "loss": 1.4489, + "step": 5416 + }, + { + "epoch": 0.929637892569075, + "grad_norm": 4.468312323542875, + "learning_rate": 2.585548800933235e-07, + "loss": 1.3745, + "step": 5417 + }, + { + "epoch": 0.9298095074652479, + "grad_norm": 4.115224502173242, + "learning_rate": 2.573006049998783e-07, + "loss": 1.4818, + "step": 5418 + }, + { + "epoch": 0.9299811223614209, + "grad_norm": 5.082592295099144, + "learning_rate": 2.5604933996351e-07, + "loss": 1.6077, + "step": 5419 + }, + { + "epoch": 0.930152737257594, + "grad_norm": 5.3530434976445544, + "learning_rate": 2.5480108537080275e-07, + "loss": 1.3564, + "step": 5420 + }, + { + "epoch": 0.930324352153767, + "grad_norm": 4.207748830053649, + "learning_rate": 2.535558416074113e-07, + "loss": 1.3965, + "step": 5421 + }, + { + "epoch": 0.93049596704994, + "grad_norm": 4.110903228790358, + 
"learning_rate": 2.5231360905806133e-07, + "loss": 1.3848, + "step": 5422 + }, + { + "epoch": 0.9306675819461129, + "grad_norm": 4.131267606119521, + "learning_rate": 2.510743881065447e-07, + "loss": 1.3226, + "step": 5423 + }, + { + "epoch": 0.9308391968422859, + "grad_norm": 5.018272101149, + "learning_rate": 2.4983817913572626e-07, + "loss": 1.4884, + "step": 5424 + }, + { + "epoch": 0.9310108117384589, + "grad_norm": 3.957374956760122, + "learning_rate": 2.4860498252753827e-07, + "loss": 1.5885, + "step": 5425 + }, + { + "epoch": 0.9311824266346319, + "grad_norm": 4.209945118333955, + "learning_rate": 2.47374798662986e-07, + "loss": 1.262, + "step": 5426 + }, + { + "epoch": 0.9313540415308049, + "grad_norm": 4.851479119584087, + "learning_rate": 2.4614762792213754e-07, + "loss": 1.5034, + "step": 5427 + }, + { + "epoch": 0.9315256564269778, + "grad_norm": 4.737621251186431, + "learning_rate": 2.449234706841364e-07, + "loss": 1.7132, + "step": 5428 + }, + { + "epoch": 0.9316972713231508, + "grad_norm": 4.717165212880252, + "learning_rate": 2.437023273271888e-07, + "loss": 1.5048, + "step": 5429 + }, + { + "epoch": 0.9318688862193238, + "grad_norm": 4.793579595824607, + "learning_rate": 2.4248419822857636e-07, + "loss": 1.4585, + "step": 5430 + }, + { + "epoch": 0.9320405011154969, + "grad_norm": 4.803817184445834, + "learning_rate": 2.41269083764647e-07, + "loss": 1.3786, + "step": 5431 + }, + { + "epoch": 0.9322121160116699, + "grad_norm": 4.582613285977406, + "learning_rate": 2.4005698431081356e-07, + "loss": 1.4374, + "step": 5432 + }, + { + "epoch": 0.9323837309078428, + "grad_norm": 5.148065210219013, + "learning_rate": 2.3884790024156115e-07, + "loss": 1.8179, + "step": 5433 + }, + { + "epoch": 0.9325553458040158, + "grad_norm": 4.4222829713928515, + "learning_rate": 2.376418319304441e-07, + "loss": 1.432, + "step": 5434 + }, + { + "epoch": 0.9327269607001888, + "grad_norm": 4.866943439553488, + "learning_rate": 2.3643877975008223e-07, + "loss": 1.551, + "step": 5435 + }, + { + "epoch": 0.9328985755963618, + "grad_norm": 4.775044992338535, + "learning_rate": 2.3523874407216597e-07, + "loss": 1.448, + "step": 5436 + }, + { + "epoch": 0.9330701904925347, + "grad_norm": 4.14479706167544, + "learning_rate": 2.3404172526744984e-07, + "loss": 1.3905, + "step": 5437 + }, + { + "epoch": 0.9332418053887077, + "grad_norm": 5.441804555677087, + "learning_rate": 2.3284772370576137e-07, + "loss": 1.7822, + "step": 5438 + }, + { + "epoch": 0.9334134202848807, + "grad_norm": 4.423596700926595, + "learning_rate": 2.3165673975599213e-07, + "loss": 1.4915, + "step": 5439 + }, + { + "epoch": 0.9335850351810537, + "grad_norm": 4.150493018626314, + "learning_rate": 2.3046877378610443e-07, + "loss": 1.5582, + "step": 5440 + }, + { + "epoch": 0.9337566500772267, + "grad_norm": 5.125956238059363, + "learning_rate": 2.2928382616312582e-07, + "loss": 1.5916, + "step": 5441 + }, + { + "epoch": 0.9339282649733996, + "grad_norm": 4.124233777280232, + "learning_rate": 2.2810189725315013e-07, + "loss": 1.4833, + "step": 5442 + }, + { + "epoch": 0.9340998798695727, + "grad_norm": 4.686028271545141, + "learning_rate": 2.2692298742134188e-07, + "loss": 1.5007, + "step": 5443 + }, + { + "epoch": 0.9342714947657457, + "grad_norm": 3.4058910946900793, + "learning_rate": 2.2574709703193086e-07, + "loss": 1.1171, + "step": 5444 + }, + { + "epoch": 0.9344431096619187, + "grad_norm": 4.879661854689699, + "learning_rate": 2.2457422644821535e-07, + "loss": 1.488, + "step": 5445 + }, + { + "epoch": 0.9346147245580917, + 
"grad_norm": 4.140283141104058, + "learning_rate": 2.2340437603255994e-07, + "loss": 1.5075, + "step": 5446 + }, + { + "epoch": 0.9347863394542646, + "grad_norm": 4.383123977705686, + "learning_rate": 2.2223754614639437e-07, + "loss": 1.4522, + "step": 5447 + }, + { + "epoch": 0.9349579543504376, + "grad_norm": 5.885875304656546, + "learning_rate": 2.2107373715021696e-07, + "loss": 1.5742, + "step": 5448 + }, + { + "epoch": 0.9351295692466106, + "grad_norm": 4.944101652425936, + "learning_rate": 2.1991294940359343e-07, + "loss": 1.4143, + "step": 5449 + }, + { + "epoch": 0.9353011841427836, + "grad_norm": 3.99872897697736, + "learning_rate": 2.1875518326515578e-07, + "loss": 1.274, + "step": 5450 + }, + { + "epoch": 0.9354727990389565, + "grad_norm": 4.651661534779984, + "learning_rate": 2.176004390926001e-07, + "loss": 1.4185, + "step": 5451 + }, + { + "epoch": 0.9356444139351295, + "grad_norm": 3.781247208678973, + "learning_rate": 2.1644871724269102e-07, + "loss": 1.4458, + "step": 5452 + }, + { + "epoch": 0.9358160288313025, + "grad_norm": 4.1797280880035785, + "learning_rate": 2.1530001807125944e-07, + "loss": 1.5128, + "step": 5453 + }, + { + "epoch": 0.9359876437274756, + "grad_norm": 5.149269990553209, + "learning_rate": 2.1415434193320038e-07, + "loss": 1.4525, + "step": 5454 + }, + { + "epoch": 0.9361592586236486, + "grad_norm": 5.188102171780974, + "learning_rate": 2.130116891824796e-07, + "loss": 1.5899, + "step": 5455 + }, + { + "epoch": 0.9363308735198215, + "grad_norm": 4.982838307095838, + "learning_rate": 2.1187206017212248e-07, + "loss": 1.687, + "step": 5456 + }, + { + "epoch": 0.9365024884159945, + "grad_norm": 4.694262359670236, + "learning_rate": 2.1073545525422402e-07, + "loss": 1.5309, + "step": 5457 + }, + { + "epoch": 0.9366741033121675, + "grad_norm": 4.012266792074211, + "learning_rate": 2.0960187477994444e-07, + "loss": 1.5667, + "step": 5458 + }, + { + "epoch": 0.9368457182083405, + "grad_norm": 4.6351374309440025, + "learning_rate": 2.0847131909950912e-07, + "loss": 1.5457, + "step": 5459 + }, + { + "epoch": 0.9370173331045135, + "grad_norm": 4.049863812346382, + "learning_rate": 2.0734378856221092e-07, + "loss": 1.1781, + "step": 5460 + }, + { + "epoch": 0.9371889480006864, + "grad_norm": 4.120135750766048, + "learning_rate": 2.0621928351640231e-07, + "loss": 1.4823, + "step": 5461 + }, + { + "epoch": 0.9373605628968594, + "grad_norm": 3.8214226303817167, + "learning_rate": 2.0509780430950754e-07, + "loss": 1.3756, + "step": 5462 + }, + { + "epoch": 0.9375321777930324, + "grad_norm": 4.287005650149692, + "learning_rate": 2.0397935128801283e-07, + "loss": 1.5436, + "step": 5463 + }, + { + "epoch": 0.9377037926892055, + "grad_norm": 4.251647074771913, + "learning_rate": 2.0286392479747064e-07, + "loss": 1.4238, + "step": 5464 + }, + { + "epoch": 0.9378754075853785, + "grad_norm": 4.279691404522448, + "learning_rate": 2.0175152518249753e-07, + "loss": 1.6744, + "step": 5465 + }, + { + "epoch": 0.9380470224815514, + "grad_norm": 5.126031706890018, + "learning_rate": 2.0064215278677412e-07, + "loss": 1.6748, + "step": 5466 + }, + { + "epoch": 0.9382186373777244, + "grad_norm": 4.303546507230215, + "learning_rate": 1.9953580795304628e-07, + "loss": 1.4509, + "step": 5467 + }, + { + "epoch": 0.9383902522738974, + "grad_norm": 4.23722789041591, + "learning_rate": 1.9843249102312722e-07, + "loss": 1.289, + "step": 5468 + }, + { + "epoch": 0.9385618671700704, + "grad_norm": 4.465369397607201, + "learning_rate": 1.9733220233789097e-07, + "loss": 1.486, + "step": 5469 + 
}, + { + "epoch": 0.9387334820662433, + "grad_norm": 4.067478812139229, + "learning_rate": 1.962349422372778e-07, + "loss": 1.3555, + "step": 5470 + }, + { + "epoch": 0.9389050969624163, + "grad_norm": 3.9450981044114175, + "learning_rate": 1.9514071106028987e-07, + "loss": 1.3921, + "step": 5471 + }, + { + "epoch": 0.9390767118585893, + "grad_norm": 4.62484541639502, + "learning_rate": 1.9404950914499787e-07, + "loss": 1.485, + "step": 5472 + }, + { + "epoch": 0.9392483267547623, + "grad_norm": 4.970890379453228, + "learning_rate": 1.9296133682853436e-07, + "loss": 1.5941, + "step": 5473 + }, + { + "epoch": 0.9394199416509353, + "grad_norm": 4.729506984296073, + "learning_rate": 1.9187619444709483e-07, + "loss": 1.4279, + "step": 5474 + }, + { + "epoch": 0.9395915565471082, + "grad_norm": 4.426615000149878, + "learning_rate": 1.9079408233593998e-07, + "loss": 1.6161, + "step": 5475 + }, + { + "epoch": 0.9397631714432813, + "grad_norm": 5.170515051255126, + "learning_rate": 1.8971500082939354e-07, + "loss": 1.5884, + "step": 5476 + }, + { + "epoch": 0.9399347863394543, + "grad_norm": 4.415585548610362, + "learning_rate": 1.886389502608421e-07, + "loss": 1.7015, + "step": 5477 + }, + { + "epoch": 0.9401064012356273, + "grad_norm": 4.501590145025204, + "learning_rate": 1.8756593096273978e-07, + "loss": 1.7003, + "step": 5478 + }, + { + "epoch": 0.9402780161318003, + "grad_norm": 4.49601989494037, + "learning_rate": 1.8649594326660024e-07, + "loss": 1.6124, + "step": 5479 + }, + { + "epoch": 0.9404496310279732, + "grad_norm": 5.03603978096081, + "learning_rate": 1.854289875030002e-07, + "loss": 1.4852, + "step": 5480 + }, + { + "epoch": 0.9406212459241462, + "grad_norm": 4.046174708184559, + "learning_rate": 1.843650640015826e-07, + "loss": 1.5849, + "step": 5481 + }, + { + "epoch": 0.9407928608203192, + "grad_norm": 4.255537810701056, + "learning_rate": 1.8330417309105342e-07, + "loss": 1.6783, + "step": 5482 + }, + { + "epoch": 0.9409644757164922, + "grad_norm": 4.075111191752301, + "learning_rate": 1.822463150991771e-07, + "loss": 1.1585, + "step": 5483 + }, + { + "epoch": 0.9411360906126652, + "grad_norm": 5.953991043181289, + "learning_rate": 1.811914903527867e-07, + "loss": 1.5888, + "step": 5484 + }, + { + "epoch": 0.9413077055088381, + "grad_norm": 5.279935510968091, + "learning_rate": 1.8013969917777484e-07, + "loss": 1.6853, + "step": 5485 + }, + { + "epoch": 0.9414793204050111, + "grad_norm": 4.382044619510773, + "learning_rate": 1.7909094189909826e-07, + "loss": 1.5727, + "step": 5486 + }, + { + "epoch": 0.9416509353011842, + "grad_norm": 6.203155445137067, + "learning_rate": 1.780452188407744e-07, + "loss": 1.7352, + "step": 5487 + }, + { + "epoch": 0.9418225501973572, + "grad_norm": 4.7427327154515035, + "learning_rate": 1.770025303258882e-07, + "loss": 1.4214, + "step": 5488 + }, + { + "epoch": 0.9419941650935301, + "grad_norm": 4.256842350675999, + "learning_rate": 1.7596287667658085e-07, + "loss": 1.5036, + "step": 5489 + }, + { + "epoch": 0.9421657799897031, + "grad_norm": 4.83648651138836, + "learning_rate": 1.7492625821405872e-07, + "loss": 1.6256, + "step": 5490 + }, + { + "epoch": 0.9423373948858761, + "grad_norm": 4.53033515886486, + "learning_rate": 1.738926752585901e-07, + "loss": 1.659, + "step": 5491 + }, + { + "epoch": 0.9425090097820491, + "grad_norm": 4.174373651973854, + "learning_rate": 1.7286212812950842e-07, + "loss": 1.3821, + "step": 5492 + }, + { + "epoch": 0.9426806246782221, + "grad_norm": 4.920264499057055, + "learning_rate": 1.7183461714520455e-07, + 
"loss": 1.6867, + "step": 5493 + }, + { + "epoch": 0.942852239574395, + "grad_norm": 4.242615109063735, + "learning_rate": 1.7081014262313344e-07, + "loss": 1.5198, + "step": 5494 + }, + { + "epoch": 0.943023854470568, + "grad_norm": 4.1052912146059395, + "learning_rate": 1.6978870487981082e-07, + "loss": 1.4706, + "step": 5495 + }, + { + "epoch": 0.943195469366741, + "grad_norm": 4.467767061948136, + "learning_rate": 1.687703042308164e-07, + "loss": 1.5312, + "step": 5496 + }, + { + "epoch": 0.943367084262914, + "grad_norm": 4.689116423577103, + "learning_rate": 1.6775494099078971e-07, + "loss": 1.4992, + "step": 5497 + }, + { + "epoch": 0.9435386991590871, + "grad_norm": 4.1132320005324985, + "learning_rate": 1.6674261547343417e-07, + "loss": 1.5359, + "step": 5498 + }, + { + "epoch": 0.94371031405526, + "grad_norm": 5.082596245702644, + "learning_rate": 1.6573332799151076e-07, + "loss": 1.6278, + "step": 5499 + }, + { + "epoch": 0.943881928951433, + "grad_norm": 4.250666476463317, + "learning_rate": 1.6472707885684557e-07, + "loss": 1.322, + "step": 5500 + }, + { + "epoch": 0.944053543847606, + "grad_norm": 4.965076369426824, + "learning_rate": 1.6372386838032218e-07, + "loss": 1.7252, + "step": 5501 + }, + { + "epoch": 0.944225158743779, + "grad_norm": 4.612750041092019, + "learning_rate": 1.6272369687189038e-07, + "loss": 1.3538, + "step": 5502 + }, + { + "epoch": 0.944396773639952, + "grad_norm": 4.575129781882723, + "learning_rate": 1.6172656464055748e-07, + "loss": 1.469, + "step": 5503 + }, + { + "epoch": 0.9445683885361249, + "grad_norm": 3.933610133607644, + "learning_rate": 1.6073247199439034e-07, + "loss": 1.4181, + "step": 5504 + }, + { + "epoch": 0.9447400034322979, + "grad_norm": 4.0560079355778855, + "learning_rate": 1.5974141924052222e-07, + "loss": 1.5681, + "step": 5505 + }, + { + "epoch": 0.9449116183284709, + "grad_norm": 4.814534286449412, + "learning_rate": 1.5875340668514262e-07, + "loss": 1.3591, + "step": 5506 + }, + { + "epoch": 0.9450832332246439, + "grad_norm": 4.002163378366633, + "learning_rate": 1.5776843463350288e-07, + "loss": 1.3633, + "step": 5507 + }, + { + "epoch": 0.9452548481208168, + "grad_norm": 4.906710382709269, + "learning_rate": 1.567865033899163e-07, + "loss": 1.4755, + "step": 5508 + }, + { + "epoch": 0.9454264630169898, + "grad_norm": 4.595955671730814, + "learning_rate": 1.5580761325775352e-07, + "loss": 1.786, + "step": 5509 + }, + { + "epoch": 0.9455980779131629, + "grad_norm": 4.884715002591531, + "learning_rate": 1.5483176453944814e-07, + "loss": 1.5741, + "step": 5510 + }, + { + "epoch": 0.9457696928093359, + "grad_norm": 4.660137512620219, + "learning_rate": 1.538589575364946e-07, + "loss": 1.7246, + "step": 5511 + }, + { + "epoch": 0.9459413077055089, + "grad_norm": 4.914873791354064, + "learning_rate": 1.52889192549448e-07, + "loss": 1.5279, + "step": 5512 + }, + { + "epoch": 0.9461129226016818, + "grad_norm": 5.12123560290714, + "learning_rate": 1.519224698779198e-07, + "loss": 1.7243, + "step": 5513 + }, + { + "epoch": 0.9462845374978548, + "grad_norm": 4.709705897755593, + "learning_rate": 1.5095878982058442e-07, + "loss": 1.4513, + "step": 5514 + }, + { + "epoch": 0.9464561523940278, + "grad_norm": 5.175152832964303, + "learning_rate": 1.4999815267517593e-07, + "loss": 1.4411, + "step": 5515 + }, + { + "epoch": 0.9466277672902008, + "grad_norm": 4.409390626975295, + "learning_rate": 1.49040558738488e-07, + "loss": 1.3232, + "step": 5516 + }, + { + "epoch": 0.9467993821863738, + "grad_norm": 4.354459610064533, + "learning_rate": 
1.480860083063762e-07, + "loss": 1.4545, + "step": 5517 + }, + { + "epoch": 0.9469709970825467, + "grad_norm": 3.986037892439437, + "learning_rate": 1.4713450167375022e-07, + "loss": 1.4302, + "step": 5518 + }, + { + "epoch": 0.9471426119787197, + "grad_norm": 5.039051237389449, + "learning_rate": 1.4618603913458596e-07, + "loss": 1.3696, + "step": 5519 + }, + { + "epoch": 0.9473142268748928, + "grad_norm": 4.902122930097657, + "learning_rate": 1.452406209819135e-07, + "loss": 1.3077, + "step": 5520 + }, + { + "epoch": 0.9474858417710658, + "grad_norm": 5.904590933452497, + "learning_rate": 1.4429824750782583e-07, + "loss": 1.4765, + "step": 5521 + }, + { + "epoch": 0.9476574566672387, + "grad_norm": 4.994552490517991, + "learning_rate": 1.4335891900347453e-07, + "loss": 1.6367, + "step": 5522 + }, + { + "epoch": 0.9478290715634117, + "grad_norm": 4.248039818352286, + "learning_rate": 1.4242263575906966e-07, + "loss": 1.4552, + "step": 5523 + }, + { + "epoch": 0.9480006864595847, + "grad_norm": 3.926366421870658, + "learning_rate": 1.4148939806387986e-07, + "loss": 1.3853, + "step": 5524 + }, + { + "epoch": 0.9481723013557577, + "grad_norm": 3.8921543269617787, + "learning_rate": 1.4055920620623443e-07, + "loss": 1.311, + "step": 5525 + }, + { + "epoch": 0.9483439162519307, + "grad_norm": 4.511433098985666, + "learning_rate": 1.396320604735213e-07, + "loss": 1.5943, + "step": 5526 + }, + { + "epoch": 0.9485155311481036, + "grad_norm": 4.532233086564658, + "learning_rate": 1.3870796115218688e-07, + "loss": 1.6102, + "step": 5527 + }, + { + "epoch": 0.9486871460442766, + "grad_norm": 5.046622565248214, + "learning_rate": 1.37786908527735e-07, + "loss": 1.3627, + "step": 5528 + }, + { + "epoch": 0.9488587609404496, + "grad_norm": 4.5000477370654055, + "learning_rate": 1.368689028847303e-07, + "loss": 1.4448, + "step": 5529 + }, + { + "epoch": 0.9490303758366226, + "grad_norm": 4.221493859098195, + "learning_rate": 1.35953944506797e-07, + "loss": 1.4925, + "step": 5530 + }, + { + "epoch": 0.9492019907327957, + "grad_norm": 4.451180414540635, + "learning_rate": 1.350420336766134e-07, + "loss": 1.5551, + "step": 5531 + }, + { + "epoch": 0.9493736056289686, + "grad_norm": 4.108782222822078, + "learning_rate": 1.341331706759219e-07, + "loss": 1.5612, + "step": 5532 + }, + { + "epoch": 0.9495452205251416, + "grad_norm": 4.725777889577813, + "learning_rate": 1.3322735578551794e-07, + "loss": 1.4624, + "step": 5533 + }, + { + "epoch": 0.9497168354213146, + "grad_norm": 4.398872234583014, + "learning_rate": 1.323245892852587e-07, + "loss": 1.6568, + "step": 5534 + }, + { + "epoch": 0.9498884503174876, + "grad_norm": 4.415741481311043, + "learning_rate": 1.314248714540589e-07, + "loss": 1.6578, + "step": 5535 + }, + { + "epoch": 0.9500600652136606, + "grad_norm": 4.281254580853789, + "learning_rate": 1.3052820256989174e-07, + "loss": 1.4232, + "step": 5536 + }, + { + "epoch": 0.9502316801098335, + "grad_norm": 5.0435488226525225, + "learning_rate": 1.296345829097856e-07, + "loss": 1.5888, + "step": 5537 + }, + { + "epoch": 0.9504032950060065, + "grad_norm": 5.46652166659214, + "learning_rate": 1.287440127498296e-07, + "loss": 1.5877, + "step": 5538 + }, + { + "epoch": 0.9505749099021795, + "grad_norm": 4.513994269781327, + "learning_rate": 1.278564923651704e-07, + "loss": 1.5309, + "step": 5539 + }, + { + "epoch": 0.9507465247983525, + "grad_norm": 4.309112779653492, + "learning_rate": 1.2697202203001192e-07, + "loss": 1.4586, + "step": 5540 + }, + { + "epoch": 0.9509181396945254, + "grad_norm": 
5.343333296569606, + "learning_rate": 1.2609060201761558e-07, + "loss": 1.4457, + "step": 5541 + }, + { + "epoch": 0.9510897545906984, + "grad_norm": 4.118652672759147, + "learning_rate": 1.2521223260030135e-07, + "loss": 1.4706, + "step": 5542 + }, + { + "epoch": 0.9512613694868715, + "grad_norm": 5.6021818516241515, + "learning_rate": 1.2433691404944548e-07, + "loss": 1.5353, + "step": 5543 + }, + { + "epoch": 0.9514329843830445, + "grad_norm": 4.450252790102887, + "learning_rate": 1.2346464663548053e-07, + "loss": 1.132, + "step": 5544 + }, + { + "epoch": 0.9516045992792175, + "grad_norm": 4.1302892229467005, + "learning_rate": 1.225954306279009e-07, + "loss": 1.3012, + "step": 5545 + }, + { + "epoch": 0.9517762141753904, + "grad_norm": 4.73965807784184, + "learning_rate": 1.2172926629525406e-07, + "loss": 1.5539, + "step": 5546 + }, + { + "epoch": 0.9519478290715634, + "grad_norm": 4.197637706686702, + "learning_rate": 1.2086615390514477e-07, + "loss": 1.4117, + "step": 5547 + }, + { + "epoch": 0.9521194439677364, + "grad_norm": 5.52844818643221, + "learning_rate": 1.2000609372423865e-07, + "loss": 1.5033, + "step": 5548 + }, + { + "epoch": 0.9522910588639094, + "grad_norm": 4.259515764839677, + "learning_rate": 1.1914908601825204e-07, + "loss": 1.4461, + "step": 5549 + }, + { + "epoch": 0.9524626737600824, + "grad_norm": 4.379129780854141, + "learning_rate": 1.1829513105196533e-07, + "loss": 1.4342, + "step": 5550 + }, + { + "epoch": 0.9526342886562553, + "grad_norm": 4.061496755627444, + "learning_rate": 1.1744422908921193e-07, + "loss": 1.3807, + "step": 5551 + }, + { + "epoch": 0.9528059035524283, + "grad_norm": 4.107654935189843, + "learning_rate": 1.1659638039287935e-07, + "loss": 1.483, + "step": 5552 + }, + { + "epoch": 0.9529775184486013, + "grad_norm": 4.20693366150938, + "learning_rate": 1.1575158522491803e-07, + "loss": 1.2848, + "step": 5553 + }, + { + "epoch": 0.9531491333447744, + "grad_norm": 4.39066981228261, + "learning_rate": 1.1490984384633142e-07, + "loss": 1.4661, + "step": 5554 + }, + { + "epoch": 0.9533207482409474, + "grad_norm": 4.774239545298379, + "learning_rate": 1.1407115651717704e-07, + "loss": 1.4656, + "step": 5555 + }, + { + "epoch": 0.9534923631371203, + "grad_norm": 4.513545789220338, + "learning_rate": 1.1323552349657541e-07, + "loss": 1.4191, + "step": 5556 + }, + { + "epoch": 0.9536639780332933, + "grad_norm": 4.401696750785899, + "learning_rate": 1.1240294504269777e-07, + "loss": 1.4805, + "step": 5557 + }, + { + "epoch": 0.9538355929294663, + "grad_norm": 4.419973212132363, + "learning_rate": 1.1157342141277283e-07, + "loss": 1.3783, + "step": 5558 + }, + { + "epoch": 0.9540072078256393, + "grad_norm": 4.046858009648834, + "learning_rate": 1.1074695286308667e-07, + "loss": 1.4337, + "step": 5559 + }, + { + "epoch": 0.9541788227218122, + "grad_norm": 4.88405537694404, + "learning_rate": 1.0992353964898283e-07, + "loss": 1.6405, + "step": 5560 + }, + { + "epoch": 0.9543504376179852, + "grad_norm": 4.513552996548548, + "learning_rate": 1.0910318202485782e-07, + "loss": 1.3132, + "step": 5561 + }, + { + "epoch": 0.9545220525141582, + "grad_norm": 4.689991520897372, + "learning_rate": 1.082858802441633e-07, + "loss": 1.5871, + "step": 5562 + }, + { + "epoch": 0.9546936674103312, + "grad_norm": 3.711974722691003, + "learning_rate": 1.0747163455941179e-07, + "loss": 1.444, + "step": 5563 + }, + { + "epoch": 0.9548652823065042, + "grad_norm": 5.398649179804981, + "learning_rate": 1.0666044522216645e-07, + "loss": 1.7823, + "step": 5564 + }, + { + 
"epoch": 0.9550368972026771, + "grad_norm": 4.637181411813782, + "learning_rate": 1.0585231248305017e-07, + "loss": 1.4848, + "step": 5565 + }, + { + "epoch": 0.9552085120988502, + "grad_norm": 4.921065162458169, + "learning_rate": 1.0504723659173987e-07, + "loss": 1.5463, + "step": 5566 + }, + { + "epoch": 0.9553801269950232, + "grad_norm": 4.675508501946252, + "learning_rate": 1.0424521779696661e-07, + "loss": 1.6674, + "step": 5567 + }, + { + "epoch": 0.9555517418911962, + "grad_norm": 4.166551009202157, + "learning_rate": 1.0344625634651773e-07, + "loss": 1.4283, + "step": 5568 + }, + { + "epoch": 0.9557233567873692, + "grad_norm": 4.762150315402959, + "learning_rate": 1.0265035248723798e-07, + "loss": 1.3742, + "step": 5569 + }, + { + "epoch": 0.9558949716835421, + "grad_norm": 4.368923435623675, + "learning_rate": 1.0185750646502623e-07, + "loss": 1.5163, + "step": 5570 + }, + { + "epoch": 0.9560665865797151, + "grad_norm": 4.254634715659754, + "learning_rate": 1.0106771852483432e-07, + "loss": 1.409, + "step": 5571 + }, + { + "epoch": 0.9562382014758881, + "grad_norm": 5.6605593576952415, + "learning_rate": 1.0028098891067262e-07, + "loss": 1.5272, + "step": 5572 + }, + { + "epoch": 0.9564098163720611, + "grad_norm": 4.271190562672141, + "learning_rate": 9.949731786560557e-08, + "loss": 1.2537, + "step": 5573 + }, + { + "epoch": 0.956581431268234, + "grad_norm": 4.388912580337789, + "learning_rate": 9.871670563175173e-08, + "loss": 1.4486, + "step": 5574 + }, + { + "epoch": 0.956753046164407, + "grad_norm": 6.610827677646732, + "learning_rate": 9.793915245028595e-08, + "loss": 1.6385, + "step": 5575 + }, + { + "epoch": 0.95692466106058, + "grad_norm": 3.908251639364815, + "learning_rate": 9.716465856143608e-08, + "loss": 1.4899, + "step": 5576 + }, + { + "epoch": 0.9570962759567531, + "grad_norm": 4.1041587282985885, + "learning_rate": 9.639322420448627e-08, + "loss": 1.4772, + "step": 5577 + }, + { + "epoch": 0.9572678908529261, + "grad_norm": 4.103523520494298, + "learning_rate": 9.562484961777696e-08, + "loss": 1.3929, + "step": 5578 + }, + { + "epoch": 0.957439505749099, + "grad_norm": 5.03495639522032, + "learning_rate": 9.485953503869827e-08, + "loss": 1.4996, + "step": 5579 + }, + { + "epoch": 0.957611120645272, + "grad_norm": 4.3567043272124195, + "learning_rate": 9.409728070370217e-08, + "loss": 1.648, + "step": 5580 + }, + { + "epoch": 0.957782735541445, + "grad_norm": 4.989616679241597, + "learning_rate": 9.333808684828693e-08, + "loss": 1.4442, + "step": 5581 + }, + { + "epoch": 0.957954350437618, + "grad_norm": 4.236021356788596, + "learning_rate": 9.258195370701161e-08, + "loss": 1.5483, + "step": 5582 + }, + { + "epoch": 0.958125965333791, + "grad_norm": 4.267173572456094, + "learning_rate": 9.182888151348712e-08, + "loss": 1.6046, + "step": 5583 + }, + { + "epoch": 0.9582975802299639, + "grad_norm": 4.245959539165511, + "learning_rate": 9.107887050037845e-08, + "loss": 1.448, + "step": 5584 + }, + { + "epoch": 0.9584691951261369, + "grad_norm": 4.770992351179536, + "learning_rate": 9.03319208994069e-08, + "loss": 1.3618, + "step": 5585 + }, + { + "epoch": 0.9586408100223099, + "grad_norm": 7.281904780279344, + "learning_rate": 8.958803294134455e-08, + "loss": 1.611, + "step": 5586 + }, + { + "epoch": 0.958812424918483, + "grad_norm": 3.938487005137157, + "learning_rate": 8.884720685601978e-08, + "loss": 1.4523, + "step": 5587 + }, + { + "epoch": 0.958984039814656, + "grad_norm": 4.3884354989483185, + "learning_rate": 8.810944287231504e-08, + "loss": 1.4561, + "step": 
5588 + }, + { + "epoch": 0.9591556547108289, + "grad_norm": 4.033949352532372, + "learning_rate": 8.737474121816691e-08, + "loss": 1.5197, + "step": 5589 + }, + { + "epoch": 0.9593272696070019, + "grad_norm": 3.9625866466914257, + "learning_rate": 8.664310212056493e-08, + "loss": 1.2878, + "step": 5590 + }, + { + "epoch": 0.9594988845031749, + "grad_norm": 3.845998593217139, + "learning_rate": 8.59145258055516e-08, + "loss": 1.2817, + "step": 5591 + }, + { + "epoch": 0.9596704993993479, + "grad_norm": 4.619156431167044, + "learning_rate": 8.518901249822576e-08, + "loss": 1.5144, + "step": 5592 + }, + { + "epoch": 0.9598421142955208, + "grad_norm": 4.182687252249632, + "learning_rate": 8.446656242273699e-08, + "loss": 1.4119, + "step": 5593 + }, + { + "epoch": 0.9600137291916938, + "grad_norm": 4.933676338033103, + "learning_rate": 8.374717580229119e-08, + "loss": 1.4332, + "step": 5594 + }, + { + "epoch": 0.9601853440878668, + "grad_norm": 4.323055042475596, + "learning_rate": 8.303085285914614e-08, + "loss": 1.6665, + "step": 5595 + }, + { + "epoch": 0.9603569589840398, + "grad_norm": 6.3717991565874295, + "learning_rate": 8.231759381461368e-08, + "loss": 1.5652, + "step": 5596 + }, + { + "epoch": 0.9605285738802128, + "grad_norm": 4.441330403516568, + "learning_rate": 8.160739888905867e-08, + "loss": 1.7358, + "step": 5597 + }, + { + "epoch": 0.9607001887763857, + "grad_norm": 4.640337077955984, + "learning_rate": 8.090026830190001e-08, + "loss": 1.4833, + "step": 5598 + }, + { + "epoch": 0.9608718036725588, + "grad_norm": 3.9971018482858973, + "learning_rate": 8.019620227160852e-08, + "loss": 1.2332, + "step": 5599 + }, + { + "epoch": 0.9610434185687318, + "grad_norm": 4.572150917235713, + "learning_rate": 7.949520101570907e-08, + "loss": 1.6356, + "step": 5600 + }, + { + "epoch": 0.9612150334649048, + "grad_norm": 4.053242958053849, + "learning_rate": 7.879726475077953e-08, + "loss": 1.2053, + "step": 5601 + }, + { + "epoch": 0.9613866483610778, + "grad_norm": 4.269274464377621, + "learning_rate": 7.810239369245187e-08, + "loss": 1.5149, + "step": 5602 + }, + { + "epoch": 0.9615582632572507, + "grad_norm": 5.222369099471507, + "learning_rate": 7.741058805540991e-08, + "loss": 1.5011, + "step": 5603 + }, + { + "epoch": 0.9617298781534237, + "grad_norm": 4.14591109332786, + "learning_rate": 7.672184805338934e-08, + "loss": 1.4887, + "step": 5604 + }, + { + "epoch": 0.9619014930495967, + "grad_norm": 5.256019734077203, + "learning_rate": 7.603617389918106e-08, + "loss": 1.3953, + "step": 5605 + }, + { + "epoch": 0.9620731079457697, + "grad_norm": 4.798890066271091, + "learning_rate": 7.535356580462672e-08, + "loss": 1.699, + "step": 5606 + }, + { + "epoch": 0.9622447228419427, + "grad_norm": 4.562986119179497, + "learning_rate": 7.467402398062206e-08, + "loss": 1.5486, + "step": 5607 + }, + { + "epoch": 0.9624163377381156, + "grad_norm": 4.378363309374198, + "learning_rate": 7.399754863711583e-08, + "loss": 1.4836, + "step": 5608 + }, + { + "epoch": 0.9625879526342886, + "grad_norm": 4.2700541625191635, + "learning_rate": 7.332413998310861e-08, + "loss": 1.4473, + "step": 5609 + }, + { + "epoch": 0.9627595675304617, + "grad_norm": 4.870812737206464, + "learning_rate": 7.265379822665064e-08, + "loss": 1.5347, + "step": 5610 + }, + { + "epoch": 0.9629311824266347, + "grad_norm": 4.932673098818039, + "learning_rate": 7.19865235748507e-08, + "loss": 1.5467, + "step": 5611 + }, + { + "epoch": 0.9631027973228076, + "grad_norm": 4.32960987656462, + "learning_rate": 7.132231623386498e-08, + 
"loss": 1.5214, + "step": 5612 + }, + { + "epoch": 0.9632744122189806, + "grad_norm": 4.5411362193102045, + "learning_rate": 7.066117640890491e-08, + "loss": 1.423, + "step": 5613 + }, + { + "epoch": 0.9634460271151536, + "grad_norm": 4.390075950257135, + "learning_rate": 7.000310430423262e-08, + "loss": 1.4445, + "step": 5614 + }, + { + "epoch": 0.9636176420113266, + "grad_norm": 3.8029473734423775, + "learning_rate": 6.934810012316218e-08, + "loss": 1.2234, + "step": 5615 + }, + { + "epoch": 0.9637892569074996, + "grad_norm": 4.437628943541048, + "learning_rate": 6.869616406806056e-08, + "loss": 1.4683, + "step": 5616 + }, + { + "epoch": 0.9639608718036725, + "grad_norm": 4.691688383601371, + "learning_rate": 6.804729634034779e-08, + "loss": 1.616, + "step": 5617 + }, + { + "epoch": 0.9641324866998455, + "grad_norm": 4.27240001711328, + "learning_rate": 6.74014971404946e-08, + "loss": 1.5508, + "step": 5618 + }, + { + "epoch": 0.9643041015960185, + "grad_norm": 5.11907953126176, + "learning_rate": 6.67587666680236e-08, + "loss": 1.7056, + "step": 5619 + }, + { + "epoch": 0.9644757164921915, + "grad_norm": 4.160933818593476, + "learning_rate": 6.611910512150932e-08, + "loss": 1.4335, + "step": 5620 + }, + { + "epoch": 0.9646473313883646, + "grad_norm": 5.029390805302452, + "learning_rate": 6.54825126985792e-08, + "loss": 1.4893, + "step": 5621 + }, + { + "epoch": 0.9648189462845375, + "grad_norm": 4.078951692921687, + "learning_rate": 6.484898959591146e-08, + "loss": 1.489, + "step": 5622 + }, + { + "epoch": 0.9649905611807105, + "grad_norm": 4.712814792856105, + "learning_rate": 6.421853600923622e-08, + "loss": 1.3516, + "step": 5623 + }, + { + "epoch": 0.9651621760768835, + "grad_norm": 4.677202471234876, + "learning_rate": 6.359115213333544e-08, + "loss": 1.4982, + "step": 5624 + }, + { + "epoch": 0.9653337909730565, + "grad_norm": 4.557253335292012, + "learning_rate": 6.296683816204185e-08, + "loss": 1.5158, + "step": 5625 + }, + { + "epoch": 0.9655054058692294, + "grad_norm": 4.61112946644263, + "learning_rate": 6.234559428824227e-08, + "loss": 1.4966, + "step": 5626 + }, + { + "epoch": 0.9656770207654024, + "grad_norm": 4.765200495378624, + "learning_rate": 6.172742070387205e-08, + "loss": 1.5975, + "step": 5627 + }, + { + "epoch": 0.9658486356615754, + "grad_norm": 6.824739231345612, + "learning_rate": 6.111231759991954e-08, + "loss": 1.6472, + "step": 5628 + }, + { + "epoch": 0.9660202505577484, + "grad_norm": 5.212286910268575, + "learning_rate": 6.05002851664227e-08, + "loss": 1.6497, + "step": 5629 + }, + { + "epoch": 0.9661918654539214, + "grad_norm": 4.36262715050695, + "learning_rate": 5.989132359247363e-08, + "loss": 1.4278, + "step": 5630 + }, + { + "epoch": 0.9663634803500943, + "grad_norm": 4.789479105511576, + "learning_rate": 5.928543306621293e-08, + "loss": 1.3255, + "step": 5631 + }, + { + "epoch": 0.9665350952462674, + "grad_norm": 5.011786989281166, + "learning_rate": 5.868261377483531e-08, + "loss": 1.5388, + "step": 5632 + }, + { + "epoch": 0.9667067101424404, + "grad_norm": 4.1783168379108035, + "learning_rate": 5.80828659045829e-08, + "loss": 1.3907, + "step": 5633 + }, + { + "epoch": 0.9668783250386134, + "grad_norm": 4.580360306910773, + "learning_rate": 5.7486189640751923e-08, + "loss": 1.6093, + "step": 5634 + }, + { + "epoch": 0.9670499399347864, + "grad_norm": 4.518115070999871, + "learning_rate": 5.689258516768825e-08, + "loss": 1.4196, + "step": 5635 + }, + { + "epoch": 0.9672215548309593, + "grad_norm": 4.6891687237239665, + "learning_rate": 
5.6302052668789634e-08, + "loss": 1.6617, + "step": 5636 + }, + { + "epoch": 0.9673931697271323, + "grad_norm": 4.098383764712945, + "learning_rate": 5.5714592326504555e-08, + "loss": 1.4884, + "step": 5637 + }, + { + "epoch": 0.9675647846233053, + "grad_norm": 4.37431239787366, + "learning_rate": 5.5130204322330073e-08, + "loss": 1.4873, + "step": 5638 + }, + { + "epoch": 0.9677363995194783, + "grad_norm": 4.31552391862251, + "learning_rate": 5.4548888836817305e-08, + "loss": 1.4955, + "step": 5639 + }, + { + "epoch": 0.9679080144156513, + "grad_norm": 4.460151511260132, + "learning_rate": 5.3970646049564815e-08, + "loss": 1.4606, + "step": 5640 + }, + { + "epoch": 0.9680796293118242, + "grad_norm": 4.183143841970551, + "learning_rate": 5.339547613922635e-08, + "loss": 1.4728, + "step": 5641 + }, + { + "epoch": 0.9682512442079972, + "grad_norm": 3.4693265311756774, + "learning_rate": 5.282337928350312e-08, + "loss": 1.3582, + "step": 5642 + }, + { + "epoch": 0.9684228591041703, + "grad_norm": 4.38287982213237, + "learning_rate": 5.225435565914483e-08, + "loss": 1.4741, + "step": 5643 + }, + { + "epoch": 0.9685944740003433, + "grad_norm": 4.57070422918699, + "learning_rate": 5.1688405441957525e-08, + "loss": 1.7358, + "step": 5644 + }, + { + "epoch": 0.9687660888965162, + "grad_norm": 6.233663364477786, + "learning_rate": 5.112552880679245e-08, + "loss": 1.5605, + "step": 5645 + }, + { + "epoch": 0.9689377037926892, + "grad_norm": 3.951208875316004, + "learning_rate": 5.0565725927553824e-08, + "loss": 1.4682, + "step": 5646 + }, + { + "epoch": 0.9691093186888622, + "grad_norm": 4.883379189603963, + "learning_rate": 5.000899697719552e-08, + "loss": 1.5666, + "step": 5647 + }, + { + "epoch": 0.9692809335850352, + "grad_norm": 4.299041108648855, + "learning_rate": 4.945534212772107e-08, + "loss": 1.2871, + "step": 5648 + }, + { + "epoch": 0.9694525484812082, + "grad_norm": 4.369021210298357, + "learning_rate": 4.8904761550186975e-08, + "loss": 1.373, + "step": 5649 + }, + { + "epoch": 0.9696241633773811, + "grad_norm": 4.637321801526171, + "learning_rate": 4.835725541469605e-08, + "loss": 1.6171, + "step": 5650 + }, + { + "epoch": 0.9697957782735541, + "grad_norm": 4.04450574766994, + "learning_rate": 4.781282389040409e-08, + "loss": 1.7241, + "step": 5651 + }, + { + "epoch": 0.9699673931697271, + "grad_norm": 4.562322279166501, + "learning_rate": 4.727146714551545e-08, + "loss": 1.6226, + "step": 5652 + }, + { + "epoch": 0.9701390080659001, + "grad_norm": 4.096937042355048, + "learning_rate": 4.673318534728522e-08, + "loss": 1.5468, + "step": 5653 + }, + { + "epoch": 0.9703106229620732, + "grad_norm": 4.197094126313419, + "learning_rate": 4.619797866201814e-08, + "loss": 1.3607, + "step": 5654 + }, + { + "epoch": 0.970482237858246, + "grad_norm": 4.642033432673629, + "learning_rate": 4.566584725507084e-08, + "loss": 1.3341, + "step": 5655 + }, + { + "epoch": 0.9706538527544191, + "grad_norm": 4.367462577968273, + "learning_rate": 4.513679129084514e-08, + "loss": 1.7135, + "step": 5656 + }, + { + "epoch": 0.9708254676505921, + "grad_norm": 5.232433403446701, + "learning_rate": 4.4610810932798065e-08, + "loss": 1.1977, + "step": 5657 + }, + { + "epoch": 0.9709970825467651, + "grad_norm": 5.736348041399838, + "learning_rate": 4.4087906343432963e-08, + "loss": 1.6098, + "step": 5658 + }, + { + "epoch": 0.9711686974429381, + "grad_norm": 3.9031832149356522, + "learning_rate": 4.356807768430282e-08, + "loss": 1.5015, + "step": 5659 + }, + { + "epoch": 0.971340312339111, + "grad_norm": 
4.910530433607537, + "learning_rate": 4.3051325116013623e-08, + "loss": 1.569, + "step": 5660 + }, + { + "epoch": 0.971511927235284, + "grad_norm": 4.072917186403384, + "learning_rate": 4.253764879821653e-08, + "loss": 1.306, + "step": 5661 + }, + { + "epoch": 0.971683542131457, + "grad_norm": 4.408817359383308, + "learning_rate": 4.202704888961684e-08, + "loss": 1.4006, + "step": 5662 + }, + { + "epoch": 0.97185515702763, + "grad_norm": 5.17611130219878, + "learning_rate": 4.15195255479639e-08, + "loss": 1.3705, + "step": 5663 + }, + { + "epoch": 0.9720267719238029, + "grad_norm": 4.2267671743095105, + "learning_rate": 4.101507893006229e-08, + "loss": 1.5273, + "step": 5664 + }, + { + "epoch": 0.972198386819976, + "grad_norm": 4.229414580797337, + "learning_rate": 4.05137091917629e-08, + "loss": 1.3282, + "step": 5665 + }, + { + "epoch": 0.972370001716149, + "grad_norm": 4.817358352905265, + "learning_rate": 4.001541648796625e-08, + "loss": 1.4523, + "step": 5666 + }, + { + "epoch": 0.972541616612322, + "grad_norm": 4.265060752973862, + "learning_rate": 3.9520200972621434e-08, + "loss": 1.4072, + "step": 5667 + }, + { + "epoch": 0.972713231508495, + "grad_norm": 4.628357517939475, + "learning_rate": 3.9028062798729395e-08, + "loss": 1.5124, + "step": 5668 + }, + { + "epoch": 0.9728848464046679, + "grad_norm": 3.862514053606767, + "learning_rate": 3.853900211833739e-08, + "loss": 1.3437, + "step": 5669 + }, + { + "epoch": 0.9730564613008409, + "grad_norm": 3.8870141712613506, + "learning_rate": 3.805301908254455e-08, + "loss": 1.5777, + "step": 5670 + }, + { + "epoch": 0.9732280761970139, + "grad_norm": 4.3758078102897064, + "learning_rate": 3.7570113841497446e-08, + "loss": 1.4638, + "step": 5671 + }, + { + "epoch": 0.9733996910931869, + "grad_norm": 5.253787985608671, + "learning_rate": 3.7090286544391174e-08, + "loss": 1.73, + "step": 5672 + }, + { + "epoch": 0.9735713059893599, + "grad_norm": 4.817950438761102, + "learning_rate": 3.6613537339471596e-08, + "loss": 1.4443, + "step": 5673 + }, + { + "epoch": 0.9737429208855328, + "grad_norm": 4.7533919681960635, + "learning_rate": 3.613986637403311e-08, + "loss": 1.3559, + "step": 5674 + }, + { + "epoch": 0.9739145357817058, + "grad_norm": 4.952485798782778, + "learning_rate": 3.5669273794418646e-08, + "loss": 1.3697, + "step": 5675 + }, + { + "epoch": 0.9740861506778788, + "grad_norm": 3.9253608493935936, + "learning_rate": 3.5201759746019695e-08, + "loss": 1.3966, + "step": 5676 + }, + { + "epoch": 0.9742577655740519, + "grad_norm": 5.427079815018357, + "learning_rate": 3.473732437327737e-08, + "loss": 1.4305, + "step": 5677 + }, + { + "epoch": 0.9744293804702249, + "grad_norm": 4.714282667722972, + "learning_rate": 3.427596781968134e-08, + "loss": 1.5055, + "step": 5678 + }, + { + "epoch": 0.9746009953663978, + "grad_norm": 5.180743626011448, + "learning_rate": 3.381769022776982e-08, + "loss": 1.5293, + "step": 5679 + }, + { + "epoch": 0.9747726102625708, + "grad_norm": 5.6085071842119625, + "learning_rate": 3.3362491739131755e-08, + "loss": 1.5025, + "step": 5680 + }, + { + "epoch": 0.9749442251587438, + "grad_norm": 4.040738831024504, + "learning_rate": 3.291037249440132e-08, + "loss": 1.2808, + "step": 5681 + }, + { + "epoch": 0.9751158400549168, + "grad_norm": 4.063090505117431, + "learning_rate": 3.246133263326234e-08, + "loss": 1.5642, + "step": 5682 + }, + { + "epoch": 0.9752874549510897, + "grad_norm": 4.216981942615482, + "learning_rate": 3.201537229445051e-08, + "loss": 1.6496, + "step": 5683 + }, + { + "epoch": 
0.9754590698472627, + "grad_norm": 4.525753118471045, + "learning_rate": 3.1572491615745606e-08, + "loss": 1.389, + "step": 5684 + }, + { + "epoch": 0.9756306847434357, + "grad_norm": 4.178939500728314, + "learning_rate": 3.1132690733978175e-08, + "loss": 1.5836, + "step": 5685 + }, + { + "epoch": 0.9758022996396087, + "grad_norm": 4.874749820659827, + "learning_rate": 3.0695969785027316e-08, + "loss": 1.5868, + "step": 5686 + }, + { + "epoch": 0.9759739145357818, + "grad_norm": 4.961118001545191, + "learning_rate": 3.026232890382064e-08, + "loss": 1.4784, + "step": 5687 + }, + { + "epoch": 0.9761455294319547, + "grad_norm": 4.252176699457493, + "learning_rate": 2.98317682243332e-08, + "loss": 1.4968, + "step": 5688 + }, + { + "epoch": 0.9763171443281277, + "grad_norm": 4.829474844550253, + "learning_rate": 2.940428787958749e-08, + "loss": 1.4216, + "step": 5689 + }, + { + "epoch": 0.9764887592243007, + "grad_norm": 4.372735145280348, + "learning_rate": 2.8979888001658963e-08, + "loss": 1.7711, + "step": 5690 + }, + { + "epoch": 0.9766603741204737, + "grad_norm": 4.466729583742108, + "learning_rate": 2.8558568721666068e-08, + "loss": 1.519, + "step": 5691 + }, + { + "epoch": 0.9768319890166467, + "grad_norm": 4.742783426323677, + "learning_rate": 2.814033016977802e-08, + "loss": 1.5538, + "step": 5692 + }, + { + "epoch": 0.9770036039128196, + "grad_norm": 3.7933557580084694, + "learning_rate": 2.7725172475211447e-08, + "loss": 1.3787, + "step": 5693 + }, + { + "epoch": 0.9771752188089926, + "grad_norm": 4.717837072080277, + "learning_rate": 2.7313095766230423e-08, + "loss": 1.5531, + "step": 5694 + }, + { + "epoch": 0.9773468337051656, + "grad_norm": 4.833001945925235, + "learning_rate": 2.6904100170150883e-08, + "loss": 1.2479, + "step": 5695 + }, + { + "epoch": 0.9775184486013386, + "grad_norm": 5.626134413663522, + "learning_rate": 2.6498185813332854e-08, + "loss": 1.4713, + "step": 5696 + }, + { + "epoch": 0.9776900634975115, + "grad_norm": 4.736983593289776, + "learning_rate": 2.6095352821184916e-08, + "loss": 1.5569, + "step": 5697 + }, + { + "epoch": 0.9778616783936845, + "grad_norm": 4.336991277328159, + "learning_rate": 2.5695601318165287e-08, + "loss": 1.3707, + "step": 5698 + }, + { + "epoch": 0.9780332932898576, + "grad_norm": 4.205624960758902, + "learning_rate": 2.5298931427777395e-08, + "loss": 1.6069, + "step": 5699 + }, + { + "epoch": 0.9782049081860306, + "grad_norm": 5.009211819509579, + "learning_rate": 2.4905343272577655e-08, + "loss": 1.7994, + "step": 5700 + }, + { + "epoch": 0.9783765230822036, + "grad_norm": 5.352526680031248, + "learning_rate": 2.4514836974165458e-08, + "loss": 1.4851, + "step": 5701 + }, + { + "epoch": 0.9785481379783765, + "grad_norm": 3.9432798655582304, + "learning_rate": 2.4127412653188743e-08, + "loss": 1.157, + "step": 5702 + }, + { + "epoch": 0.9787197528745495, + "grad_norm": 4.0367487302013485, + "learning_rate": 2.3743070429345095e-08, + "loss": 1.3278, + "step": 5703 + }, + { + "epoch": 0.9788913677707225, + "grad_norm": 4.554090750731708, + "learning_rate": 2.336181042137953e-08, + "loss": 1.3468, + "step": 5704 + }, + { + "epoch": 0.9790629826668955, + "grad_norm": 4.195006522439029, + "learning_rate": 2.2983632747084483e-08, + "loss": 1.5046, + "step": 5705 + }, + { + "epoch": 0.9792345975630685, + "grad_norm": 4.6361011590546815, + "learning_rate": 2.2608537523298724e-08, + "loss": 1.5538, + "step": 5706 + }, + { + "epoch": 0.9794062124592414, + "grad_norm": 4.364224186014652, + "learning_rate": 2.2236524865910658e-08, + "loss": 
1.5663, + "step": 5707 + }, + { + "epoch": 0.9795778273554144, + "grad_norm": 3.962000859663549, + "learning_rate": 2.1867594889855016e-08, + "loss": 1.4628, + "step": 5708 + }, + { + "epoch": 0.9797494422515874, + "grad_norm": 5.393321284172536, + "learning_rate": 2.150174770911506e-08, + "loss": 1.4604, + "step": 5709 + }, + { + "epoch": 0.9799210571477605, + "grad_norm": 6.0966067474000605, + "learning_rate": 2.1138983436721495e-08, + "loss": 1.7479, + "step": 5710 + }, + { + "epoch": 0.9800926720439335, + "grad_norm": 4.64818726595364, + "learning_rate": 2.0779302184751326e-08, + "loss": 1.5483, + "step": 5711 + }, + { + "epoch": 0.9802642869401064, + "grad_norm": 4.056208246221719, + "learning_rate": 2.0422704064330113e-08, + "loss": 1.6097, + "step": 5712 + }, + { + "epoch": 0.9804359018362794, + "grad_norm": 3.7183952983824478, + "learning_rate": 2.0069189185630834e-08, + "loss": 1.5155, + "step": 5713 + }, + { + "epoch": 0.9806075167324524, + "grad_norm": 4.2485997105560465, + "learning_rate": 1.9718757657873898e-08, + "loss": 1.2681, + "step": 5714 + }, + { + "epoch": 0.9807791316286254, + "grad_norm": 3.9133277974927365, + "learning_rate": 1.937140958932604e-08, + "loss": 1.4072, + "step": 5715 + }, + { + "epoch": 0.9809507465247983, + "grad_norm": 4.463848783905341, + "learning_rate": 1.9027145087303635e-08, + "loss": 1.3942, + "step": 5716 + }, + { + "epoch": 0.9811223614209713, + "grad_norm": 5.331659535666475, + "learning_rate": 1.8685964258168265e-08, + "loss": 1.6365, + "step": 5717 + }, + { + "epoch": 0.9812939763171443, + "grad_norm": 5.0894895373248215, + "learning_rate": 1.834786720733006e-08, + "loss": 1.2009, + "step": 5718 + }, + { + "epoch": 0.9814655912133173, + "grad_norm": 5.975640074130434, + "learning_rate": 1.8012854039244354e-08, + "loss": 1.5197, + "step": 5719 + }, + { + "epoch": 0.9816372061094903, + "grad_norm": 4.888002329586236, + "learning_rate": 1.7680924857417237e-08, + "loss": 1.4681, + "step": 5720 + }, + { + "epoch": 0.9818088210056632, + "grad_norm": 4.271523709414777, + "learning_rate": 1.735207976439779e-08, + "loss": 1.3427, + "step": 5721 + }, + { + "epoch": 0.9819804359018363, + "grad_norm": 4.245891997905698, + "learning_rate": 1.702631886178585e-08, + "loss": 1.5707, + "step": 5722 + }, + { + "epoch": 0.9821520507980093, + "grad_norm": 4.5541735441126905, + "learning_rate": 1.6703642250227582e-08, + "loss": 1.4318, + "step": 5723 + }, + { + "epoch": 0.9823236656941823, + "grad_norm": 4.508941451792651, + "learning_rate": 1.6384050029413235e-08, + "loss": 1.6497, + "step": 5724 + }, + { + "epoch": 0.9824952805903553, + "grad_norm": 5.930182070393091, + "learning_rate": 1.6067542298083826e-08, + "loss": 1.3516, + "step": 5725 + }, + { + "epoch": 0.9826668954865282, + "grad_norm": 4.431456946840624, + "learning_rate": 1.5754119154026693e-08, + "loss": 1.5274, + "step": 5726 + }, + { + "epoch": 0.9828385103827012, + "grad_norm": 4.764582813739088, + "learning_rate": 1.5443780694074373e-08, + "loss": 1.6669, + "step": 5727 + }, + { + "epoch": 0.9830101252788742, + "grad_norm": 5.3877084889889515, + "learning_rate": 1.513652701410795e-08, + "loss": 1.5626, + "step": 5728 + }, + { + "epoch": 0.9831817401750472, + "grad_norm": 4.056051069896262, + "learning_rate": 1.483235820905593e-08, + "loss": 1.4734, + "step": 5729 + }, + { + "epoch": 0.9833533550712202, + "grad_norm": 4.569765400541128, + "learning_rate": 1.4531274372890925e-08, + "loss": 1.5172, + "step": 5730 + }, + { + "epoch": 0.9835249699673931, + "grad_norm": 4.076733099517534, + 
"learning_rate": 1.4233275598635188e-08, + "loss": 1.412, + "step": 5731 + }, + { + "epoch": 0.9836965848635661, + "grad_norm": 4.8909774703152396, + "learning_rate": 1.3938361978358406e-08, + "loss": 1.6001, + "step": 5732 + }, + { + "epoch": 0.9838681997597392, + "grad_norm": 4.349696469923076, + "learning_rate": 1.3646533603173252e-08, + "loss": 1.3631, + "step": 5733 + }, + { + "epoch": 0.9840398146559122, + "grad_norm": 5.019159936530047, + "learning_rate": 1.3357790563243156e-08, + "loss": 1.485, + "step": 5734 + }, + { + "epoch": 0.9842114295520851, + "grad_norm": 5.361966139277498, + "learning_rate": 1.3072132947776762e-08, + "loss": 1.5821, + "step": 5735 + }, + { + "epoch": 0.9843830444482581, + "grad_norm": 5.066564835395103, + "learning_rate": 1.2789560845029026e-08, + "loss": 1.5014, + "step": 5736 + }, + { + "epoch": 0.9845546593444311, + "grad_norm": 3.947854313608584, + "learning_rate": 1.2510074342301225e-08, + "loss": 1.4026, + "step": 5737 + }, + { + "epoch": 0.9847262742406041, + "grad_norm": 4.89044729056931, + "learning_rate": 1.2233673525943179e-08, + "loss": 1.4504, + "step": 5738 + }, + { + "epoch": 0.9848978891367771, + "grad_norm": 4.717725721957725, + "learning_rate": 1.196035848135102e-08, + "loss": 1.4044, + "step": 5739 + }, + { + "epoch": 0.98506950403295, + "grad_norm": 4.9418648925350785, + "learning_rate": 1.169012929296609e-08, + "loss": 1.6687, + "step": 5740 + }, + { + "epoch": 0.985241118929123, + "grad_norm": 4.180740359090647, + "learning_rate": 1.1422986044276051e-08, + "loss": 1.5111, + "step": 5741 + }, + { + "epoch": 0.985412733825296, + "grad_norm": 5.175074666934108, + "learning_rate": 1.1158928817817104e-08, + "loss": 1.5831, + "step": 5742 + }, + { + "epoch": 0.985584348721469, + "grad_norm": 4.412875403121942, + "learning_rate": 1.0897957695171767e-08, + "loss": 1.451, + "step": 5743 + }, + { + "epoch": 0.9857559636176421, + "grad_norm": 4.077973343323954, + "learning_rate": 1.0640072756967768e-08, + "loss": 1.3742, + "step": 5744 + }, + { + "epoch": 0.985927578513815, + "grad_norm": 4.4413614868434665, + "learning_rate": 1.038527408288026e-08, + "loss": 1.3252, + "step": 5745 + }, + { + "epoch": 0.986099193409988, + "grad_norm": 4.215948662693966, + "learning_rate": 1.0133561751629605e-08, + "loss": 1.4889, + "step": 5746 + }, + { + "epoch": 0.986270808306161, + "grad_norm": 5.365747097472438, + "learning_rate": 9.884935840984711e-09, + "loss": 1.6181, + "step": 5747 + }, + { + "epoch": 0.986442423202334, + "grad_norm": 3.9984124006778257, + "learning_rate": 9.639396427759684e-09, + "loss": 1.3671, + "step": 5748 + }, + { + "epoch": 0.9866140380985069, + "grad_norm": 3.8998902758765883, + "learning_rate": 9.396943587813844e-09, + "loss": 1.4449, + "step": 5749 + }, + { + "epoch": 0.9867856529946799, + "grad_norm": 4.888190120951397, + "learning_rate": 9.157577396056161e-09, + "loss": 1.5563, + "step": 5750 + }, + { + "epoch": 0.9869572678908529, + "grad_norm": 4.64076007560539, + "learning_rate": 8.921297926439698e-09, + "loss": 1.4811, + "step": 5751 + }, + { + "epoch": 0.9871288827870259, + "grad_norm": 4.37464919950965, + "learning_rate": 8.68810525196273e-09, + "loss": 1.484, + "step": 5752 + }, + { + "epoch": 0.9873004976831989, + "grad_norm": 5.055811491546965, + "learning_rate": 8.457999444673181e-09, + "loss": 1.5054, + "step": 5753 + }, + { + "epoch": 0.9874721125793718, + "grad_norm": 5.008744153516659, + "learning_rate": 8.230980575661962e-09, + "loss": 1.3461, + "step": 5754 + }, + { + "epoch": 0.9876437274755449, + 
"grad_norm": 4.4398751615219965, + "learning_rate": 8.007048715068522e-09, + "loss": 1.5646, + "step": 5755 + }, + { + "epoch": 0.9878153423717179, + "grad_norm": 4.034380623128741, + "learning_rate": 7.786203932078628e-09, + "loss": 1.2722, + "step": 5756 + }, + { + "epoch": 0.9879869572678909, + "grad_norm": 5.089243824954915, + "learning_rate": 7.568446294921039e-09, + "loss": 1.6698, + "step": 5757 + }, + { + "epoch": 0.9881585721640639, + "grad_norm": 6.214270049157232, + "learning_rate": 7.353775870875268e-09, + "loss": 1.5897, + "step": 5758 + }, + { + "epoch": 0.9883301870602368, + "grad_norm": 4.3666436609758215, + "learning_rate": 7.142192726263819e-09, + "loss": 1.5412, + "step": 5759 + }, + { + "epoch": 0.9885018019564098, + "grad_norm": 4.281301235759054, + "learning_rate": 6.933696926456624e-09, + "loss": 1.4412, + "step": 5760 + }, + { + "epoch": 0.9886734168525828, + "grad_norm": 4.6665979094306005, + "learning_rate": 6.728288535868821e-09, + "loss": 1.4537, + "step": 5761 + }, + { + "epoch": 0.9888450317487558, + "grad_norm": 4.898945659991402, + "learning_rate": 6.525967617964091e-09, + "loss": 1.3742, + "step": 5762 + }, + { + "epoch": 0.9890166466449288, + "grad_norm": 3.9163507060149705, + "learning_rate": 6.326734235249099e-09, + "loss": 1.5333, + "step": 5763 + }, + { + "epoch": 0.9891882615411017, + "grad_norm": 4.747806602033526, + "learning_rate": 6.130588449277941e-09, + "loss": 1.553, + "step": 5764 + }, + { + "epoch": 0.9893598764372747, + "grad_norm": 4.461696847607399, + "learning_rate": 5.9375303206521406e-09, + "loss": 1.7084, + "step": 5765 + }, + { + "epoch": 0.9895314913334478, + "grad_norm": 4.288740577237806, + "learning_rate": 5.747559909016209e-09, + "loss": 1.4774, + "step": 5766 + }, + { + "epoch": 0.9897031062296208, + "grad_norm": 4.189912234005733, + "learning_rate": 5.5606772730643074e-09, + "loss": 1.484, + "step": 5767 + }, + { + "epoch": 0.9898747211257937, + "grad_norm": 4.53852672439193, + "learning_rate": 5.376882470533585e-09, + "loss": 1.7763, + "step": 5768 + }, + { + "epoch": 0.9900463360219667, + "grad_norm": 4.275803342711082, + "learning_rate": 5.196175558209727e-09, + "loss": 1.3581, + "step": 5769 + }, + { + "epoch": 0.9902179509181397, + "grad_norm": 4.9091876608330605, + "learning_rate": 5.018556591921409e-09, + "loss": 1.4312, + "step": 5770 + }, + { + "epoch": 0.9903895658143127, + "grad_norm": 4.107493584968732, + "learning_rate": 4.844025626544735e-09, + "loss": 1.3582, + "step": 5771 + }, + { + "epoch": 0.9905611807104857, + "grad_norm": 3.7564380181909702, + "learning_rate": 4.672582716004348e-09, + "loss": 1.1795, + "step": 5772 + }, + { + "epoch": 0.9907327956066586, + "grad_norm": 4.412765304725561, + "learning_rate": 4.504227913265658e-09, + "loss": 1.6317, + "step": 5773 + }, + { + "epoch": 0.9909044105028316, + "grad_norm": 5.24320877700556, + "learning_rate": 4.338961270344832e-09, + "loss": 1.4063, + "step": 5774 + }, + { + "epoch": 0.9910760253990046, + "grad_norm": 5.178813482280333, + "learning_rate": 4.17678283830103e-09, + "loss": 1.4973, + "step": 5775 + }, + { + "epoch": 0.9912476402951776, + "grad_norm": 4.381699413398468, + "learning_rate": 4.017692667239726e-09, + "loss": 1.379, + "step": 5776 + }, + { + "epoch": 0.9914192551913507, + "grad_norm": 4.730069800027837, + "learning_rate": 3.861690806312712e-09, + "loss": 1.4277, + "step": 5777 + }, + { + "epoch": 0.9915908700875236, + "grad_norm": 4.374380855516868, + "learning_rate": 3.708777303718103e-09, + "loss": 1.72, + "step": 5778 + }, + { + "epoch": 
0.9917624849836966, + "grad_norm": 4.196938287157688, + "learning_rate": 3.5589522066992178e-09, + "loss": 1.5164, + "step": 5779 + }, + { + "epoch": 0.9919340998798696, + "grad_norm": 4.305441793976212, + "learning_rate": 3.4122155615456954e-09, + "loss": 1.5696, + "step": 5780 + }, + { + "epoch": 0.9921057147760426, + "grad_norm": 4.294337267009494, + "learning_rate": 3.2685674135912727e-09, + "loss": 1.6286, + "step": 5781 + }, + { + "epoch": 0.9922773296722156, + "grad_norm": 4.188989705906731, + "learning_rate": 3.128007807217115e-09, + "loss": 1.4182, + "step": 5782 + }, + { + "epoch": 0.9924489445683885, + "grad_norm": 5.3859698793037225, + "learning_rate": 2.990536785850706e-09, + "loss": 1.4849, + "step": 5783 + }, + { + "epoch": 0.9926205594645615, + "grad_norm": 4.107292820838975, + "learning_rate": 2.8561543919647382e-09, + "loss": 1.5823, + "step": 5784 + }, + { + "epoch": 0.9927921743607345, + "grad_norm": 3.793923421622299, + "learning_rate": 2.7248606670760012e-09, + "loss": 1.3456, + "step": 5785 + }, + { + "epoch": 0.9929637892569075, + "grad_norm": 4.144068948793065, + "learning_rate": 2.596655651748714e-09, + "loss": 1.641, + "step": 5786 + }, + { + "epoch": 0.9931354041530804, + "grad_norm": 4.619523835567354, + "learning_rate": 2.471539385592303e-09, + "loss": 1.5316, + "step": 5787 + }, + { + "epoch": 0.9933070190492534, + "grad_norm": 4.407734077777609, + "learning_rate": 2.349511907263624e-09, + "loss": 1.536, + "step": 5788 + }, + { + "epoch": 0.9934786339454265, + "grad_norm": 4.236732643427535, + "learning_rate": 2.23057325446252e-09, + "loss": 1.2486, + "step": 5789 + }, + { + "epoch": 0.9936502488415995, + "grad_norm": 4.524850324515112, + "learning_rate": 2.1147234639351534e-09, + "loss": 1.5897, + "step": 5790 + }, + { + "epoch": 0.9938218637377725, + "grad_norm": 4.425977313268584, + "learning_rate": 2.0019625714740032e-09, + "loss": 1.4732, + "step": 5791 + }, + { + "epoch": 0.9939934786339454, + "grad_norm": 4.938136277441804, + "learning_rate": 1.8922906119178686e-09, + "loss": 1.5872, + "step": 5792 + }, + { + "epoch": 0.9941650935301184, + "grad_norm": 4.569109669252175, + "learning_rate": 1.7857076191507562e-09, + "loss": 1.6914, + "step": 5793 + }, + { + "epoch": 0.9943367084262914, + "grad_norm": 4.333319479768491, + "learning_rate": 1.6822136261018807e-09, + "loss": 1.6915, + "step": 5794 + }, + { + "epoch": 0.9945083233224644, + "grad_norm": 4.840521875000851, + "learning_rate": 1.5818086647445551e-09, + "loss": 1.6062, + "step": 5795 + }, + { + "epoch": 0.9946799382186374, + "grad_norm": 4.108093311140296, + "learning_rate": 1.4844927661017416e-09, + "loss": 1.4952, + "step": 5796 + }, + { + "epoch": 0.9948515531148103, + "grad_norm": 4.924556212858059, + "learning_rate": 1.3902659602382795e-09, + "loss": 1.4156, + "step": 5797 + }, + { + "epoch": 0.9950231680109833, + "grad_norm": 3.7023606293630182, + "learning_rate": 1.299128276266437e-09, + "loss": 1.3734, + "step": 5798 + }, + { + "epoch": 0.9951947829071564, + "grad_norm": 4.253331909254967, + "learning_rate": 1.2110797423436903e-09, + "loss": 1.3812, + "step": 5799 + }, + { + "epoch": 0.9953663978033294, + "grad_norm": 5.31903207893972, + "learning_rate": 1.1261203856727243e-09, + "loss": 1.6717, + "step": 5800 + }, + { + "epoch": 0.9955380126995023, + "grad_norm": 4.373451600826322, + "learning_rate": 1.0442502325025417e-09, + "loss": 1.7196, + "step": 5801 + }, + { + "epoch": 0.9957096275956753, + "grad_norm": 4.10867964278865, + "learning_rate": 9.654693081273537e-10, + "loss": 1.3952, 
+ "step": 5802 + }, + { + "epoch": 0.9958812424918483, + "grad_norm": 4.555893974185902, + "learning_rate": 8.897776368865796e-10, + "loss": 1.3633, + "step": 5803 + }, + { + "epoch": 0.9960528573880213, + "grad_norm": 3.8567048783853206, + "learning_rate": 8.171752421670676e-10, + "loss": 1.5258, + "step": 5804 + }, + { + "epoch": 0.9962244722841943, + "grad_norm": 4.438392567757818, + "learning_rate": 7.476621463975431e-10, + "loss": 1.577, + "step": 5805 + }, + { + "epoch": 0.9963960871803672, + "grad_norm": 4.957109327806115, + "learning_rate": 6.812383710552706e-10, + "loss": 1.4023, + "step": 5806 + }, + { + "epoch": 0.9965677020765402, + "grad_norm": 4.601743178471645, + "learning_rate": 6.179039366616125e-10, + "loss": 1.4835, + "step": 5807 + }, + { + "epoch": 0.9967393169727132, + "grad_norm": 4.128581428788443, + "learning_rate": 5.576588627853596e-10, + "loss": 1.3219, + "step": 5808 + }, + { + "epoch": 0.9969109318688862, + "grad_norm": 3.6985861575738586, + "learning_rate": 5.005031680394012e-10, + "loss": 1.2912, + "step": 5809 + }, + { + "epoch": 0.9970825467650593, + "grad_norm": 4.224786090285248, + "learning_rate": 4.4643687008183445e-10, + "loss": 1.275, + "step": 5810 + }, + { + "epoch": 0.9972541616612322, + "grad_norm": 4.841573319769585, + "learning_rate": 3.954599856159647e-10, + "loss": 1.8621, + "step": 5811 + }, + { + "epoch": 0.9974257765574052, + "grad_norm": 6.116871958726782, + "learning_rate": 3.475725303925259e-10, + "loss": 1.531, + "step": 5812 + }, + { + "epoch": 0.9975973914535782, + "grad_norm": 4.581249786183579, + "learning_rate": 3.0277451920635024e-10, + "loss": 1.5031, + "step": 5813 + }, + { + "epoch": 0.9977690063497512, + "grad_norm": 4.707212027960239, + "learning_rate": 2.610659658974779e-10, + "loss": 1.6873, + "step": 5814 + }, + { + "epoch": 0.9979406212459242, + "grad_norm": 4.3219901955773565, + "learning_rate": 2.2244688335226749e-10, + "loss": 1.4523, + "step": 5815 + }, + { + "epoch": 0.9981122361420971, + "grad_norm": 4.552469182606364, + "learning_rate": 1.8691728350228589e-10, + "loss": 1.3889, + "step": 5816 + }, + { + "epoch": 0.9982838510382701, + "grad_norm": 5.152270907638353, + "learning_rate": 1.5447717732541834e-10, + "loss": 1.344, + "step": 5817 + }, + { + "epoch": 0.9984554659344431, + "grad_norm": 3.785638594551996, + "learning_rate": 1.2512657484253788e-10, + "loss": 1.4309, + "step": 5818 + }, + { + "epoch": 0.9986270808306161, + "grad_norm": 5.645873329631196, + "learning_rate": 9.886548512305638e-11, + "loss": 1.6786, + "step": 5819 + }, + { + "epoch": 0.998798695726789, + "grad_norm": 4.406337412937685, + "learning_rate": 7.569391628048372e-11, + "loss": 1.3381, + "step": 5820 + }, + { + "epoch": 0.998970310622962, + "grad_norm": 4.677360176059494, + "learning_rate": 5.5611875472427746e-11, + "loss": 1.7457, + "step": 5821 + }, + { + "epoch": 0.999141925519135, + "grad_norm": 3.9042113518042725, + "learning_rate": 3.8619368903924926e-11, + "loss": 1.4341, + "step": 5822 + }, + { + "epoch": 0.9993135404153081, + "grad_norm": 3.6987967483322937, + "learning_rate": 2.471640182633017e-11, + "loss": 1.238, + "step": 5823 + }, + { + "epoch": 0.9994851553114811, + "grad_norm": 5.509262200567748, + "learning_rate": 1.390297853287592e-11, + "loss": 1.5012, + "step": 5824 + }, + { + "epoch": 0.999656770207654, + "grad_norm": 3.9329386319957775, + "learning_rate": 6.179102365333478e-12, + "loss": 1.386, + "step": 5825 + }, + { + "epoch": 0.999828385103827, + "grad_norm": 5.197591851477818, + "learning_rate": 
1.5447757106823447e-12, + "loss": 1.6331, + "step": 5826 + }, + { + "epoch": 1.0, + "grad_norm": 4.3208345121338105, + "learning_rate": 0.0, + "loss": 1.3919, + "step": 5827 + }, + { + "epoch": 1.0, + "step": 5827, + "total_flos": 1.29494026024072e+18, + "train_loss": 1.686343349435838, + "train_runtime": 157705.4771, + "train_samples_per_second": 2.364, + "train_steps_per_second": 0.037 + } + ], + "logging_steps": 1.0, + "max_steps": 5827, + "num_input_tokens_seen": 0, + "num_train_epochs": 1, + "save_steps": 500, + "stateful_callbacks": { + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 1.29494026024072e+18, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}
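
The JSON above appears to be the complete trainer state written out at the end of this one-epoch, 5827-step run. As an illustrative sketch only (not part of the logged state), the short Python snippet below shows one way such a file could be read back to inspect the logged loss curve and the final summary; the file name "trainer_state.json", the script itself, and the choice of the standard json module are assumptions for the example, not anything recorded in the diff.

# sketch: summarize the per-step loss entries in a trainer_state.json file
# (assumes the file shown above is saved locally as "trainer_state.json")
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry a "loss" key; the final aggregate entry instead
# carries "train_loss"/"train_runtime", so filtering on "loss" skips it.
records = [r for r in state["log_history"] if "loss" in r]
summary = state["log_history"][-1]

steps = [r["step"] for r in records]
losses = [r["loss"] for r in records]

print(f"logged steps     : {len(records)} (last step {steps[-1]})")
print(f"first -> last loss: {losses[0]:.4f} -> {losses[-1]:.4f}")
print(f"mean train loss   : {summary.get('train_loss')}")
print(f"train runtime (s) : {summary.get('train_runtime')}")

Run against the data above, this would report 5827 logged steps and echo the train_loss and train_runtime values from the closing summary entry; plotting steps against losses with any charting library would give the usual loss-vs-step curve.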