{
  "best_metric": 0.24766209564477684,
  "best_model_checkpoint": "t5-results/checkpoint-2552",
  "epoch": 8.0,
  "global_step": 2552,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.996865203761756e-05, "loss": 0.9455, "step": 2 },
    { "epoch": 0.01, "learning_rate": 4.993730407523511e-05, "loss": 1.6682, "step": 4 },
    { "epoch": 0.02, "learning_rate": 4.9905956112852665e-05, "loss": 1.8287, "step": 6 },
    { "epoch": 0.03, "learning_rate": 4.987460815047022e-05, "loss": 0.4417, "step": 8 },
    { "epoch": 0.03, "learning_rate": 4.9843260188087774e-05, "loss": 0.4163, "step": 10 },
    { "epoch": 0.04, "learning_rate": 4.9811912225705335e-05, "loss": 2.1209, "step": 12 },
    { "epoch": 0.04, "learning_rate": 4.978056426332289e-05, "loss": 0.2798, "step": 14 },
    { "epoch": 0.05, "learning_rate": 4.9749216300940444e-05, "loss": 1.947, "step": 16 },
    { "epoch": 0.06, "learning_rate": 4.971786833855799e-05, "loss": 1.2442, "step": 18 },
    { "epoch": 0.06, "learning_rate": 4.968652037617555e-05, "loss": 0.3761, "step": 20 },
    { "epoch": 0.07, "learning_rate": 4.9655172413793107e-05, "loss": 0.9298, "step": 22 },
    { "epoch": 0.08, "learning_rate": 4.962382445141066e-05, "loss": 1.3809, "step": 24 },
    { "epoch": 0.08, "learning_rate": 4.9592476489028215e-05, "loss": 0.3682, "step": 26 },
    { "epoch": 0.09, "learning_rate": 4.956112852664577e-05, "loss": 0.2959, "step": 28 },
    { "epoch": 0.09, "learning_rate": 4.9529780564263324e-05, "loss": 0.2546, "step": 30 },
    { "epoch": 0.1, "learning_rate": 4.949843260188088e-05, "loss": 0.4341, "step": 32 },
    { "epoch": 0.11, "learning_rate": 4.946708463949843e-05, "loss": 0.1989, "step": 34 },
    { "epoch": 0.11, "learning_rate": 4.9435736677115994e-05, "loss": 0.2539, "step": 36 },
    { "epoch": 0.12, "learning_rate": 4.940438871473355e-05, "loss": 0.7307, "step": 38 },
    { "epoch": 0.13, "learning_rate": 4.93730407523511e-05, "loss": 0.8249, "step": 40 },
    { "epoch": 0.13, "learning_rate": 4.934169278996865e-05, "loss": 0.7287, "step": 42 },
    { "epoch": 0.14, "learning_rate": 4.931034482758621e-05, "loss": 0.3686, "step": 44 },
    { "epoch": 0.14, "learning_rate": 4.9278996865203765e-05, "loss": 1.3282, "step": 46 },
    { "epoch": 0.15, "learning_rate": 4.924764890282132e-05, "loss": 0.4953, "step": 48 },
    { "epoch": 0.16, "learning_rate": 4.9216300940438874e-05, "loss": 0.2742, "step": 50 },
    { "epoch": 0.16, "learning_rate": 4.918495297805643e-05, "loss": 0.6305, "step": 52 },
    { "epoch": 0.17, "learning_rate": 4.915360501567398e-05, "loss": 1.459, "step": 54 },
    { "epoch": 0.18, "learning_rate": 4.9122257053291537e-05, "loss": 1.2348, "step": 56 },
    { "epoch": 0.18, "learning_rate": 4.909090909090909e-05, "loss": 1.0512, "step": 58 },
    { "epoch": 0.19, "learning_rate": 4.905956112852665e-05, "loss": 1.0997, "step": 60 },
    { "epoch": 0.19, "learning_rate": 4.9028213166144206e-05, "loss": 0.2254, "step": 62 },
    { "epoch": 0.2, "learning_rate": 4.899686520376176e-05, "loss": 0.5962, "step": 64 },
    { "epoch": 0.21, "learning_rate": 4.896551724137931e-05, "loss": 0.94, "step": 66 },
    { "epoch": 0.21, "learning_rate": 4.893416927899687e-05, "loss": 1.0733, "step": 68 },
    { "epoch": 0.22, "learning_rate": 4.8902821316614424e-05, "loss": 0.6645, "step": 70 },
    { "epoch": 0.23, "learning_rate": 4.887147335423198e-05, "loss": 1.1294, "step": 72 },
    { "epoch": 0.23, "learning_rate": 4.884012539184953e-05, "loss": 1.0914, "step": 74 },
    { "epoch": 0.24, "learning_rate": 4.8808777429467086e-05, "loss": 0.261, "step": 76 },
    { "epoch": 0.24, "learning_rate": 4.877742946708464e-05, "loss": 0.2536, "step": 78 },
    { "epoch": 0.25, "learning_rate": 4.8746081504702195e-05, "loss": 0.5267, "step": 80 },
    { "epoch": 0.26, "learning_rate": 4.871473354231975e-05, "loss": 0.4753, "step": 82 },
    { "epoch": 0.26, "learning_rate": 4.868338557993731e-05, "loss": 1.0177, "step": 84 },
    { "epoch": 0.27, "learning_rate": 4.8652037617554865e-05, "loss": 0.2379, "step": 86 },
    { "epoch": 0.28, "learning_rate": 4.862068965517241e-05, "loss": 0.495, "step": 88 },
    { "epoch": 0.28, "learning_rate": 4.858934169278997e-05, "loss": 0.1808, "step": 90 },
    { "epoch": 0.29, "learning_rate": 4.855799373040753e-05, "loss": 0.2945, "step": 92 },
    { "epoch": 0.29, "learning_rate": 4.852664576802508e-05, "loss": 0.6358, "step": 94 },
    { "epoch": 0.3, "learning_rate": 4.8495297805642636e-05, "loss": 0.6127, "step": 96 },
    { "epoch": 0.31, "learning_rate": 4.846394984326019e-05, "loss": 0.4608, "step": 98 },
    { "epoch": 0.31, "learning_rate": 4.8432601880877745e-05, "loss": 0.7172, "step": 100 },
    { "epoch": 0.32, "learning_rate": 4.84012539184953e-05, "loss": 0.1879, "step": 102 },
    { "epoch": 0.33, "learning_rate": 4.8369905956112854e-05, "loss": 1.0405, "step": 104 },
    { "epoch": 0.33, "learning_rate": 4.833855799373041e-05, "loss": 0.6024, "step": 106 },
    { "epoch": 0.34, "learning_rate": 4.830721003134797e-05, "loss": 1.1472, "step": 108 },
    { "epoch": 0.34, "learning_rate": 4.827586206896552e-05, "loss": 0.7647, "step": 110 },
    { "epoch": 0.35, "learning_rate": 4.824451410658307e-05, "loss": 0.9441, "step": 112 },
    { "epoch": 0.36, "learning_rate": 4.8213166144200625e-05, "loss": 1.0265, "step": 114 },
    { "epoch": 0.36, "learning_rate": 4.8181818181818186e-05, "loss": 0.1369, "step": 116 },
    { "epoch": 0.37, "learning_rate": 4.815047021943574e-05, "loss": 0.6012, "step": 118 },
    { "epoch": 0.38, "learning_rate": 4.8119122257053295e-05, "loss": 1.6127, "step": 120 },
    { "epoch": 0.38, "learning_rate": 4.808777429467085e-05, "loss": 1.2571, "step": 122 },
    { "epoch": 0.39, "learning_rate": 4.8056426332288403e-05, "loss": 0.8723, "step": 124 },
    { "epoch": 0.39, "learning_rate": 4.802507836990596e-05, "loss": 0.8984, "step": 126 },
    { "epoch": 0.4, "learning_rate": 4.799373040752351e-05, "loss": 1.1541, "step": 128 },
    { "epoch": 0.41, "learning_rate": 4.7962382445141066e-05, "loss": 0.3226, "step": 130 },
    { "epoch": 0.41, "learning_rate": 4.793103448275863e-05, "loss": 0.2267, "step": 132 },
    { "epoch": 0.42, "learning_rate": 4.789968652037618e-05, "loss": 0.7902, "step": 134 },
    { "epoch": 0.43, "learning_rate": 4.786833855799373e-05, "loss": 1.8272, "step": 136 },
    { "epoch": 0.43, "learning_rate": 4.7836990595611284e-05, "loss": 0.396, "step": 138 },
    { "epoch": 0.44, "learning_rate": 4.7805642633228845e-05, "loss": 0.6937, "step": 140 },
    { "epoch": 0.45, "learning_rate": 4.77742946708464e-05, "loss": 1.0419, "step": 142 },
    { "epoch": 0.45, "learning_rate": 4.774294670846395e-05, "loss": 0.413, "step": 144 },
    { "epoch": 0.46, "learning_rate": 4.771159874608151e-05, "loss": 1.9694, "step": 146 },
    { "epoch": 0.46, "learning_rate": 4.768025078369906e-05, "loss": 0.3958, "step": 148 },
    { "epoch": 0.47, "learning_rate": 4.7648902821316616e-05, "loss": 0.2723, "step": 150 },
    { "epoch": 0.48, "learning_rate": 4.761755485893417e-05, "loss": 0.2836, "step": 152 },
    { "epoch": 0.48, "learning_rate": 4.7586206896551725e-05, "loss": 0.2555, "step": 154 },
    { "epoch": 0.49, "learning_rate": 4.7554858934169286e-05, "loss": 0.2959, "step": 156 },
    { "epoch": 0.5, "learning_rate": 4.752351097178684e-05, "loss": 0.1831, "step": 158 },
    { "epoch": 0.5, "learning_rate": 4.749216300940439e-05, "loss": 0.5239, "step": 160 },
    { "epoch": 0.51, "learning_rate": 4.746081504702194e-05, "loss": 0.5796, "step": 162 },
    { "epoch": 0.51, "learning_rate": 4.74294670846395e-05, "loss": 0.993, "step": 164 },
    { "epoch": 0.52, "learning_rate": 4.739811912225706e-05, "loss": 0.8317, "step": 166 },
    { "epoch": 0.53, "learning_rate": 4.736677115987461e-05, "loss": 2.053, "step": 168 },
    { "epoch": 0.53, "learning_rate": 4.7335423197492166e-05, "loss": 1.1727, "step": 170 },
    { "epoch": 0.54, "learning_rate": 4.730407523510972e-05, "loss": 0.3161, "step": 172 },
    { "epoch": 0.55, "learning_rate": 4.7272727272727275e-05, "loss": 1.2573, "step": 174 },
    { "epoch": 0.55, "learning_rate": 4.724137931034483e-05, "loss": 1.0586, "step": 176 },
    { "epoch": 0.56, "learning_rate": 4.721003134796238e-05, "loss": 0.3037, "step": 178 },
    { "epoch": 0.56, "learning_rate": 4.7178683385579944e-05, "loss": 0.6061, "step": 180 },
    { "epoch": 0.57, "learning_rate": 4.714733542319749e-05, "loss": 0.3963, "step": 182 },
    { "epoch": 0.58, "learning_rate": 4.7115987460815046e-05, "loss": 0.8247, "step": 184 },
    { "epoch": 0.58, "learning_rate": 4.70846394984326e-05, "loss": 1.1442, "step": 186 },
    { "epoch": 0.59, "learning_rate": 4.705329153605016e-05, "loss": 0.6141, "step": 188 },
    { "epoch": 0.6, "learning_rate": 4.7021943573667716e-05, "loss": 0.3434, "step": 190 },
    { "epoch": 0.6, "learning_rate": 4.699059561128527e-05, "loss": 0.7776, "step": 192 },
    { "epoch": 0.61, "learning_rate": 4.695924764890282e-05, "loss": 0.7379, "step": 194 },
    { "epoch": 0.61, "learning_rate": 4.692789968652038e-05, "loss": 0.855, "step": 196 },
    { "epoch": 0.62, "learning_rate": 4.689655172413793e-05, "loss": 0.1466, "step": 198 },
    { "epoch": 0.63, "learning_rate": 4.686520376175549e-05, "loss": 0.3229, "step": 200 },
    { "epoch": 0.63, "learning_rate": 4.683385579937304e-05, "loss": 0.2641, "step": 202 },
    { "epoch": 0.64, "learning_rate": 4.68025078369906e-05, "loss": 0.1557, "step": 204 },
    { "epoch": 0.65, "learning_rate": 4.677115987460815e-05, "loss": 0.4341, "step": 206 },
    { "epoch": 0.65, "learning_rate": 4.6739811912225705e-05, "loss": 1.5467, "step": 208 },
    { "epoch": 0.66, "learning_rate": 4.670846394984326e-05, "loss": 1.9732, "step": 210 },
    { "epoch": 0.66, "learning_rate": 4.667711598746082e-05, "loss": 0.5423, "step": 212 },
    { "epoch": 0.67, "learning_rate": 4.6645768025078374e-05, "loss": 1.1553, "step": 214 },
    { "epoch": 0.68, "learning_rate": 4.661442006269593e-05, "loss": 0.2762, "step": 216 },
    { "epoch": 0.68, "learning_rate": 4.6583072100313476e-05, "loss": 0.5309, "step": 218 },
    { "epoch": 0.69, "learning_rate": 4.655172413793104e-05, "loss": 0.7831, "step": 220 },
    { "epoch": 0.7, "learning_rate": 4.652037617554859e-05, "loss": 0.5252, "step": 222 },
    { "epoch": 0.7, "learning_rate": 4.6489028213166146e-05, "loss": 0.8883, "step": 224 },
    { "epoch": 0.71, "learning_rate": 4.64576802507837e-05, "loss": 0.9255, "step": 226 },
    { "epoch": 0.71, "learning_rate": 4.642633228840126e-05, "loss": 0.619, "step": 228 },
    { "epoch": 0.72, "learning_rate": 4.639498432601881e-05, "loss": 0.1687, "step": 230 },
    { "epoch": 0.73, "learning_rate": 4.636363636363636e-05, "loss": 0.1271, "step": 232 },
    { "epoch": 0.73, "learning_rate": 4.633228840125392e-05, "loss": 0.2336, "step": 234 },
    { "epoch": 0.74, "learning_rate": 4.630094043887148e-05, "loss": 0.6617, "step": 236 },
    { "epoch": 0.75, "learning_rate": 4.626959247648903e-05, "loss": 0.2228, "step": 238 },
    { "epoch": 0.75, "learning_rate": 4.623824451410659e-05, "loss": 0.1894, "step": 240 },
    { "epoch": 0.76, "learning_rate": 4.6206896551724135e-05, "loss": 0.235, "step": 242 },
    { "epoch": 0.76, "learning_rate": 4.6175548589341696e-05, "loss": 0.4784, "step": 244 },
    { "epoch": 0.77, "learning_rate": 4.614420062695925e-05, "loss": 0.1157, "step": 246 },
    { "epoch": 0.78, "learning_rate": 4.6112852664576805e-05, "loss": 0.3989, "step": 248 },
    { "epoch": 0.78, "learning_rate": 4.608150470219436e-05, "loss": 0.8925, "step": 250 },
    { "epoch": 0.79, "learning_rate": 4.605015673981192e-05, "loss": 0.2321, "step": 252 },
    { "epoch": 0.8, "learning_rate": 4.601880877742947e-05, "loss": 0.679, "step": 254 },
    { "epoch": 0.8, "learning_rate": 4.598746081504702e-05, "loss": 0.2459, "step": 256 },
    { "epoch": 0.81, "learning_rate": 4.5956112852664576e-05, "loss": 1.0986, "step": 258 },
    { "epoch": 0.82, "learning_rate": 4.592476489028214e-05, "loss": 0.8269, "step": 260 },
    { "epoch": 0.82, "learning_rate": 4.589341692789969e-05, "loss": 1.0066, "step": 262 },
    { "epoch": 0.83, "learning_rate": 4.586206896551724e-05, "loss": 0.4048, "step": 264 },
    { "epoch": 0.83, "learning_rate": 4.583072100313479e-05, "loss": 0.9815, "step": 266 },
    { "epoch": 0.84, "learning_rate": 4.5799373040752354e-05, "loss": 0.2814, "step": 268 },
    { "epoch": 0.85, "learning_rate": 4.576802507836991e-05, "loss": 0.169, "step": 270 },
    { "epoch": 0.85, "learning_rate": 4.573667711598746e-05, "loss": 0.7363, "step": 272 },
    { "epoch": 0.86, "learning_rate": 4.570532915360502e-05, "loss": 0.1314, "step": 274 },
    { "epoch": 0.87, "learning_rate": 4.567398119122257e-05, "loss": 0.6374, "step": 276 },
    { "epoch": 0.87, "learning_rate": 4.5642633228840126e-05, "loss": 0.9378, "step": 278 },
    { "epoch": 0.88, "learning_rate": 4.561128526645768e-05, "loss": 0.3465, "step": 280 },
    { "epoch": 0.88, "learning_rate": 4.5579937304075235e-05, "loss": 0.1601, "step": 282 },
    { "epoch": 0.89, "learning_rate": 4.5548589341692796e-05, "loss": 0.318, "step": 284 },
    { "epoch": 0.9, "learning_rate": 4.551724137931035e-05, "loss": 0.9829, "step": 286 },
    { "epoch": 0.9, "learning_rate": 4.54858934169279e-05, "loss": 0.4091, "step": 288 },
    { "epoch": 0.91, "learning_rate": 4.545454545454546e-05, "loss": 0.1917, "step": 290 },
    { "epoch": 0.92, "learning_rate": 4.542319749216301e-05, "loss": 0.3441, "step": 292 },
    { "epoch": 0.92, "learning_rate": 4.539184952978057e-05, "loss": 0.7346, "step": 294 },
    { "epoch": 0.93, "learning_rate": 4.536050156739812e-05, "loss": 0.9984, "step": 296 },
    { "epoch": 0.93, "learning_rate": 4.5329153605015676e-05, "loss": 0.8257, "step": 298 },
    { "epoch": 0.94, "learning_rate": 4.529780564263323e-05, "loss": 0.265, "step": 300 },
    { "epoch": 0.95, "learning_rate": 4.5266457680250784e-05, "loss": 0.4272, "step": 302 },
    { "epoch": 0.95, "learning_rate": 4.523510971786834e-05, "loss": 0.4186, "step": 304 },
    { "epoch": 0.96, "learning_rate": 4.520376175548589e-05, "loss": 0.2057, "step": 306 },
    { "epoch": 0.97, "learning_rate": 4.5172413793103454e-05, "loss": 1.2554, "step": 308 },
    { "epoch": 0.97, "learning_rate": 4.514106583072101e-05, "loss": 0.8404, "step": 310 },
    { "epoch": 0.98, "learning_rate": 4.5109717868338556e-05, "loss": 0.2499, "step": 312 },
    { "epoch": 0.98, "learning_rate": 4.507836990595612e-05, "loss": 1.1946, "step": 314 },
    { "epoch": 0.99, "learning_rate": 4.504702194357367e-05, "loss": 0.3571, "step": 316 },
    { "epoch": 1.0, "learning_rate": 4.5015673981191226e-05, "loss": 1.0764, "step": 318 },
    { "epoch": 1.0, "eval_bleu": 0.2125382422737567, "eval_loss": 0.5874338746070862, "eval_runtime": 76.4643, "eval_samples_per_second": 4.211, "eval_steps_per_second": 1.059, "step": 319 },
    { "epoch": 1.0, "learning_rate": 4.498432601880878e-05, "loss": 1.09, "step": 320 },
    { "epoch": 1.01, "learning_rate": 4.4952978056426334e-05, "loss": 0.3573, "step": 322 },
    { "epoch": 1.02, "learning_rate": 4.492163009404389e-05, "loss": 0.8303, "step": 324 },
    { "epoch": 1.02, "learning_rate": 4.489028213166144e-05, "loss": 0.1176, "step": 326 },
    { "epoch": 1.03, "learning_rate": 4.4858934169279e-05, "loss": 0.0794, "step": 328 },
    { "epoch": 1.03, "learning_rate": 4.482758620689655e-05, "loss": 1.3543, "step": 330 },
    { "epoch": 1.04, "learning_rate": 4.479623824451411e-05, "loss": 0.1834, "step": 332 },
    { "epoch": 1.05, "learning_rate": 4.476489028213167e-05, "loss": 0.9918, "step": 334 },
    { "epoch": 1.05, "learning_rate": 4.4733542319749214e-05, "loss": 0.1363, "step": 336 },
    { "epoch": 1.06, "learning_rate": 4.4702194357366776e-05, "loss": 0.7989, "step": 338 },
    { "epoch": 1.07, "learning_rate": 4.467084639498433e-05, "loss": 0.9555, "step": 340 },
    { "epoch": 1.07, "learning_rate": 4.4639498432601884e-05, "loss": 0.1831, "step": 342 },
    { "epoch": 1.08, "learning_rate": 4.460815047021944e-05, "loss": 0.2329, "step": 344 },
    { "epoch": 1.08, "learning_rate": 4.457680250783699e-05, "loss": 0.4434, "step": 346 },
    { "epoch": 1.09, "learning_rate": 4.454545454545455e-05, "loss": 0.1666, "step": 348 },
    { "epoch": 1.1, "learning_rate": 4.45141065830721e-05, "loss": 0.2148, "step": 350 },
    { "epoch": 1.1, "learning_rate": 4.4482758620689656e-05, "loss": 1.0281, "step": 352 },
    { "epoch": 1.11, "learning_rate": 4.445141065830721e-05, "loss": 0.3049, "step": 354 },
    { "epoch": 1.12, "learning_rate": 4.442006269592477e-05, "loss": 1.0153, "step": 356 },
    { "epoch": 1.12, "learning_rate": 4.438871473354232e-05, "loss": 0.1113, "step": 358 },
    { "epoch": 1.13, "learning_rate": 4.435736677115987e-05, "loss": 0.5987, "step": 360 },
    { "epoch": 1.13, "learning_rate": 4.4326018808777434e-05, "loss": 0.1614, "step": 362 },
    { "epoch": 1.14, "learning_rate": 4.429467084639499e-05, "loss": 0.7884, "step": 364 },
    { "epoch": 1.15, "learning_rate": 4.426332288401254e-05, "loss": 1.1267, "step": 366 },
    { "epoch": 1.15, "learning_rate": 4.42319749216301e-05, "loss": 0.1548, "step": 368 },
    { "epoch": 1.16, "learning_rate": 4.420062695924765e-05, "loss": 0.3899, "step": 370 },
    { "epoch": 1.17, "learning_rate": 4.4169278996865206e-05, "loss": 0.3485, "step": 372 },
    { "epoch": 1.17, "learning_rate": 4.413793103448276e-05, "loss": 0.4649, "step": 374 },
    { "epoch": 1.18, "learning_rate": 4.4106583072100314e-05, "loss": 0.2558, "step": 376 },
    { "epoch": 1.18, "learning_rate": 4.407523510971787e-05, "loss": 0.4263, "step": 378 },
    { "epoch": 1.19, "learning_rate": 4.404388714733543e-05, "loss": 0.3739, "step": 380 },
    { "epoch": 1.2, "learning_rate": 4.401253918495298e-05, "loss": 0.5223, "step": 382 },
    { "epoch": 1.2, "learning_rate": 4.398119122257053e-05, "loss": 0.1385, "step": 384 },
    { "epoch": 1.21, "learning_rate": 4.394984326018809e-05, "loss": 0.4854, "step": 386 },
    { "epoch": 1.22, "learning_rate": 4.391849529780565e-05, "loss": 0.9741, "step": 388 },
    { "epoch": 1.22, "learning_rate": 4.38871473354232e-05, "loss": 1.0183, "step": 390 },
    { "epoch": 1.23, "learning_rate": 4.3855799373040755e-05, "loss": 1.0496, "step": 392 },
    { "epoch": 1.24, "learning_rate": 4.382445141065831e-05, "loss": 0.2626, "step": 394 },
    { "epoch": 1.24, "learning_rate": 4.3793103448275864e-05, "loss": 1.2788, "step": 396 },
    { "epoch": 1.25, "learning_rate": 4.376175548589342e-05, "loss": 0.4179, "step": 398 },
    { "epoch": 1.25, "learning_rate": 4.373040752351097e-05, "loss": 0.1952, "step": 400 },
    { "epoch": 1.26, "learning_rate": 4.369905956112853e-05, "loss": 0.7785, "step": 402 },
    { "epoch": 1.27, "learning_rate": 4.366771159874609e-05, "loss": 0.1975, "step": 404 },
    { "epoch": 1.27, "learning_rate": 4.3636363636363636e-05, "loss": 0.488, "step": 406 },
    { "epoch": 1.28, "learning_rate": 4.360501567398119e-05, "loss": 0.4872, "step": 408 },
    { "epoch": 1.29, "learning_rate": 4.357366771159875e-05, "loss": 0.205, "step": 410 },
    { "epoch": 1.29, "learning_rate": 4.3542319749216305e-05, "loss": 0.2869, "step": 412 },
    { "epoch": 1.3, "learning_rate": 4.351097178683386e-05, "loss": 0.4398, "step": 414 },
    { "epoch": 1.3, "learning_rate": 4.3479623824451414e-05, "loss": 0.8419, "step": 416 },
    { "epoch": 1.31, "learning_rate": 4.344827586206897e-05, "loss": 0.7001, "step": 418 },
    { "epoch": 1.32, "learning_rate": 4.341692789968652e-05, "loss": 0.1239, "step": 420 },
    { "epoch": 1.32, "learning_rate": 4.338557993730408e-05, "loss": 0.542, "step": 422 },
    { "epoch": 1.33, "learning_rate": 4.335423197492163e-05, "loss": 0.1033, "step": 424 },
    { "epoch": 1.34, "learning_rate": 4.3322884012539185e-05, "loss": 0.6765, "step": 426 },
    { "epoch": 1.34, "learning_rate": 4.3291536050156747e-05, "loss": 0.2388, "step": 428 },
    { "epoch": 1.35, "learning_rate": 4.3260188087774294e-05, "loss": 1.0428, "step": 430 },
    { "epoch": 1.35, "learning_rate": 4.322884012539185e-05, "loss": 1.0099, "step": 432 },
    { "epoch": 1.36, "learning_rate": 4.319749216300941e-05, "loss": 0.4892, "step": 434 },
    { "epoch": 1.37, "learning_rate": 4.3166144200626964e-05, "loss": 0.1608, "step": 436 },
    { "epoch": 1.37, "learning_rate": 4.313479623824452e-05, "loss": 0.9799, "step": 438 },
    { "epoch": 1.38, "learning_rate": 4.3103448275862066e-05, "loss": 0.0847, "step": 440 },
    { "epoch": 1.39, "learning_rate": 4.307210031347963e-05, "loss": 0.6962, "step": 442 },
    { "epoch": 1.39, "learning_rate": 4.304075235109718e-05, "loss": 0.5673, "step": 444 },
    { "epoch": 1.4, "learning_rate": 4.3009404388714735e-05, "loss": 0.2379, "step": 446 },
    { "epoch": 1.4, "learning_rate": 4.297805642633229e-05, "loss": 0.4849, "step": 448 },
    { "epoch": 1.41, "learning_rate": 4.294670846394985e-05, "loss": 0.4408, "step": 450 },
    { "epoch": 1.42, "learning_rate": 4.29153605015674e-05, "loss": 1.0882, "step": 452 },
    { "epoch": 1.42, "learning_rate": 4.288401253918495e-05, "loss": 0.0825, "step": 454 },
    { "epoch": 1.43, "learning_rate": 4.285266457680251e-05, "loss": 0.1645, "step": 456 },
    { "epoch": 1.44, "learning_rate": 4.282131661442007e-05, "loss": 1.538, "step": 458 },
    { "epoch": 1.44, "learning_rate": 4.278996865203762e-05, "loss": 0.1576, "step": 460 },
    { "epoch": 1.45, "learning_rate": 4.275862068965518e-05, "loss": 0.3003, "step": 462 },
    { "epoch": 1.45, "learning_rate": 4.2727272727272724e-05, "loss": 0.3405, "step": 464 },
    { "epoch": 1.46, "learning_rate": 4.2695924764890285e-05, "loss": 0.8745, "step": 466 },
    { "epoch": 1.47, "learning_rate": 4.266457680250784e-05, "loss": 0.8657, "step": 468 },
    { "epoch": 1.47, "learning_rate": 4.2633228840125394e-05, "loss": 0.6039, "step": 470 },
    { "epoch": 1.48, "learning_rate": 4.260188087774295e-05, "loss": 0.8955, "step": 472 },
    { "epoch": 1.49, "learning_rate": 4.257053291536051e-05, "loss": 0.203, "step": 474 },
    { "epoch": 1.49, "learning_rate": 4.253918495297806e-05, "loss": 0.4717, "step": 476 },
    { "epoch": 1.5, "learning_rate": 4.250783699059561e-05, "loss": 0.4653, "step": 478 },
    { "epoch": 1.5, "learning_rate": 4.2476489028213165e-05, "loss": 0.7486, "step": 480 },
    { "epoch": 1.51, "learning_rate": 4.2445141065830726e-05, "loss": 0.2863, "step": 482 },
    { "epoch": 1.52, "learning_rate": 4.241379310344828e-05, "loss": 0.1675, "step": 484 },
    { "epoch": 1.52, "learning_rate": 4.2382445141065835e-05, "loss": 0.3746, "step": 486 },
    { "epoch": 1.53, "learning_rate": 4.235109717868338e-05, "loss": 1.363, "step": 488 },
    { "epoch": 1.54, "learning_rate": 4.2319749216300944e-05, "loss": 0.2312, "step": 490 },
    { "epoch": 1.54, "learning_rate": 4.22884012539185e-05, "loss": 0.7131, "step": 492 },
    { "epoch": 1.55, "learning_rate": 4.225705329153605e-05, "loss": 0.5815, "step": 494 },
    { "epoch": 1.55, "learning_rate": 4.222570532915361e-05, "loss": 1.0779, "step": 496 },
    { "epoch": 1.56, "learning_rate": 4.219435736677117e-05, "loss": 0.3242, "step": 498 },
    { "epoch": 1.57, "learning_rate": 4.2163009404388715e-05, "loss": 0.1308, "step": 500 },
    { "epoch": 1.57, "learning_rate": 4.213166144200627e-05, "loss": 0.2272, "step": 502 },
    { "epoch": 1.58, "learning_rate": 4.2100313479623824e-05, "loss": 0.1048, "step": 504 },
    { "epoch": 1.59, "learning_rate": 4.2068965517241385e-05, "loss": 0.8957, "step": 506 },
    { "epoch": 1.59, "learning_rate": 4.203761755485894e-05, "loss": 0.277, "step": 508 },
    { "epoch": 1.6, "learning_rate": 4.2006269592476494e-05, "loss": 0.3144, "step": 510 },
    { "epoch": 1.61, "learning_rate": 4.197492163009404e-05, "loss": 0.1906, "step": 512 },
    { "epoch": 1.61, "learning_rate": 4.19435736677116e-05, "loss": 1.434, "step": 514 },
    { "epoch": 1.62, "learning_rate": 4.1912225705329157e-05, "loss": 0.5245, "step": 516 },
    { "epoch": 1.62, "learning_rate": 4.188087774294671e-05, "loss": 0.4078, "step": 518 },
    { "epoch": 1.63, "learning_rate": 4.1849529780564265e-05, "loss": 0.1577, "step": 520 },
    { "epoch": 1.64, "learning_rate": 4.181818181818182e-05, "loss": 0.1034, "step": 522 },
    { "epoch": 1.64, "learning_rate": 4.1786833855799374e-05, "loss": 0.1685, "step": 524 },
    { "epoch": 1.65, "learning_rate": 4.175548589341693e-05, "loss": 0.2631, "step": 526 },
    { "epoch": 1.66, "learning_rate": 4.172413793103448e-05, "loss": 1.1729, "step": 528 },
    { "epoch": 1.66, "learning_rate": 4.1692789968652043e-05, "loss": 0.4821, "step": 530 },
    { "epoch": 1.67, "learning_rate": 4.16614420062696e-05, "loss": 0.1952, "step": 532 },
    { "epoch": 1.67, "learning_rate": 4.1630094043887145e-05, "loss": 0.1677, "step": 534 },
    { "epoch": 1.68, "learning_rate": 4.15987460815047e-05, "loss": 0.7887, "step": 536 },
    { "epoch": 1.69, "learning_rate": 4.156739811912226e-05, "loss": 0.8876, "step": 538 },
    { "epoch": 1.69, "learning_rate": 4.1536050156739815e-05, "loss": 0.456, "step": 540 },
    { "epoch": 1.7, "learning_rate": 4.150470219435737e-05, "loss": 0.3973, "step": 542 },
    { "epoch": 1.71, "learning_rate": 4.1473354231974924e-05, "loss": 0.2558, "step": 544 },
    { "epoch": 1.71, "learning_rate": 4.144200626959248e-05, "loss": 0.8648, "step": 546 },
    { "epoch": 1.72, "learning_rate": 4.141065830721003e-05, "loss": 1.4092, "step": 548 },
    { "epoch": 1.72, "learning_rate": 4.1379310344827587e-05, "loss": 0.2367, "step": 550 },
    { "epoch": 1.73, "learning_rate": 4.134796238244514e-05, "loss": 0.1157, "step": 552 },
    { "epoch": 1.74, "learning_rate": 4.13166144200627e-05, "loss": 0.2577, "step": 554 },
    { "epoch": 1.74, "learning_rate": 4.1285266457680256e-05, "loss": 0.1873, "step": 556 },
    { "epoch": 1.75, "learning_rate": 4.1253918495297804e-05, "loss": 1.9265, "step": 558 },
    { "epoch": 1.76, "learning_rate": 4.122257053291536e-05, "loss": 0.7055, "step": 560 },
    { "epoch": 1.76, "learning_rate": 4.119122257053292e-05, "loss": 0.5346, "step": 562 },
    { "epoch": 1.77, "learning_rate": 4.1159874608150473e-05, "loss": 0.6616, "step": 564 },
    { "epoch": 1.77, "learning_rate": 4.112852664576803e-05, "loss": 0.534, "step": 566 },
    { "epoch": 1.78, "learning_rate": 4.109717868338558e-05, "loss": 0.8041, "step": 568 },
    { "epoch": 1.79, "learning_rate": 4.1065830721003136e-05, "loss": 0.3751, "step": 570 },
    { "epoch": 1.79, "learning_rate": 4.103448275862069e-05, "loss": 0.4831, "step": 572 },
    { "epoch": 1.8, "learning_rate": 4.1003134796238245e-05, "loss": 0.8319, "step": 574 },
    { "epoch": 1.81, "learning_rate": 4.09717868338558e-05, "loss": 0.3292, "step": 576 },
    { "epoch": 1.81, "learning_rate": 4.094043887147336e-05, "loss": 0.1887, "step": 578 },
    { "epoch": 1.82, "learning_rate": 4.0909090909090915e-05, "loss": 0.9955, "step": 580 },
    { "epoch": 1.82, "learning_rate": 4.087774294670846e-05, "loss": 0.4777, "step": 582 },
    { "epoch": 1.83, "learning_rate": 4.0846394984326017e-05, "loss": 0.9053, "step": 584 },
    { "epoch": 1.84, "learning_rate": 4.081504702194358e-05, "loss": 0.928, "step": 586 },
    { "epoch": 1.84, "learning_rate": 4.078369905956113e-05, "loss": 1.0885, "step": 588 },
    { "epoch": 1.85, "learning_rate": 4.0752351097178686e-05, "loss": 0.2145, "step": 590 },
    { "epoch": 1.86, "learning_rate": 4.072100313479624e-05, "loss": 0.1522, "step": 592 },
    { "epoch": 1.86, "learning_rate": 4.0689655172413795e-05, "loss": 0.5739, "step": 594 },
    { "epoch": 1.87, "learning_rate": 4.065830721003135e-05, "loss": 0.4077, "step": 596 },
    { "epoch": 1.87, "learning_rate": 4.0626959247648904e-05, "loss": 0.3718, "step": 598 },
    { "epoch": 1.88, "learning_rate": 4.059561128526646e-05, "loss": 0.2852, "step": 600 },
    { "epoch": 1.89, "learning_rate": 4.056426332288402e-05, "loss": 0.4187, "step": 602 },
    { "epoch": 1.89, "learning_rate": 4.0532915360501566e-05, "loss": 0.7516, "step": 604 },
    { "epoch": 1.9, "learning_rate": 4.050156739811912e-05, "loss": 0.0868, "step": 606 },
    { "epoch": 1.91, "learning_rate": 4.0470219435736675e-05, "loss": 0.4357, "step": 608 },
    { "epoch": 1.91, "learning_rate": 4.0438871473354236e-05, "loss": 0.5593, "step": 610 },
    { "epoch": 1.92, "learning_rate": 4.040752351097179e-05, "loss": 0.3828, "step": 612 },
    { "epoch": 1.92, "learning_rate": 4.0376175548589345e-05, "loss": 0.638, "step": 614 },
    { "epoch": 1.93, "learning_rate": 4.03448275862069e-05, "loss": 0.6698, "step": 616 },
    { "epoch": 1.94, "learning_rate": 4.0313479623824453e-05, "loss": 0.3484, "step": 618 },
    { "epoch": 1.94, "learning_rate": 4.028213166144201e-05, "loss": 1.1265, "step": 620 },
    { "epoch": 1.95, "learning_rate": 4.025078369905956e-05, "loss": 0.7538, "step": 622 },
    { "epoch": 1.96, "learning_rate": 4.0219435736677116e-05, "loss": 0.2539, "step": 624 },
    { "epoch": 1.96, "learning_rate": 4.018808777429468e-05, "loss": 0.5086, "step": 626 },
    { "epoch": 1.97, "learning_rate": 4.0156739811912225e-05, "loss": 0.8172, "step": 628 },
    { "epoch": 1.97, "learning_rate": 4.012539184952978e-05, "loss": 0.4001, "step": 630 },
    { "epoch": 1.98, "learning_rate": 4.0094043887147334e-05, "loss": 0.3098, "step": 632 },
    { "epoch": 1.99, "learning_rate": 4.0062695924764895e-05, "loss": 0.9891, "step": 634 },
    { "epoch": 1.99, "learning_rate": 4.003134796238245e-05, "loss": 0.5662, "step": 636 },
    { "epoch": 2.0, "learning_rate": 4e-05, "loss": 0.598, "step": 638 },
    { "epoch": 2.0, "eval_bleu": 0.22143979906815262, "eval_loss": 0.5837797522544861, "eval_runtime": 76.2397, "eval_samples_per_second": 4.224, "eval_steps_per_second": 1.062, "step": 638 },
    { "epoch": 2.01, "learning_rate": 3.996865203761756e-05, "loss": 0.4468, "step": 640 },
    { "epoch": 2.01, "learning_rate": 3.993730407523511e-05, "loss": 1.2271, "step": 642 },
    { "epoch": 2.02, "learning_rate": 3.9905956112852666e-05, "loss": 0.1244, "step": 644 },
    { "epoch": 2.03, "learning_rate": 3.987460815047022e-05, "loss": 0.0909, "step": 646 },
    { "epoch": 2.03, "learning_rate": 3.9843260188087775e-05, "loss": 0.9222, "step": 648 },
    { "epoch": 2.04, "learning_rate": 3.9811912225705336e-05, "loss": 0.12, "step": 650 },
    { "epoch": 2.04, "learning_rate": 3.9780564263322883e-05, "loss": 0.7242, "step": 652 },
    { "epoch": 2.05, "learning_rate": 3.974921630094044e-05, "loss": 1.1717, "step": 654 },
    { "epoch": 2.06, "learning_rate": 3.971786833855799e-05, "loss": 0.0869, "step": 656 },
    { "epoch": 2.06, "learning_rate": 3.968652037617555e-05, "loss": 0.1233, "step": 658 },
    { "epoch": 2.07, "learning_rate": 3.965517241379311e-05, "loss": 0.4636, "step": 660 },
    { "epoch": 2.08, "learning_rate": 3.962382445141066e-05, "loss": 0.2821, "step": 662 },
    { "epoch": 2.08, "learning_rate": 3.9592476489028216e-05, "loss": 0.0841, "step": 664 },
    { "epoch": 2.09, "learning_rate": 3.956112852664577e-05, "loss": 0.1022, "step": 666 },
    { "epoch": 2.09, "learning_rate": 3.9529780564263325e-05, "loss": 0.2407, "step": 668 },
    { "epoch": 2.1, "learning_rate": 3.949843260188088e-05, "loss": 0.1577, "step": 670 },
    { "epoch": 2.11, "learning_rate": 3.946708463949843e-05, "loss": 0.7505, "step": 672 },
    { "epoch": 2.11, "learning_rate": 3.9435736677115994e-05, "loss": 1.743, "step": 674 },
    { "epoch": 2.12, "learning_rate": 3.940438871473354e-05, "loss": 0.1138, "step": 676 },
    { "epoch": 2.13, "learning_rate": 3.9373040752351096e-05, "loss": 0.074, "step": 678 },
    { "epoch": 2.13, "learning_rate": 3.934169278996865e-05, "loss": 0.3786, "step": 680 },
    { "epoch": 2.14, "learning_rate": 3.931034482758621e-05, "loss": 0.4062, "step": 682 },
    { "epoch": 2.14, "learning_rate": 3.9278996865203766e-05, "loss": 0.0661, "step": 684 },
    { "epoch": 2.15, "learning_rate": 3.924764890282132e-05, "loss": 0.9205, "step": 686 },
    { "epoch": 2.16, "learning_rate": 3.9216300940438875e-05, "loss": 0.0813, "step": 688 },
    { "epoch": 2.16, "learning_rate": 3.918495297805643e-05, "loss": 0.6916, "step": 690 },
    { "epoch": 2.17, "learning_rate": 3.915360501567398e-05, "loss": 1.1737, "step": 692 },
    { "epoch": 2.18, "learning_rate": 3.912225705329154e-05, "loss": 0.3149, "step": 694 },
    { "epoch": 2.18, "learning_rate": 3.909090909090909e-05, "loss": 0.4842, "step": 696 },
    { "epoch": 2.19, "learning_rate": 3.9059561128526646e-05, "loss": 0.636, "step": 698 },
    { "epoch": 2.19, "learning_rate": 3.90282131661442e-05, "loss": 0.0862, "step": 700 },
    { "epoch": 2.2, "learning_rate": 3.8996865203761755e-05, "loss": 0.5014, "step": 702 },
    { "epoch": 2.21, "learning_rate": 3.896551724137931e-05, "loss": 0.5606, "step": 704 },
    { "epoch": 2.21, "learning_rate": 3.893416927899687e-05, "loss": 0.2375, "step": 706 },
    { "epoch": 2.22, "learning_rate": 3.8902821316614424e-05, "loss": 0.0881, "step": 708 },
    { "epoch": 2.23, "learning_rate": 3.887147335423197e-05, "loss": 0.2358, "step": 710 },
    { "epoch": 2.23, "learning_rate": 3.884012539184953e-05, "loss": 0.0943, "step": 712 },
    { "epoch": 2.24, "learning_rate": 3.880877742946709e-05, "loss": 0.7039, "step": 714 },
    { "epoch": 2.24, "learning_rate": 3.877742946708464e-05, "loss": 0.9661, "step": 716 },
    { "epoch": 2.25, "learning_rate": 3.8746081504702196e-05, "loss": 0.7479, "step": 718 },
    { "epoch": 2.26, "learning_rate": 3.871473354231975e-05, "loss": 0.1651, "step": 720 },
    { "epoch": 2.26, "learning_rate": 3.8683385579937305e-05, "loss": 0.0789, "step": 722 },
    { "epoch": 2.27, "learning_rate": 3.865203761755486e-05, "loss": 0.5148, "step": 724 },
    { "epoch": 2.28, "learning_rate": 3.862068965517241e-05, "loss": 0.5988, "step": 726 },
    { "epoch": 2.28, "learning_rate": 3.8589341692789974e-05, "loss": 0.1082, "step": 728 },
    { "epoch": 2.29, "learning_rate": 3.855799373040753e-05, "loss": 1.2606, "step": 730 },
    { "epoch": 2.29, "learning_rate": 3.852664576802508e-05, "loss": 0.321, "step": 732 },
    { "epoch": 2.3, "learning_rate": 3.849529780564263e-05, "loss": 0.7046, "step": 734 },
    { "epoch": 2.31, "learning_rate": 3.846394984326019e-05, "loss": 0.7642, "step": 736 },
    { "epoch": 2.31, "learning_rate": 3.8432601880877746e-05, "loss": 0.4196, "step": 738 },
    { "epoch": 2.32, "learning_rate": 3.84012539184953e-05, "loss": 0.7299, "step": 740 },
    { "epoch": 2.33, "learning_rate": 3.8369905956112854e-05, "loss": 0.9111, "step": 742 },
    { "epoch": 2.33, "learning_rate": 3.833855799373041e-05, "loss": 0.0798, "step": 744 },
    { "epoch": 2.34, "learning_rate": 3.830721003134796e-05, "loss": 0.2813, "step": 746 },
    { "epoch": 2.34, "learning_rate": 3.827586206896552e-05, "loss": 0.1674, "step": 748 },
    { "epoch": 2.35, "learning_rate": 3.824451410658307e-05, "loss": 0.074, "step": 750 },
    { "epoch": 2.36, "learning_rate": 3.821316614420063e-05, "loss": 0.6165, "step": 752 },
    { "epoch": 2.36, "learning_rate": 3.818181818181819e-05, "loss": 0.6605, "step": 754 },
    { "epoch": 2.37, "learning_rate": 3.815047021943574e-05, "loss": 0.3572, "step": 756 },
    { "epoch": 2.38, "learning_rate": 3.811912225705329e-05, "loss": 1.0105, "step": 758 },
    { "epoch": 2.38, "learning_rate": 3.808777429467085e-05, "loss": 0.0995, "step": 760 },
    { "epoch": 2.39, "learning_rate": 3.8056426332288404e-05, "loss": 0.1342, "step": 762 },
    { "epoch": 2.39, "learning_rate": 3.802507836990596e-05, "loss": 0.139, "step": 764 },
    { "epoch": 2.4, "learning_rate": 3.799373040752351e-05, "loss": 0.5829, "step": 766 },
    { "epoch": 2.41, "learning_rate": 3.796238244514107e-05, "loss": 0.6066, "step": 768 },
    { "epoch": 2.41, "learning_rate": 3.793103448275862e-05, "loss": 0.6555, "step": 770 },
    { "epoch": 2.42, "learning_rate": 3.7899686520376176e-05, "loss": 0.0645, "step": 772 },
    { "epoch": 2.43, "learning_rate": 3.786833855799373e-05, "loss": 0.5722, "step": 774 },
    { "epoch": 2.43, "learning_rate": 3.783699059561129e-05, "loss": 0.1646, "step": 776 },
    { "epoch": 2.44, "learning_rate": 3.7805642633228846e-05, "loss": 0.2397, "step": 778 },
    { "epoch": 2.45, "learning_rate": 3.777429467084639e-05, "loss": 0.1902, "step": 780 },
    { "epoch": 2.45, "learning_rate": 3.774294670846395e-05, "loss": 0.175, "step": 782 },
    { "epoch": 2.46, "learning_rate": 3.771159874608151e-05, "loss": 0.7741, "step": 784 },
    { "epoch": 2.46, "learning_rate": 3.768025078369906e-05, "loss": 0.1346, "step": 786 },
    { "epoch": 2.47, "learning_rate": 3.764890282131662e-05, "loss": 0.5639, "step": 788 },
    { "epoch": 2.48, "learning_rate": 3.761755485893417e-05, "loss": 0.595, "step": 790 },
    { "epoch": 2.48, "learning_rate": 3.7586206896551726e-05, "loss": 0.1199, "step": 792 },
    { "epoch": 2.49, "learning_rate": 3.755485893416928e-05, "loss": 0.8169, "step": 794 },
    { "epoch": 2.5, "learning_rate": 3.7523510971786834e-05, "loss": 0.1015, "step": 796 },
    { "epoch": 2.5, "learning_rate": 3.749216300940439e-05, "loss": 0.0699, "step": 798 },
    { "epoch": 2.51, "learning_rate": 3.746081504702195e-05, "loss": 0.5325, "step": 800 },
    { "epoch": 2.51, "learning_rate": 3.7429467084639504e-05, "loss": 0.2893, "step": 802 },
    { "epoch": 2.52, "learning_rate": 3.739811912225705e-05, "loss": 0.4056, "step": 804 },
    { "epoch": 2.53, "learning_rate": 3.7366771159874606e-05, "loss": 0.3284, "step": 806 },
    { "epoch": 2.53, "learning_rate": 3.733542319749217e-05, "loss": 0.1071, "step": 808 },
    { "epoch": 2.54, "learning_rate": 3.730407523510972e-05, "loss": 0.1821, "step": 810 },
    { "epoch": 2.55, "learning_rate": 3.7272727272727276e-05, "loss": 0.231, "step": 812 },
    { "epoch": 2.55, "learning_rate": 3.724137931034483e-05, "loss": 0.2871, "step": 814 },
    { "epoch": 2.56, "learning_rate": 3.7210031347962384e-05, "loss": 0.3931, "step": 816 },
    { "epoch": 2.56, "learning_rate": 3.717868338557994e-05, "loss": 0.6065, "step": 818 },
    { "epoch": 2.57, "learning_rate": 3.714733542319749e-05, "loss": 0.2118, "step": 820 },
    { "epoch": 2.58, "learning_rate": 3.711598746081505e-05, "loss": 0.0835, "step": 822 },
    { "epoch": 2.58, "learning_rate": 3.708463949843261e-05, "loss": 0.181, "step": 824 },
    { "epoch": 2.59, "learning_rate": 3.705329153605016e-05, "loss": 0.0591, "step": 826 },
    { "epoch": 2.6, "learning_rate": 3.702194357366771e-05, "loss": 0.1432, "step": 828 },
    { "epoch": 2.6, "learning_rate": 3.6990595611285264e-05, "loss": 0.4398, "step": 830 },
    { "epoch": 2.61, "learning_rate": 3.6959247648902825e-05, "loss": 0.3443, "step": 832 },
    { "epoch": 2.61, "learning_rate": 3.692789968652038e-05, "loss": 0.0413, "step": 834 },
    { "epoch": 2.62, "learning_rate": 3.6896551724137934e-05, "loss": 0.4842, "step": 836 },
    { "epoch": 2.63, "learning_rate": 3.686520376175549e-05, "loss": 0.5001, "step": 838 },
    { "epoch": 2.63, "learning_rate": 3.683385579937304e-05, "loss": 0.6096, "step": 840 },
    { "epoch": 2.64, "learning_rate": 3.68025078369906e-05, "loss": 0.1528, "step": 842 },
    { "epoch": 2.65, "learning_rate": 3.677115987460815e-05, "loss": 0.3878, "step": 844 },
    { "epoch": 2.65, "learning_rate": 3.6739811912225706e-05, "loss": 0.0934, "step": 846 },
    { "epoch": 2.66, "learning_rate": 3.670846394984327e-05, "loss": 0.3619, "step": 848 },
    { "epoch": 2.66, "learning_rate": 3.667711598746082e-05, "loss": 0.8191, "step": 850 },
    { "epoch": 2.67, "learning_rate": 3.664576802507837e-05, "loss": 0.2583, "step": 852 },
    { "epoch": 2.68, "learning_rate": 3.661442006269592e-05, "loss": 0.4955, "step": 854 },
    { "epoch": 2.68, "learning_rate": 3.6583072100313484e-05, "loss": 0.1636, "step": 856 },
    { "epoch": 2.69, "learning_rate": 3.655172413793104e-05, "loss": 0.1809, "step": 858 },
    { "epoch": 2.7, "learning_rate": 3.652037617554859e-05, "loss": 0.1182, "step": 860 },
    { "epoch": 2.7, "learning_rate": 3.648902821316615e-05, "loss": 1.4724, "step": 862 },
    { "epoch": 2.71, "learning_rate": 3.64576802507837e-05, "loss": 0.5265, "step": 864 },
    { "epoch": 2.71, "learning_rate": 3.6426332288401256e-05, "loss": 0.1006, "step": 866 },
    { "epoch": 2.72, "learning_rate": 3.639498432601881e-05, "loss": 0.9536, "step": 868 },
    { "epoch": 2.73, "learning_rate": 3.6363636363636364e-05, "loss": 0.7936, "step": 870 },
    { "epoch": 2.73, "learning_rate": 3.6332288401253925e-05, "loss": 0.1084, "step": 872 },
    { "epoch": 2.74, "learning_rate": 3.630094043887147e-05, "loss": 0.5039, "step": 874 },
    { "epoch": 2.75, "learning_rate": 3.626959247648903e-05, "loss": 0.2569, "step": 876 },
    { "epoch": 2.75, "learning_rate": 3.623824451410658e-05, "loss": 0.7229, "step": 878 },
    { "epoch": 2.76, "learning_rate": 3.620689655172414e-05, "loss": 0.1591, "step": 880 },
    { "epoch": 2.76, "learning_rate": 3.61755485893417e-05, "loss": 0.3813, "step": 882 },
    { "epoch": 2.77, "learning_rate": 3.614420062695925e-05, "loss": 0.5446, "step": 884 },
    { "epoch": 2.78, "learning_rate": 3.61128526645768e-05, "loss": 0.0682, "step": 886 },
    { "epoch": 2.78, "learning_rate": 3.608150470219436e-05, "loss": 0.1326, "step": 888 },
    { "epoch": 2.79, "learning_rate": 3.6050156739811914e-05, "loss": 0.2526, "step": 890 },
    { "epoch": 2.8, "learning_rate": 3.601880877742947e-05, "loss": 0.1167, "step": 892 },
    { "epoch": 2.8, "learning_rate": 3.598746081504702e-05, "loss": 0.2939, "step": 894 },
    { "epoch": 2.81, "learning_rate": 3.5956112852664584e-05, "loss": 0.4313, "step": 896 },
    { "epoch": 2.82, "learning_rate": 3.592476489028213e-05, "loss": 0.1435, "step": 898 },
    { "epoch": 2.82, "learning_rate": 3.5893416927899686e-05, "loss": 0.4394, "step": 900 },
    { "epoch": 2.83, "learning_rate": 3.586206896551724e-05, "loss": 0.4121, "step": 902 },
    { "epoch": 2.83, "learning_rate": 3.58307210031348e-05, "loss": 0.1662, "step": 904 },
    { "epoch": 2.84, "learning_rate": 3.5799373040752355e-05, "loss": 0.9565, "step": 906 },
    { "epoch": 2.85, "learning_rate": 3.576802507836991e-05, "loss": 0.2431, "step": 908 },
    { "epoch": 2.85, "learning_rate": 3.573667711598746e-05, "loss": 0.2913, "step": 910 },
    { "epoch": 2.86, "learning_rate": 3.570532915360502e-05, "loss": 0.3142, "step": 912 },
    { "epoch": 2.87, "learning_rate": 3.567398119122257e-05, "loss": 0.8352, "step": 914 },
    { "epoch": 2.87, "learning_rate": 3.564263322884013e-05, "loss": 0.2264, "step": 916 },
    { "epoch": 2.88, "learning_rate": 3.561128526645768e-05, "loss": 0.1313, "step": 918 },
    { "epoch": 2.88, "learning_rate": 3.557993730407524e-05, "loss": 0.2702, "step": 920 },
    { "epoch": 2.89, "learning_rate": 3.554858934169279e-05, "loss": 0.1185, "step": 922 },
    { "epoch": 2.9, "learning_rate": 3.5517241379310344e-05, "loss": 0.8046, "step": 924 },
    { "epoch": 2.9, "learning_rate": 3.54858934169279e-05, "loss": 0.8257, "step": 926 },
    { "epoch": 2.91, "learning_rate": 3.545454545454546e-05, "loss": 0.1377, "step": 928 },
    { "epoch": 2.92, "learning_rate": 3.5423197492163014e-05, "loss": 1.2093, "step": 930 },
    { "epoch": 2.92, "learning_rate": 3.539184952978057e-05, "loss": 0.6203, "step": 932 },
    { "epoch": 2.93, "learning_rate": 3.5360501567398116e-05, "loss": 0.5519, "step": 934 },
    { "epoch": 2.93, "learning_rate": 3.532915360501568e-05, "loss": 0.8382, "step": 936 },
    { "epoch": 2.94, "learning_rate": 3.529780564263323e-05, "loss": 0.3924, "step": 938 },
    { "epoch": 2.95, "learning_rate": 3.5266457680250785e-05, "loss": 0.344, "step": 940 },
    { "epoch": 2.95, "learning_rate": 3.523510971786834e-05, "loss": 0.1352, "step": 942 },
    { "epoch": 2.96, "learning_rate": 3.52037617554859e-05, "loss": 0.4352, "step": 944 },
    { "epoch": 2.97, "learning_rate": 3.517241379310345e-05, "loss": 0.3054, "step": 946 },
    { "epoch": 2.97, "learning_rate": 3.5141065830721e-05, "loss": 0.1816, "step": 948 },
    { "epoch": 2.98, "learning_rate": 3.510971786833856e-05, "loss": 0.1088, "step": 950 },
    { "epoch": 2.98, "learning_rate": 3.507836990595612e-05, "loss": 0.0878, "step": 952 },
    { "epoch": 2.99, "learning_rate": 3.504702194357367e-05, "loss": 0.5545, "step": 954 },
    { "epoch": 3.0, "learning_rate": 3.501567398119122e-05, "loss": 0.105, "step": 956 },
    { "epoch": 3.0, "eval_bleu": 0.23230339252724816, "eval_loss": 0.6111631989479065, "eval_runtime": 76.2702, "eval_samples_per_second": 4.222, "eval_steps_per_second": 1.062, "step": 957 },
    { "epoch": 3.0, "learning_rate": 3.4984326018808774e-05, "loss": 0.1126, "step": 958 },
    { "epoch": 3.01, "learning_rate": 3.4952978056426335e-05, "loss": 0.0723, "step": 960 },
    { "epoch": 3.02, "learning_rate": 3.492163009404389e-05, "loss": 0.108, "step": 962 },
    { "epoch": 3.02, "learning_rate": 3.4890282131661444e-05, "loss": 0.3811, "step": 964 },
    { "epoch": 3.03, "learning_rate": 3.4858934169279e-05, "loss": 0.3857, "step": 966 },
    { "epoch": 3.03, "learning_rate": 3.482758620689655e-05, "loss": 0.8567, "step": 968 },
    { "epoch": 3.04, "learning_rate": 3.479623824451411e-05, "loss": 0.0461, "step": 970 },
    { "epoch": 3.05, "learning_rate": 3.476489028213166e-05, "loss": 0.6084, "step": 972 },
    { "epoch": 3.05, "learning_rate": 3.4733542319749215e-05, "loss": 0.3917, "step": 974 },
    { "epoch": 3.06, "learning_rate": 3.4702194357366776e-05, "loss": 0.1739, "step": 976 },
    { "epoch": 3.07, "learning_rate": 3.467084639498433e-05, "loss": 0.0362, "step": 978 },
    { "epoch": 3.07, "learning_rate": 3.463949843260188e-05, "loss": 0.1756, "step": 980 },
    { "epoch": 3.08, "learning_rate": 3.460815047021943e-05, "loss": 0.0617, "step": 982 },
    { "epoch": 3.08, "learning_rate": 3.4576802507836994e-05, "loss": 0.1196, "step": 984 },
    { "epoch": 3.09, "learning_rate": 3.454545454545455e-05, "loss": 0.1394, "step": 986 },
    { "epoch": 3.1, "learning_rate": 3.45141065830721e-05, "loss": 0.1394, "step": 988 },
    { "epoch": 3.1, "learning_rate": 3.4482758620689657e-05, "loss": 0.1374, "step": 990 },
    { "epoch": 3.11, "learning_rate": 3.445141065830721e-05, "loss": 0.3252, "step": 992 },
    { "epoch": 3.12, "learning_rate": 3.4420062695924765e-05, "loss": 0.6278, "step": 994 },
    { "epoch": 3.12, "learning_rate": 3.438871473354232e-05, "loss": 0.2991, "step": 996 },
    { "epoch": 3.13, "learning_rate": 3.4357366771159874e-05, "loss": 0.036, "step": 998 },
    { "epoch": 3.13, "learning_rate": 3.4326018808777435e-05, "loss": 0.1012, "step": 1000 },
    { "epoch": 3.14, "learning_rate": 3.429467084639499e-05, "loss": 0.4556, "step": 1002 },
    { "epoch": 3.15, "learning_rate": 3.426332288401254e-05, "loss": 0.3598, "step": 1004 },
    { "epoch": 3.15, "learning_rate": 3.423197492163009e-05, "loss": 0.2032, "step": 1006 },
    { "epoch": 3.16, "learning_rate": 3.420062695924765e-05, "loss": 0.3862, "step": 1008 },
    { "epoch": 3.17, "learning_rate": 3.4169278996865206e-05, "loss": 1.1844, "step": 1010 },
    { "epoch": 3.17, "learning_rate": 3.413793103448276e-05, "loss": 0.6972, "step": 1012 },
    { "epoch": 3.18, "learning_rate": 3.4106583072100315e-05, "loss": 0.4301, "step": 1014 },
    { "epoch": 3.18, "learning_rate": 3.407523510971787e-05, "loss": 0.2322, "step": 1016 },
    { "epoch": 3.19, "learning_rate": 3.4043887147335424e-05, "loss": 0.1826, "step": 1018 },
    { "epoch": 3.2, "learning_rate": 3.401253918495298e-05, "loss": 0.0967, "step": 1020 },
    { "epoch": 3.2, "learning_rate": 3.398119122257053e-05, "loss": 0.22, "step": 1022 },
    { "epoch": 3.21, "learning_rate": 3.3949843260188093e-05, "loss": 0.2791, "step": 1024 },
    { "epoch": 3.22, "learning_rate": 3.391849529780565e-05, "loss": 0.0812, "step": 1026 },
    { "epoch": 3.22, "learning_rate": 3.3887147335423195e-05, "loss": 0.2805, "step": 1028 },
    { "epoch": 3.23, "learning_rate": 3.3855799373040756e-05, "loss": 0.3116, "step": 1030 },
    { "epoch": 3.24, "learning_rate": 3.382445141065831e-05, "loss": 0.7543, "step": 1032 },
    { "epoch": 3.24, "learning_rate": 3.3793103448275865e-05, "loss": 0.2767, "step": 1034 },
    { "epoch": 3.25, "learning_rate": 3.376175548589342e-05, "loss": 0.3967, "step": 1036 },
    { "epoch": 3.25, "learning_rate": 3.3730407523510974e-05, "loss": 0.3185, "step": 1038 },
    { "epoch": 3.26, "learning_rate": 3.369905956112853e-05, "loss": 0.0727, "step": 1040 },
    { "epoch": 3.27, "learning_rate": 3.366771159874608e-05, "loss": 0.7915, "step": 1042 },
    { "epoch": 3.27, "learning_rate": 3.3636363636363636e-05, "loss": 0.288, "step": 1044 },
    { "epoch": 3.28, "learning_rate": 3.360501567398119e-05, "loss": 0.2361, "step": 1046 },
    { "epoch": 3.29, "learning_rate": 3.357366771159875e-05, "loss": 0.4873, "step": 1048 },
    { "epoch": 3.29, "learning_rate": 3.35423197492163e-05, "loss": 0.3298, "step": 1050 },
    { "epoch": 3.3, "learning_rate": 3.3510971786833854e-05, "loss": 0.4757, "step": 1052 },
    { "epoch": 3.3, "learning_rate": 3.3479623824451415e-05, "loss": 0.0676, "step": 1054 },
    { "epoch": 3.31, "learning_rate": 3.344827586206897e-05, "loss": 0.5626, "step": 1056 },
    { "epoch": 3.32, "learning_rate": 3.3416927899686523e-05, "loss": 0.0133, "step": 1058 },
    { "epoch": 3.32, "learning_rate": 3.338557993730408e-05, "loss": 0.0623, "step": 1060 },
    { "epoch": 3.33, "learning_rate": 3.335423197492163e-05, "loss": 0.7427, "step": 1062 },
    { "epoch": 3.34,
|
"learning_rate": 3.3322884012539186e-05, |
|
"loss": 0.7643, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 3.329153605015674e-05, |
|
"loss": 0.5679, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 3.3260188087774295e-05, |
|
"loss": 0.0973, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 3.322884012539185e-05, |
|
"loss": 0.2743, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 3.319749216300941e-05, |
|
"loss": 0.1096, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 3.316614420062696e-05, |
|
"loss": 0.2269, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 3.313479623824451e-05, |
|
"loss": 0.087, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 3.310344827586207e-05, |
|
"loss": 0.0691, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 3.307210031347963e-05, |
|
"loss": 0.2738, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 3.304075235109718e-05, |
|
"loss": 0.1897, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 3.3009404388714736e-05, |
|
"loss": 0.0346, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 3.297805642633229e-05, |
|
"loss": 0.4387, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 3.2946708463949845e-05, |
|
"loss": 0.2304, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 3.29153605015674e-05, |
|
"loss": 0.0823, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 3.2884012539184953e-05, |
|
"loss": 0.2337, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 3.285266457680251e-05, |
|
"loss": 0.0941, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 3.282131661442007e-05, |
|
"loss": 0.277, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 3.2789968652037616e-05, |
|
"loss": 0.1885, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 3.275862068965517e-05, |
|
"loss": 0.7624, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 3.272727272727273e-05, |
|
"loss": 0.4512, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 3.2695924764890286e-05, |
|
"loss": 0.3692, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 3.266457680250784e-05, |
|
"loss": 0.0467, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 3.2633228840125395e-05, |
|
"loss": 0.1802, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 3.260188087774295e-05, |
|
"loss": 0.0781, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 3.25705329153605e-05, |
|
"loss": 0.3144, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 3.253918495297806e-05, |
|
"loss": 0.1413, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 3.250783699059561e-05, |
|
"loss": 0.5225, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 3.2476489028213166e-05, |
|
"loss": 0.4774, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 3.244514106583073e-05, |
|
"loss": 0.4574, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 3.2413793103448275e-05, |
|
"loss": 0.0705, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 3.238244514106583e-05, |
|
"loss": 0.7696, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 3.235109717868339e-05, |
|
"loss": 0.2546, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 3.2319749216300945e-05, |
|
"loss": 0.0952, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 3.22884012539185e-05, |
|
"loss": 0.1475, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 3.2257053291536046e-05, |
|
"loss": 0.0742, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 3.222570532915361e-05, |
|
"loss": 0.4851, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 3.219435736677116e-05, |
|
"loss": 0.0518, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 3.2163009404388716e-05, |
|
"loss": 0.0556, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 3.213166144200627e-05, |
|
"loss": 0.1462, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 3.2100313479623825e-05, |
|
"loss": 0.1303, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.206896551724138e-05, |
|
"loss": 0.4243, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 3.203761755485893e-05, |
|
"loss": 0.0866, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 3.200626959247649e-05, |
|
"loss": 0.2413, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.197492163009405e-05, |
|
"loss": 0.0898, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 3.19435736677116e-05, |
|
"loss": 0.0549, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 3.191222570532916e-05, |
|
"loss": 0.4616, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 3.1880877742946705e-05, |
|
"loss": 0.5832, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 3.1849529780564266e-05, |
|
"loss": 0.0782, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 3.181818181818182e-05, |
|
"loss": 0.1716, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 3.1786833855799375e-05, |
|
"loss": 0.0763, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 3.175548589341693e-05, |
|
"loss": 0.2826, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.172413793103448e-05, |
|
"loss": 0.3216, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 3.169278996865204e-05, |
|
"loss": 0.1686, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.166144200626959e-05, |
|
"loss": 0.2955, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 3.1630094043887146e-05, |
|
"loss": 0.4539, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 3.159874608150471e-05, |
|
"loss": 0.3451, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.156739811912226e-05, |
|
"loss": 0.1087, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 3.1536050156739816e-05, |
|
"loss": 0.5204, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 3.1504702194357363e-05, |
|
"loss": 0.5709, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.1473354231974925e-05, |
|
"loss": 0.2778, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 3.144200626959248e-05, |
|
"loss": 0.2798, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.141065830721003e-05, |
|
"loss": 0.2836, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 3.137931034482759e-05, |
|
"loss": 0.0983, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 3.134796238244515e-05, |
|
"loss": 0.069, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.1316614420062696e-05, |
|
"loss": 0.3286, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 3.128526645768025e-05, |
|
"loss": 0.0437, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 3.1253918495297805e-05, |
|
"loss": 0.2272, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.1222570532915366e-05, |
|
"loss": 0.8885, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 3.119122257053292e-05, |
|
"loss": 0.5747, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.1159874608150474e-05, |
|
"loss": 0.2236, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 3.112852664576802e-05, |
|
"loss": 0.1701, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 3.109717868338558e-05, |
|
"loss": 0.1269, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 3.106583072100314e-05, |
|
"loss": 0.6536, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 3.103448275862069e-05, |
|
"loss": 0.3657, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 3.1003134796238246e-05, |
|
"loss": 0.6285, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 3.09717868338558e-05, |
|
"loss": 1.1006, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 3.0940438871473355e-05, |
|
"loss": 0.2188, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.090909090909091e-05, |
|
"loss": 0.8207, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 3.087774294670846e-05, |
|
"loss": 0.4003, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 3.0846394984326024e-05, |
|
"loss": 0.1426, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 3.081504702194358e-05, |
|
"loss": 0.7219, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 3.0783699059561126e-05, |
|
"loss": 0.3991, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 3.075235109717868e-05, |
|
"loss": 0.1892, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 3.072100313479624e-05, |
|
"loss": 0.1499, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 3.0689655172413796e-05, |
|
"loss": 0.4832, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 3.065830721003135e-05, |
|
"loss": 0.6595, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 3.0626959247648904e-05, |
|
"loss": 0.3086, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 3.059561128526646e-05, |
|
"loss": 0.0469, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 3.056426332288401e-05, |
|
"loss": 0.2976, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 3.053291536050157e-05, |
|
"loss": 0.1056, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 3.0501567398119125e-05, |
|
"loss": 0.1464, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 3.047021943573668e-05, |
|
"loss": 0.324, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 3.0438871473354237e-05, |
|
"loss": 0.1886, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 3.0407523510971785e-05, |
|
"loss": 0.0803, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 3.0376175548589342e-05, |
|
"loss": 0.6133, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 3.0344827586206897e-05, |
|
"loss": 0.446, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 3.0313479623824454e-05, |
|
"loss": 0.5536, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 3.028213166144201e-05, |
|
"loss": 1.2157, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 3.0250783699059566e-05, |
|
"loss": 0.1825, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 3.0219435736677114e-05, |
|
"loss": 0.0522, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 3.018808777429467e-05, |
|
"loss": 0.6611, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 3.0156739811912226e-05, |
|
"loss": 0.6795, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 3.0125391849529784e-05, |
|
"loss": 0.4723, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 3.0094043887147338e-05, |
|
"loss": 0.29, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 3.0062695924764896e-05, |
|
"loss": 0.7305, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 3.0031347962382443e-05, |
|
"loss": 0.5894, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 3e-05, |
|
"loss": 0.9304, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_bleu": 0.23021236407798268, |
|
"eval_loss": 0.6479771733283997, |
|
"eval_runtime": 76.2455, |
|
"eval_samples_per_second": 4.223, |
|
"eval_steps_per_second": 1.062, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 2.9968652037617555e-05, |
|
"loss": 0.5475, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 2.9937304075235113e-05, |
|
"loss": 0.1194, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 2.9905956112852667e-05, |
|
"loss": 0.0192, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 2.9874608150470225e-05, |
|
"loss": 0.0354, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 2.9843260188087772e-05, |
|
"loss": 0.1959, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 2.981191222570533e-05, |
|
"loss": 0.6525, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 2.9780564263322884e-05, |
|
"loss": 0.0693, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 2.9749216300940442e-05, |
|
"loss": 0.4917, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 2.9717868338557996e-05, |
|
"loss": 0.1523, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 2.9686520376175547e-05, |
|
"loss": 0.262, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 2.96551724137931e-05, |
|
"loss": 0.5281, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 2.962382445141066e-05, |
|
"loss": 0.0967, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 2.9592476489028214e-05, |
|
"loss": 0.1589, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 2.956112852664577e-05, |
|
"loss": 0.4952, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 2.9529780564263326e-05, |
|
"loss": 0.5283, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 2.9498432601880876e-05, |
|
"loss": 0.4887, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 2.946708463949843e-05, |
|
"loss": 0.341, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 2.943573667711599e-05, |
|
"loss": 0.0833, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 2.9404388714733543e-05, |
|
"loss": 0.2342, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 2.93730407523511e-05, |
|
"loss": 0.1071, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 2.9341692789968655e-05, |
|
"loss": 0.1208, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 2.9310344827586206e-05, |
|
"loss": 0.2908, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 2.927899686520376e-05, |
|
"loss": 0.0633, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 2.9247648902821318e-05, |
|
"loss": 0.3758, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 2.9216300940438872e-05, |
|
"loss": 0.0444, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 2.918495297805643e-05, |
|
"loss": 0.0413, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 2.9153605015673984e-05, |
|
"loss": 0.3736, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 2.9122257053291535e-05, |
|
"loss": 0.2654, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 2.909090909090909e-05, |
|
"loss": 0.0624, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 2.9059561128526647e-05, |
|
"loss": 0.4802, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 2.90282131661442e-05, |
|
"loss": 0.3167, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 2.899686520376176e-05, |
|
"loss": 0.0891, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 2.8965517241379313e-05, |
|
"loss": 0.1864, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 2.8934169278996864e-05, |
|
"loss": 0.202, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 2.890282131661442e-05, |
|
"loss": 0.0599, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 2.8871473354231976e-05, |
|
"loss": 0.4572, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 2.884012539184953e-05, |
|
"loss": 0.164, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 2.8808777429467088e-05, |
|
"loss": 0.1774, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 2.8777429467084643e-05, |
|
"loss": 0.2898, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 2.8746081504702193e-05, |
|
"loss": 0.098, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 2.8714733542319748e-05, |
|
"loss": 0.3329, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 2.8683385579937305e-05, |
|
"loss": 0.2385, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 2.865203761755486e-05, |
|
"loss": 0.3153, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 2.8620689655172417e-05, |
|
"loss": 0.4507, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 2.8589341692789972e-05, |
|
"loss": 0.1456, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 2.8557993730407523e-05, |
|
"loss": 0.3013, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 2.8526645768025077e-05, |
|
"loss": 0.7018, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 2.8495297805642635e-05, |
|
"loss": 0.3912, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 2.846394984326019e-05, |
|
"loss": 0.135, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 2.8432601880877747e-05, |
|
"loss": 0.2511, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 2.84012539184953e-05, |
|
"loss": 0.1477, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 2.8369905956112852e-05, |
|
"loss": 0.2851, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 2.833855799373041e-05, |
|
"loss": 0.0683, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 2.8307210031347964e-05, |
|
"loss": 0.0769, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 2.8275862068965518e-05, |
|
"loss": 0.8171, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 2.8244514106583076e-05, |
|
"loss": 0.0492, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 2.8213166144200627e-05, |
|
"loss": 0.067, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 2.818181818181818e-05, |
|
"loss": 0.4145, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 2.815047021943574e-05, |
|
"loss": 0.0523, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 2.8119122257053293e-05, |
|
"loss": 0.2317, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 2.8087774294670848e-05, |
|
"loss": 0.0684, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 2.8056426332288405e-05, |
|
"loss": 0.4624, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 2.8025078369905956e-05, |
|
"loss": 0.0585, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 2.799373040752351e-05, |
|
"loss": 0.1308, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 2.7962382445141068e-05, |
|
"loss": 0.8529, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 2.7931034482758622e-05, |
|
"loss": 0.1775, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 2.7899686520376177e-05, |
|
"loss": 0.0883, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 2.7868338557993734e-05, |
|
"loss": 0.3945, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 2.7836990595611285e-05, |
|
"loss": 0.5306, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 2.780564263322884e-05, |
|
"loss": 0.2451, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 2.7774294670846397e-05, |
|
"loss": 0.469, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 2.774294670846395e-05, |
|
"loss": 0.248, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 2.7711598746081506e-05, |
|
"loss": 1.7219, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 2.7680250783699064e-05, |
|
"loss": 0.1982, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 2.7648902821316615e-05, |
|
"loss": 0.6336, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 2.761755485893417e-05, |
|
"loss": 0.0537, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 2.7586206896551727e-05, |
|
"loss": 0.0902, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 2.755485893416928e-05, |
|
"loss": 0.5485, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 2.7523510971786835e-05, |
|
"loss": 0.2901, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 2.7492163009404393e-05, |
|
"loss": 0.1135, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 2.7460815047021944e-05, |
|
"loss": 0.0811, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 2.7429467084639498e-05, |
|
"loss": 0.0689, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 2.7398119122257056e-05, |
|
"loss": 0.5802, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 2.736677115987461e-05, |
|
"loss": 0.075, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 2.7335423197492164e-05, |
|
"loss": 0.3645, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 2.7304075235109722e-05, |
|
"loss": 0.0482, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 2.7272727272727273e-05, |
|
"loss": 0.4667, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 2.7241379310344827e-05, |
|
"loss": 0.5588, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 2.7210031347962385e-05, |
|
"loss": 0.0841, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 2.717868338557994e-05, |
|
"loss": 0.324, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 2.7147335423197494e-05, |
|
"loss": 0.394, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 2.711598746081505e-05, |
|
"loss": 0.6257, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 2.7084639498432602e-05, |
|
"loss": 0.4819, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 2.7053291536050157e-05, |
|
"loss": 0.5093, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 2.7021943573667714e-05, |
|
"loss": 0.0432, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 2.699059561128527e-05, |
|
"loss": 0.0653, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 2.6959247648902823e-05, |
|
"loss": 0.0196, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 2.6927899686520374e-05, |
|
"loss": 0.2154, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 2.689655172413793e-05, |
|
"loss": 0.2095, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 2.6865203761755486e-05, |
|
"loss": 0.0493, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 2.6833855799373044e-05, |
|
"loss": 0.9636, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 2.6802507836990598e-05, |
|
"loss": 0.181, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.6771159874608152e-05, |
|
"loss": 0.1509, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.6739811912225703e-05, |
|
"loss": 0.1301, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 2.670846394984326e-05, |
|
"loss": 0.0986, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 2.6677115987460815e-05, |
|
"loss": 0.2525, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 2.6645768025078373e-05, |
|
"loss": 0.0452, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 2.6614420062695927e-05, |
|
"loss": 0.0435, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 2.658307210031348e-05, |
|
"loss": 0.2368, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 2.6551724137931032e-05, |
|
"loss": 0.0974, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 2.652037617554859e-05, |
|
"loss": 0.0475, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 2.6489028213166144e-05, |
|
"loss": 0.1832, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 2.6457680250783702e-05, |
|
"loss": 0.4982, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 2.6426332288401256e-05, |
|
"loss": 0.2645, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 2.639498432601881e-05, |
|
"loss": 0.1578, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 2.636363636363636e-05, |
|
"loss": 0.1172, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 2.633228840125392e-05, |
|
"loss": 0.3035, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 2.6300940438871474e-05, |
|
"loss": 0.7947, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 2.626959247648903e-05, |
|
"loss": 0.0331, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 2.6238244514106586e-05, |
|
"loss": 0.04, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 2.620689655172414e-05, |
|
"loss": 0.2473, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 2.617554858934169e-05, |
|
"loss": 0.958, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 2.614420062695925e-05, |
|
"loss": 0.1583, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 2.6112852664576803e-05, |
|
"loss": 0.4548, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 2.608150470219436e-05, |
|
"loss": 0.3725, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 2.6050156739811915e-05, |
|
"loss": 0.302, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 2.601880877742947e-05, |
|
"loss": 0.0498, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 2.598746081504702e-05, |
|
"loss": 0.2619, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 2.5956112852664578e-05, |
|
"loss": 0.0683, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 2.5924764890282132e-05, |
|
"loss": 0.3522, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 2.589341692789969e-05, |
|
"loss": 0.533, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 2.5862068965517244e-05, |
|
"loss": 0.1604, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 2.5830721003134802e-05, |
|
"loss": 0.0985, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 2.579937304075235e-05, |
|
"loss": 0.5565, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 2.5768025078369907e-05, |
|
"loss": 0.045, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 2.573667711598746e-05, |
|
"loss": 0.0322, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 2.570532915360502e-05, |
|
"loss": 0.0435, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 2.5673981191222573e-05, |
|
"loss": 0.0321, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 2.5642633228840124e-05, |
|
"loss": 0.1644, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 2.561128526645768e-05, |
|
"loss": 0.0317, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 2.5579937304075236e-05, |
|
"loss": 0.0893, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 2.554858934169279e-05, |
|
"loss": 0.1798, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 2.551724137931035e-05, |
|
"loss": 0.2309, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 2.5485893416927903e-05, |
|
"loss": 0.142, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 2.5454545454545454e-05, |
|
"loss": 0.2153, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 2.5423197492163008e-05, |
|
"loss": 0.3906, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 2.5391849529780566e-05, |
|
"loss": 0.0391, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 2.536050156739812e-05, |
|
"loss": 0.2337, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 2.5329153605015678e-05, |
|
"loss": 0.0839, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 2.5297805642633232e-05, |
|
"loss": 0.0837, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 2.5266457680250783e-05, |
|
"loss": 0.1193, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 2.5235109717868337e-05, |
|
"loss": 0.6238, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 2.5203761755485895e-05, |
|
"loss": 0.2111, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 2.517241379310345e-05, |
|
"loss": 0.2726, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 2.5141065830721007e-05, |
|
"loss": 0.1368, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 2.510971786833856e-05, |
|
"loss": 0.3613, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 2.5078369905956112e-05, |
|
"loss": 0.2065, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 2.5047021943573666e-05, |
|
"loss": 0.4981, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 2.5015673981191224e-05, |
|
"loss": 0.0737, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_bleu": 0.24300561580847288, |
|
"eval_loss": 0.699813723564148, |
|
"eval_runtime": 75.6288, |
|
"eval_samples_per_second": 4.258, |
|
"eval_steps_per_second": 1.071, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 2.498432601880878e-05, |
|
"loss": 0.165, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 2.4952978056426333e-05, |
|
"loss": 0.3377, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 2.4921630094043887e-05, |
|
"loss": 0.0247, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 5.02, |
|
"learning_rate": 2.4890282131661445e-05, |
|
"loss": 0.5538, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 2.4858934169278996e-05, |
|
"loss": 0.293, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 2.4827586206896553e-05, |
|
"loss": 0.4556, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 5.04, |
|
"learning_rate": 2.4796238244514108e-05, |
|
"loss": 0.1204, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 2.4764890282131662e-05, |
|
"loss": 0.0708, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 2.4733542319749216e-05, |
|
"loss": 0.1078, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 2.4702194357366774e-05, |
|
"loss": 0.0905, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 2.4670846394984325e-05, |
|
"loss": 0.1489, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 5.07, |
|
"learning_rate": 2.4639498432601883e-05, |
|
"loss": 0.0293, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 2.4608150470219437e-05, |
|
"loss": 0.1292, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 2.457680250783699e-05, |
|
"loss": 0.0614, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 2.4545454545454545e-05, |
|
"loss": 0.7514, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 2.4514106583072103e-05, |
|
"loss": 0.4222, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 2.4482758620689654e-05, |
|
"loss": 0.0385, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 5.11, |
|
"learning_rate": 2.4451410658307212e-05, |
|
"loss": 0.3357, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 2.4420062695924766e-05, |
|
"loss": 0.0195, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 2.438871473354232e-05, |
|
"loss": 0.0473, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 2.4357366771159875e-05, |
|
"loss": 0.2076, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 2.4326018808777432e-05, |
|
"loss": 0.034, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 2.4294670846394983e-05, |
|
"loss": 0.0428, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 2.426332288401254e-05, |
|
"loss": 0.1964, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 2.4231974921630095e-05, |
|
"loss": 0.0614, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 5.16, |
|
"learning_rate": 2.420062695924765e-05, |
|
"loss": 0.1043, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 2.4169278996865204e-05, |
|
"loss": 0.0978, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 2.413793103448276e-05, |
|
"loss": 0.085, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 2.4106583072100313e-05, |
|
"loss": 0.0285, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 2.407523510971787e-05, |
|
"loss": 0.1226, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 2.4043887147335425e-05, |
|
"loss": 0.0345, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 2.401253918495298e-05, |
|
"loss": 0.6952, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 2.3981191222570533e-05, |
|
"loss": 0.0175, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 2.394984326018809e-05, |
|
"loss": 0.083, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 2.3918495297805642e-05, |
|
"loss": 0.0958, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 5.22, |
|
"learning_rate": 2.38871473354232e-05, |
|
"loss": 0.1784, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 2.3855799373040754e-05, |
|
"loss": 0.2114, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 2.3824451410658308e-05, |
|
"loss": 0.0272, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 2.3793103448275862e-05, |
|
"loss": 0.3051, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 2.376175548589342e-05, |
|
"loss": 0.2488, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 2.373040752351097e-05, |
|
"loss": 0.1616, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 2.369905956112853e-05, |
|
"loss": 0.2637, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 2.3667711598746083e-05, |
|
"loss": 0.1391, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 2.3636363636363637e-05, |
|
"loss": 0.2688, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 2.360501567398119e-05, |
|
"loss": 0.3939, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 2.3573667711598746e-05, |
|
"loss": 0.4107, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 5.29, |
|
"learning_rate": 2.35423197492163e-05, |
|
"loss": 0.1479, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 2.3510971786833858e-05, |
|
"loss": 0.0166, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 2.347962382445141e-05, |
|
"loss": 0.0706, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 5.31, |
|
"learning_rate": 2.3448275862068967e-05, |
|
"loss": 0.1003, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 2.341692789968652e-05, |
|
"loss": 0.2357, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 2.3385579937304075e-05, |
|
"loss": 0.3832, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 2.335423197492163e-05, |
|
"loss": 0.1109, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 2.3322884012539187e-05, |
|
"loss": 0.0448, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 5.34, |
|
"learning_rate": 2.3291536050156738e-05, |
|
"loss": 0.5746, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 2.3260188087774296e-05, |
|
"loss": 0.1064, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 2.322884012539185e-05, |
|
"loss": 0.0285, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 2.3197492163009404e-05, |
|
"loss": 0.04, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 2.316614420062696e-05, |
|
"loss": 0.0147, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 2.3134796238244516e-05, |
|
"loss": 0.4445, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 2.3103448275862067e-05, |
|
"loss": 0.0545, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 2.3072100313479625e-05, |
|
"loss": 0.1816, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 5.39, |
|
"learning_rate": 2.304075235109718e-05, |
|
"loss": 0.3879, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 2.3009404388714734e-05, |
|
"loss": 0.095, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 2.2978056426332288e-05, |
|
"loss": 0.2232, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 2.2946708463949846e-05, |
|
"loss": 0.3626, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 2.2915360501567397e-05, |
|
"loss": 0.0528, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 2.2884012539184954e-05, |
|
"loss": 0.1118, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 2.285266457680251e-05, |
|
"loss": 0.2373, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 2.2821316614420063e-05, |
|
"loss": 0.2109, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 2.2789968652037617e-05, |
|
"loss": 0.7088, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 2.2758620689655175e-05, |
|
"loss": 0.1491, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 2.272727272727273e-05, |
|
"loss": 0.0294, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 2.2695924764890284e-05, |
|
"loss": 0.3541, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 2.2664576802507838e-05, |
|
"loss": 0.1801, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 5.47, |
|
"learning_rate": 2.2633228840125392e-05, |
|
"loss": 0.0257, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 2.2601880877742947e-05, |
|
"loss": 0.1626, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 2.2570532915360504e-05, |
|
"loss": 0.8919, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 2.253918495297806e-05, |
|
"loss": 0.0479, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 2.2507836990595613e-05, |
|
"loss": 0.3951, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 2.2476489028213167e-05, |
|
"loss": 0.0252, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 2.244514106583072e-05, |
|
"loss": 0.3512, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 2.2413793103448276e-05, |
|
"loss": 0.4041, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 5.52, |
|
"learning_rate": 2.2382445141065833e-05, |
|
"loss": 0.0835, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 2.2351097178683388e-05, |
|
"loss": 0.7581, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 2.2319749216300942e-05, |
|
"loss": 0.3331, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 2.2288401253918496e-05, |
|
"loss": 0.0736, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 2.225705329153605e-05, |
|
"loss": 0.1169, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 2.2225705329153605e-05, |
|
"loss": 0.1255, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 5.56, |
|
"learning_rate": 2.219435736677116e-05, |
|
"loss": 0.1737, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 2.2163009404388717e-05, |
|
"loss": 0.0969, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 5.57, |
|
"learning_rate": 2.213166144200627e-05, |
|
"loss": 0.0086, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 2.2100313479623826e-05, |
|
"loss": 0.0897, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 2.206896551724138e-05, |
|
"loss": 0.0578, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 2.2037617554858934e-05, |
|
"loss": 0.1835, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 2.200626959247649e-05, |
|
"loss": 0.6684, |
|
"step": 1786 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 2.1974921630094046e-05, |
|
"loss": 0.1879, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 5.61, |
|
"learning_rate": 2.19435736677116e-05, |
|
"loss": 0.356, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 2.1912225705329155e-05, |
|
"loss": 0.4864, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 2.188087774294671e-05, |
|
"loss": 0.1917, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 2.1849529780564264e-05, |
|
"loss": 0.2009, |
|
"step": 1796 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 2.1818181818181818e-05, |
|
"loss": 0.2977, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 2.1786833855799376e-05, |
|
"loss": 0.3496, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 2.175548589341693e-05, |
|
"loss": 0.3726, |
|
"step": 1802 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 2.1724137931034484e-05, |
|
"loss": 0.2824, |
|
"step": 1804 |
|
}, |
|
{ |
|
"epoch": 5.66, |
|
"learning_rate": 2.169278996865204e-05, |
|
"loss": 0.3257, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 2.1661442006269593e-05, |
|
"loss": 0.5496, |
|
"step": 1808 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 2.1630094043887147e-05, |
|
"loss": 0.1033, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 2.1598746081504705e-05, |
|
"loss": 0.0302, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 2.156739811912226e-05, |
|
"loss": 0.2603, |
|
"step": 1814 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 2.1536050156739813e-05, |
|
"loss": 0.1804, |
|
"step": 1816 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 2.1504702194357368e-05, |
|
"loss": 0.1968, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 2.1473354231974925e-05, |
|
"loss": 0.1893, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 2.1442006269592476e-05, |
|
"loss": 0.0244, |
|
"step": 1822 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 2.1410658307210034e-05, |
|
"loss": 0.1275, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 2.137931034482759e-05, |
|
"loss": 0.5424, |
|
"step": 1826 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 2.1347962382445143e-05, |
|
"loss": 0.1437, |
|
"step": 1828 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 2.1316614420062697e-05, |
|
"loss": 0.3937, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 5.74, |
|
"learning_rate": 2.1285266457680255e-05, |
|
"loss": 0.0505, |
|
"step": 1832 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 2.1253918495297806e-05, |
|
"loss": 0.2163, |
|
"step": 1834 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 2.1222570532915363e-05, |
|
"loss": 0.1776, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 2.1191222570532918e-05, |
|
"loss": 0.136, |
|
"step": 1838 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 2.1159874608150472e-05, |
|
"loss": 0.2802, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 2.1128526645768026e-05, |
|
"loss": 0.3908, |
|
"step": 1842 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 2.1097178683385584e-05, |
|
"loss": 0.1034, |
|
"step": 1844 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 2.1065830721003135e-05, |
|
"loss": 0.3721, |
|
"step": 1846 |
|
}, |
|
{ |
|
"epoch": 5.79, |
|
"learning_rate": 2.1034482758620692e-05, |
|
"loss": 0.0378, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 2.1003134796238247e-05, |
|
"loss": 0.1305, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 2.09717868338558e-05, |
|
"loss": 0.2223, |
|
"step": 1852 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 2.0940438871473355e-05, |
|
"loss": 0.0197, |
|
"step": 1854 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 2.090909090909091e-05, |
|
"loss": 0.0702, |
|
"step": 1856 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 2.0877742946708464e-05, |
|
"loss": 0.0221, |
|
"step": 1858 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 2.0846394984326022e-05, |
|
"loss": 0.0653, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 2.0815047021943573e-05, |
|
"loss": 0.0245, |
|
"step": 1862 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 2.078369905956113e-05, |
|
"loss": 0.0709, |
|
"step": 1864 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 2.0752351097178685e-05, |
|
"loss": 0.4767, |
|
"step": 1866 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 2.072100313479624e-05, |
|
"loss": 0.2231, |
|
"step": 1868 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 2.0689655172413793e-05, |
|
"loss": 0.0583, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 2.065830721003135e-05, |
|
"loss": 0.0712, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 2.0626959247648902e-05, |
|
"loss": 0.2551, |
|
"step": 1874 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 2.059561128526646e-05, |
|
"loss": 0.3786, |
|
"step": 1876 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 2.0564263322884014e-05, |
|
"loss": 0.2306, |
|
"step": 1878 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 2.0532915360501568e-05, |
|
"loss": 0.3652, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 2.0501567398119123e-05, |
|
"loss": 0.2206, |
|
"step": 1882 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 2.047021943573668e-05, |
|
"loss": 0.1188, |
|
"step": 1884 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 2.043887147335423e-05, |
|
"loss": 0.1001, |
|
"step": 1886 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 2.040752351097179e-05, |
|
"loss": 0.0433, |
|
"step": 1888 |
|
}, |
|
{ |
|
"epoch": 5.92, |
|
"learning_rate": 2.0376175548589343e-05, |
|
"loss": 0.1335, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 2.0344827586206897e-05, |
|
"loss": 0.0358, |
|
"step": 1892 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 2.0313479623824452e-05, |
|
"loss": 0.3613, |
|
"step": 1894 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 2.028213166144201e-05, |
|
"loss": 0.1634, |
|
"step": 1896 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 2.025078369905956e-05, |
|
"loss": 0.1517, |
|
"step": 1898 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 2.0219435736677118e-05, |
|
"loss": 0.189, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 2.0188087774294672e-05, |
|
"loss": 0.2292, |
|
"step": 1902 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 2.0156739811912227e-05, |
|
"loss": 0.2509, |
|
"step": 1904 |
|
}, |
|
{ |
|
"epoch": 5.97, |
|
"learning_rate": 2.012539184952978e-05, |
|
"loss": 0.2649, |
|
"step": 1906 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 2.009404388714734e-05, |
|
"loss": 0.1959, |
|
"step": 1908 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 2.006269592476489e-05, |
|
"loss": 0.1861, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 2.0031347962382447e-05, |
|
"loss": 0.0846, |
|
"step": 1912 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 2e-05, |
|
"loss": 0.0477, |
|
"step": 1914 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"eval_bleu": 0.23437903354735942, |
|
"eval_loss": 0.7554587721824646, |
|
"eval_runtime": 76.4452, |
|
"eval_samples_per_second": 4.212, |
|
"eval_steps_per_second": 1.06, |
|
"step": 1914 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 1.9968652037617556e-05, |
|
"loss": 0.1647, |
|
"step": 1916 |
|
}, |
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 1.993730407523511e-05, |
|
"loss": 0.0463, |
|
"step": 1918 |
|
}, |
|
{ |
|
"epoch": 6.02, |
|
"learning_rate": 1.9905956112852668e-05, |
|
"loss": 0.0195, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 1.987460815047022e-05, |
|
"loss": 0.3331, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 1.9843260188087777e-05, |
|
"loss": 0.4867, |
|
"step": 1924 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 1.981191222570533e-05, |
|
"loss": 0.0199, |
|
"step": 1926 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 1.9780564263322885e-05, |
|
"loss": 0.0144, |
|
"step": 1928 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 1.974921630094044e-05, |
|
"loss": 0.3574, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 1.9717868338557997e-05, |
|
"loss": 0.1336, |
|
"step": 1932 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 1.9686520376175548e-05, |
|
"loss": 0.0059, |
|
"step": 1934 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 1.9655172413793106e-05, |
|
"loss": 0.2306, |
|
"step": 1936 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 1.962382445141066e-05, |
|
"loss": 0.1109, |
|
"step": 1938 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 1.9592476489028214e-05, |
|
"loss": 0.3211, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 1.956112852664577e-05, |
|
"loss": 0.0971, |
|
"step": 1942 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 1.9529780564263323e-05, |
|
"loss": 0.0462, |
|
"step": 1944 |
|
}, |
|
{ |
|
"epoch": 6.1, |
|
"learning_rate": 1.9498432601880877e-05, |
|
"loss": 0.0189, |
|
"step": 1946 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 1.9467084639498435e-05, |
|
"loss": 0.1776, |
|
"step": 1948 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 1.9435736677115986e-05, |
|
"loss": 0.1139, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 1.9404388714733544e-05, |
|
"loss": 0.1025, |
|
"step": 1952 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 1.9373040752351098e-05, |
|
"loss": 0.4661, |
|
"step": 1954 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 1.9341692789968652e-05, |
|
"loss": 0.1086, |
|
"step": 1956 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 1.9310344827586207e-05, |
|
"loss": 0.1856, |
|
"step": 1958 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 1.9278996865203764e-05, |
|
"loss": 0.6012, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 6.15, |
|
"learning_rate": 1.9247648902821315e-05, |
|
"loss": 0.2758, |
|
"step": 1962 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 1.9216300940438873e-05, |
|
"loss": 0.0415, |
|
"step": 1964 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 1.9184952978056427e-05, |
|
"loss": 0.1377, |
|
"step": 1966 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 1.915360501567398e-05, |
|
"loss": 0.0566, |
|
"step": 1968 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 1.9122257053291536e-05, |
|
"loss": 0.0282, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 1.9090909090909094e-05, |
|
"loss": 0.3328, |
|
"step": 1972 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 1.9059561128526644e-05, |
|
"loss": 0.097, |
|
"step": 1974 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 1.9028213166144202e-05, |
|
"loss": 0.0187, |
|
"step": 1976 |
|
}, |
|
{ |
|
"epoch": 6.2, |
|
"learning_rate": 1.8996865203761756e-05, |
|
"loss": 0.0267, |
|
"step": 1978 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 1.896551724137931e-05, |
|
"loss": 0.0944, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 1.8934169278996865e-05, |
|
"loss": 0.0204, |
|
"step": 1982 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 1.8902821316614423e-05, |
|
"loss": 0.291, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 1.8871473354231974e-05, |
|
"loss": 0.1156, |
|
"step": 1986 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 1.884012539184953e-05, |
|
"loss": 0.093, |
|
"step": 1988 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 1.8808777429467086e-05, |
|
"loss": 0.4966, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 6.24, |
|
"learning_rate": 1.877742946708464e-05, |
|
"loss": 0.2578, |
|
"step": 1992 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 1.8746081504702194e-05, |
|
"loss": 0.113, |
|
"step": 1994 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 1.8714733542319752e-05, |
|
"loss": 0.0566, |
|
"step": 1996 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 1.8683385579937303e-05, |
|
"loss": 0.0978, |
|
"step": 1998 |
|
}, |
|
{ |
|
"epoch": 6.27, |
|
"learning_rate": 1.865203761755486e-05, |
|
"loss": 0.1104, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 1.8620689655172415e-05, |
|
"loss": 0.2492, |
|
"step": 2002 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 1.858934169278997e-05, |
|
"loss": 0.2796, |
|
"step": 2004 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 1.8557993730407524e-05, |
|
"loss": 0.0134, |
|
"step": 2006 |
|
}, |
|
{ |
|
"epoch": 6.29, |
|
"learning_rate": 1.852664576802508e-05, |
|
"loss": 0.0916, |
|
"step": 2008 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 1.8495297805642632e-05, |
|
"loss": 0.1723, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 1.846394984326019e-05, |
|
"loss": 0.0635, |
|
"step": 2012 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 1.8432601880877744e-05, |
|
"loss": 0.054, |
|
"step": 2014 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 1.84012539184953e-05, |
|
"loss": 0.2052, |
|
"step": 2016 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 1.8369905956112853e-05, |
|
"loss": 0.4444, |
|
"step": 2018 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 1.833855799373041e-05, |
|
"loss": 0.466, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 1.830721003134796e-05, |
|
"loss": 0.1776, |
|
"step": 2022 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 1.827586206896552e-05, |
|
"loss": 0.4567, |
|
"step": 2024 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 1.8244514106583073e-05, |
|
"loss": 0.5138, |
|
"step": 2026 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 1.8213166144200628e-05, |
|
"loss": 0.1979, |
|
"step": 2028 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 1.8181818181818182e-05, |
|
"loss": 0.065, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 1.8150470219435736e-05, |
|
"loss": 0.1527, |
|
"step": 2032 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 1.811912225705329e-05, |
|
"loss": 0.2589, |
|
"step": 2034 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 1.808777429467085e-05, |
|
"loss": 0.0881, |
|
"step": 2036 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 1.80564263322884e-05, |
|
"loss": 0.201, |
|
"step": 2038 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 1.8025078369905957e-05, |
|
"loss": 0.1377, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 1.799373040752351e-05, |
|
"loss": 0.0685, |
|
"step": 2042 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 1.7962382445141066e-05, |
|
"loss": 0.091, |
|
"step": 2044 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 1.793103448275862e-05, |
|
"loss": 0.0452, |
|
"step": 2046 |
|
}, |
|
{ |
|
"epoch": 6.42, |
|
"learning_rate": 1.7899686520376178e-05, |
|
"loss": 0.0162, |
|
"step": 2048 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 1.786833855799373e-05, |
|
"loss": 0.2791, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 1.7836990595611286e-05, |
|
"loss": 0.1512, |
|
"step": 2052 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 1.780564263322884e-05, |
|
"loss": 0.2821, |
|
"step": 2054 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 1.7774294670846395e-05, |
|
"loss": 0.023, |
|
"step": 2056 |
|
}, |
|
{ |
|
"epoch": 6.45, |
|
"learning_rate": 1.774294670846395e-05, |
|
"loss": 0.1239, |
|
"step": 2058 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 1.7711598746081507e-05, |
|
"loss": 0.2945, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 1.7680250783699058e-05, |
|
"loss": 0.2291, |
|
"step": 2062 |
|
}, |
|
{ |
|
"epoch": 6.47, |
|
"learning_rate": 1.7648902821316615e-05, |
|
"loss": 0.3298, |
|
"step": 2064 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 1.761755485893417e-05, |
|
"loss": 0.0826, |
|
"step": 2066 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 1.7586206896551724e-05, |
|
"loss": 0.3214, |
|
"step": 2068 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 1.755485893416928e-05, |
|
"loss": 0.0856, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 1.7523510971786836e-05, |
|
"loss": 0.2695, |
|
"step": 2072 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 1.7492163009404387e-05, |
|
"loss": 0.0738, |
|
"step": 2074 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 1.7460815047021945e-05, |
|
"loss": 0.2538, |
|
"step": 2076 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 1.74294670846395e-05, |
|
"loss": 0.0746, |
|
"step": 2078 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 1.7398119122257053e-05, |
|
"loss": 0.0432, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 1.7366771159874608e-05, |
|
"loss": 0.0558, |
|
"step": 2082 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 1.7335423197492165e-05, |
|
"loss": 0.0303, |
|
"step": 2084 |
|
}, |
|
{ |
|
"epoch": 6.54, |
|
"learning_rate": 1.7304075235109716e-05, |
|
"loss": 0.0962, |
|
"step": 2086 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 1.7272727272727274e-05, |
|
"loss": 0.1523, |
|
"step": 2088 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 1.7241379310344828e-05, |
|
"loss": 0.0355, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 1.7210031347962383e-05, |
|
"loss": 0.0426, |
|
"step": 2092 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 1.7178683385579937e-05, |
|
"loss": 0.0745, |
|
"step": 2094 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 1.7147335423197495e-05, |
|
"loss": 0.0368, |
|
"step": 2096 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 1.7115987460815046e-05, |
|
"loss": 0.2726, |
|
"step": 2098 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 1.7084639498432603e-05, |
|
"loss": 0.1331, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 1.7053291536050158e-05, |
|
"loss": 0.085, |
|
"step": 2102 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 1.7021943573667712e-05, |
|
"loss": 0.3728, |
|
"step": 2104 |
|
}, |
|
{ |
|
"epoch": 6.6, |
|
"learning_rate": 1.6990595611285266e-05, |
|
"loss": 0.1302, |
|
"step": 2106 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 1.6959247648902824e-05, |
|
"loss": 0.2245, |
|
"step": 2108 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 1.6927899686520378e-05, |
|
"loss": 0.0219, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 1.6896551724137932e-05, |
|
"loss": 0.1656, |
|
"step": 2112 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 1.6865203761755487e-05, |
|
"loss": 0.5374, |
|
"step": 2114 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 1.683385579937304e-05, |
|
"loss": 0.0652, |
|
"step": 2116 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 1.6802507836990595e-05, |
|
"loss": 0.1932, |
|
"step": 2118 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 1.677115987460815e-05, |
|
"loss": 0.0171, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 6.65, |
|
"learning_rate": 1.6739811912225707e-05, |
|
"loss": 0.056, |
|
"step": 2122 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 1.6708463949843262e-05, |
|
"loss": 0.1073, |
|
"step": 2124 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 1.6677115987460816e-05, |
|
"loss": 0.1135, |
|
"step": 2126 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 1.664576802507837e-05, |
|
"loss": 0.0908, |
|
"step": 2128 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 1.6614420062695925e-05, |
|
"loss": 0.2566, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 1.658307210031348e-05, |
|
"loss": 0.4259, |
|
"step": 2132 |
|
}, |
|
{ |
|
"epoch": 6.69, |
|
"learning_rate": 1.6551724137931037e-05, |
|
"loss": 0.1894, |
|
"step": 2134 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 1.652037617554859e-05, |
|
"loss": 0.0222, |
|
"step": 2136 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 1.6489028213166145e-05, |
|
"loss": 0.6408, |
|
"step": 2138 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 1.64576802507837e-05, |
|
"loss": 0.1683, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 1.6426332288401254e-05, |
|
"loss": 0.1006, |
|
"step": 2142 |
|
}, |
|
{ |
|
"epoch": 6.72, |
|
"learning_rate": 1.6394984326018808e-05, |
|
"loss": 0.0377, |
|
"step": 2144 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 1.6363636363636366e-05, |
|
"loss": 0.7774, |
|
"step": 2146 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 1.633228840125392e-05, |
|
"loss": 0.4139, |
|
"step": 2148 |
|
}, |
|
{ |
|
"epoch": 6.74, |
|
"learning_rate": 1.6300940438871475e-05, |
|
"loss": 0.1705, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 1.626959247648903e-05, |
|
"loss": 0.1628, |
|
"step": 2152 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 1.6238244514106583e-05, |
|
"loss": 0.077, |
|
"step": 2154 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 1.6206896551724137e-05, |
|
"loss": 0.0139, |
|
"step": 2156 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 1.6175548589341695e-05, |
|
"loss": 0.2951, |
|
"step": 2158 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 1.614420062695925e-05, |
|
"loss": 0.1851, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 1.6112852664576804e-05, |
|
"loss": 0.0723, |
|
"step": 2162 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 1.6081504702194358e-05, |
|
"loss": 0.2225, |
|
"step": 2164 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 1.6050156739811912e-05, |
|
"loss": 0.7718, |
|
"step": 2166 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 1.6018808777429467e-05, |
|
"loss": 0.1366, |
|
"step": 2168 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 1.5987460815047024e-05, |
|
"loss": 0.1938, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 1.595611285266458e-05, |
|
"loss": 0.9167, |
|
"step": 2172 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 1.5924764890282133e-05, |
|
"loss": 0.1728, |
|
"step": 2174 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 1.5893416927899687e-05, |
|
"loss": 0.1459, |
|
"step": 2176 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 1.586206896551724e-05, |
|
"loss": 0.2455, |
|
"step": 2178 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 1.5830721003134796e-05, |
|
"loss": 0.1353, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 1.5799373040752354e-05, |
|
"loss": 0.6078, |
|
"step": 2182 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 1.5768025078369908e-05, |
|
"loss": 0.1515, |
|
"step": 2184 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 1.5736677115987462e-05, |
|
"loss": 0.0547, |
|
"step": 2186 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 1.5705329153605017e-05, |
|
"loss": 0.4057, |
|
"step": 2188 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 1.5673981191222574e-05, |
|
"loss": 0.3448, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 6.87, |
|
"learning_rate": 1.5642633228840125e-05, |
|
"loss": 0.1058, |
|
"step": 2192 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 1.5611285266457683e-05, |
|
"loss": 0.3506, |
|
"step": 2194 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 1.5579937304075237e-05, |
|
"loss": 0.0388, |
|
"step": 2196 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 1.554858934169279e-05, |
|
"loss": 0.2267, |
|
"step": 2198 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 1.5517241379310346e-05, |
|
"loss": 0.1389, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 6.9, |
|
"learning_rate": 1.54858934169279e-05, |
|
"loss": 0.0636, |
|
"step": 2202 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 1.5454545454545454e-05, |
|
"loss": 0.0301, |
|
"step": 2204 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 1.5423197492163012e-05, |
|
"loss": 0.3342, |
|
"step": 2206 |
|
}, |
|
{ |
|
"epoch": 6.92, |
|
"learning_rate": 1.5391849529780563e-05, |
|
"loss": 0.0686, |
|
"step": 2208 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 1.536050156739812e-05, |
|
"loss": 0.1338, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 1.5329153605015675e-05, |
|
"loss": 0.0855, |
|
"step": 2212 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 1.529780564263323e-05, |
|
"loss": 0.192, |
|
"step": 2214 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 1.5266457680250784e-05, |
|
"loss": 0.0585, |
|
"step": 2216 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 1.523510971786834e-05, |
|
"loss": 0.0632, |
|
"step": 2218 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 1.5203761755485892e-05, |
|
"loss": 0.0729, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 1.5172413793103448e-05, |
|
"loss": 0.1447, |
|
"step": 2222 |
|
}, |
|
{ |
|
"epoch": 6.97, |
|
"learning_rate": 1.5141065830721004e-05, |
|
"loss": 0.2357, |
|
"step": 2224 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 1.5109717868338557e-05, |
|
"loss": 0.2444, |
|
"step": 2226 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 1.5078369905956113e-05, |
|
"loss": 0.1421, |
|
"step": 2228 |
|
}, |
|
{ |
|
"epoch": 6.99, |
|
"learning_rate": 1.5047021943573669e-05, |
|
"loss": 0.0357, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 1.5015673981191222e-05, |
|
"loss": 0.1374, |
|
"step": 2232 |
|
}, |
|
    {
      "epoch": 7.0,
      "eval_bleu": 0.24644981466153415,
      "eval_loss": 0.8383769989013672,
      "eval_runtime": 75.2008,
      "eval_samples_per_second": 4.282,
      "eval_steps_per_second": 1.077,
      "step": 2233
    },
    {
      "epoch": 7.0,
      "learning_rate": 1.4984326018808778e-05,
      "loss": 0.0379,
      "step": 2234
    },
    {
      "epoch": 7.01,
      "learning_rate": 1.4952978056426334e-05,
      "loss": 0.2323,
      "step": 2236
    },
    {
      "epoch": 7.02,
      "learning_rate": 1.4921630094043886e-05,
      "loss": 0.1027,
      "step": 2238
    },
    {
      "epoch": 7.02,
      "learning_rate": 1.4890282131661442e-05,
      "loss": 0.0098,
      "step": 2240
    },
    {
      "epoch": 7.03,
      "learning_rate": 1.4858934169278998e-05,
      "loss": 0.1888,
      "step": 2242
    },
    {
      "epoch": 7.03,
      "learning_rate": 1.482758620689655e-05,
      "loss": 0.0321,
      "step": 2244
    },
    {
      "epoch": 7.04,
      "learning_rate": 1.4796238244514107e-05,
      "loss": 0.1494,
      "step": 2246
    },
    {
      "epoch": 7.05,
      "learning_rate": 1.4764890282131663e-05,
      "loss": 0.0946,
      "step": 2248
    },
    {
      "epoch": 7.05,
      "learning_rate": 1.4733542319749215e-05,
      "loss": 0.0561,
      "step": 2250
    },
    {
      "epoch": 7.06,
      "learning_rate": 1.4702194357366771e-05,
      "loss": 0.0507,
      "step": 2252
    },
    {
      "epoch": 7.07,
      "learning_rate": 1.4670846394984327e-05,
      "loss": 0.2675,
      "step": 2254
    },
    {
      "epoch": 7.07,
      "learning_rate": 1.463949843260188e-05,
      "loss": 0.0247,
      "step": 2256
    },
    {
      "epoch": 7.08,
      "learning_rate": 1.4608150470219436e-05,
      "loss": 0.1561,
      "step": 2258
    },
    {
      "epoch": 7.08,
      "learning_rate": 1.4576802507836992e-05,
      "loss": 0.0234,
      "step": 2260
    },
    {
      "epoch": 7.09,
      "learning_rate": 1.4545454545454545e-05,
      "loss": 0.0967,
      "step": 2262
    },
    {
      "epoch": 7.1,
      "learning_rate": 1.45141065830721e-05,
      "loss": 0.0608,
      "step": 2264
    },
    {
      "epoch": 7.1,
      "learning_rate": 1.4482758620689657e-05,
      "loss": 0.0862,
      "step": 2266
    },
    {
      "epoch": 7.11,
      "learning_rate": 1.445141065830721e-05,
      "loss": 0.106,
      "step": 2268
    },
    {
      "epoch": 7.12,
      "learning_rate": 1.4420062695924765e-05,
      "loss": 0.1607,
      "step": 2270
    },
    {
      "epoch": 7.12,
      "learning_rate": 1.4388714733542321e-05,
      "loss": 0.1719,
      "step": 2272
    },
    {
      "epoch": 7.13,
      "learning_rate": 1.4357366771159874e-05,
      "loss": 0.0395,
      "step": 2274
    },
    {
      "epoch": 7.13,
      "learning_rate": 1.432601880877743e-05,
      "loss": 0.1354,
      "step": 2276
    },
    {
      "epoch": 7.14,
      "learning_rate": 1.4294670846394986e-05,
      "loss": 0.1448,
      "step": 2278
    },
    {
      "epoch": 7.15,
      "learning_rate": 1.4263322884012539e-05,
      "loss": 0.0248,
      "step": 2280
    },
    {
      "epoch": 7.15,
      "learning_rate": 1.4231974921630095e-05,
      "loss": 0.0896,
      "step": 2282
    },
    {
      "epoch": 7.16,
      "learning_rate": 1.420062695924765e-05,
      "loss": 0.1104,
      "step": 2284
    },
    {
      "epoch": 7.17,
      "learning_rate": 1.4169278996865205e-05,
      "loss": 0.0603,
      "step": 2286
    },
    {
      "epoch": 7.17,
      "learning_rate": 1.4137931034482759e-05,
      "loss": 0.0674,
      "step": 2288
    },
    {
      "epoch": 7.18,
      "learning_rate": 1.4106583072100313e-05,
      "loss": 0.0872,
      "step": 2290
    },
    {
      "epoch": 7.18,
      "learning_rate": 1.407523510971787e-05,
      "loss": 0.0838,
      "step": 2292
    },
    {
      "epoch": 7.19,
      "learning_rate": 1.4043887147335424e-05,
      "loss": 0.0341,
      "step": 2294
    },
    {
      "epoch": 7.2,
      "learning_rate": 1.4012539184952978e-05,
      "loss": 0.5187,
      "step": 2296
    },
    {
      "epoch": 7.2,
      "learning_rate": 1.3981191222570534e-05,
      "loss": 0.0174,
      "step": 2298
    },
    {
      "epoch": 7.21,
      "learning_rate": 1.3949843260188088e-05,
      "loss": 0.193,
      "step": 2300
    },
    {
      "epoch": 7.22,
      "learning_rate": 1.3918495297805643e-05,
      "loss": 0.2255,
      "step": 2302
    },
    {
      "epoch": 7.22,
      "learning_rate": 1.3887147335423199e-05,
      "loss": 0.9768,
      "step": 2304
    },
    {
      "epoch": 7.23,
      "learning_rate": 1.3855799373040753e-05,
      "loss": 0.0174,
      "step": 2306
    },
    {
      "epoch": 7.24,
      "learning_rate": 1.3824451410658307e-05,
      "loss": 0.0791,
      "step": 2308
    },
    {
      "epoch": 7.24,
      "learning_rate": 1.3793103448275863e-05,
      "loss": 0.1198,
      "step": 2310
    },
    {
      "epoch": 7.25,
      "learning_rate": 1.3761755485893418e-05,
      "loss": 0.3075,
      "step": 2312
    },
    {
      "epoch": 7.25,
      "learning_rate": 1.3730407523510972e-05,
      "loss": 0.029,
      "step": 2314
    },
    {
      "epoch": 7.26,
      "learning_rate": 1.3699059561128528e-05,
      "loss": 0.0898,
      "step": 2316
    },
    {
      "epoch": 7.27,
      "learning_rate": 1.3667711598746082e-05,
      "loss": 0.4643,
      "step": 2318
    },
    {
      "epoch": 7.27,
      "learning_rate": 1.3636363636363637e-05,
      "loss": 0.1251,
      "step": 2320
    },
    {
      "epoch": 7.28,
      "learning_rate": 1.3605015673981193e-05,
      "loss": 0.1889,
      "step": 2322
    },
    {
      "epoch": 7.29,
      "learning_rate": 1.3573667711598747e-05,
      "loss": 0.2009,
      "step": 2324
    },
    {
      "epoch": 7.29,
      "learning_rate": 1.3542319749216301e-05,
      "loss": 0.0354,
      "step": 2326
    },
    {
      "epoch": 7.3,
      "learning_rate": 1.3510971786833857e-05,
      "loss": 0.0464,
      "step": 2328
    },
    {
      "epoch": 7.3,
      "learning_rate": 1.3479623824451411e-05,
      "loss": 0.0486,
      "step": 2330
    },
    {
      "epoch": 7.31,
      "learning_rate": 1.3448275862068966e-05,
      "loss": 0.044,
      "step": 2332
    },
    {
      "epoch": 7.32,
      "learning_rate": 1.3416927899686522e-05,
      "loss": 0.0158,
      "step": 2334
    },
    {
      "epoch": 7.32,
      "learning_rate": 1.3385579937304076e-05,
      "loss": 0.1485,
      "step": 2336
    },
    {
      "epoch": 7.33,
      "learning_rate": 1.335423197492163e-05,
      "loss": 0.0245,
      "step": 2338
    },
    {
      "epoch": 7.34,
      "learning_rate": 1.3322884012539186e-05,
      "loss": 0.16,
      "step": 2340
    },
    {
      "epoch": 7.34,
      "learning_rate": 1.329153605015674e-05,
      "loss": 0.2687,
      "step": 2342
    },
    {
      "epoch": 7.35,
      "learning_rate": 1.3260188087774295e-05,
      "loss": 0.1404,
      "step": 2344
    },
    {
      "epoch": 7.35,
      "learning_rate": 1.3228840125391851e-05,
      "loss": 0.1259,
      "step": 2346
    },
    {
      "epoch": 7.36,
      "learning_rate": 1.3197492163009405e-05,
      "loss": 0.1368,
      "step": 2348
    },
    {
      "epoch": 7.37,
      "learning_rate": 1.316614420062696e-05,
      "loss": 0.0172,
      "step": 2350
    },
    {
      "epoch": 7.37,
      "learning_rate": 1.3134796238244516e-05,
      "loss": 0.0425,
      "step": 2352
    },
    {
      "epoch": 7.38,
      "learning_rate": 1.310344827586207e-05,
      "loss": 0.5538,
      "step": 2354
    },
    {
      "epoch": 7.39,
      "learning_rate": 1.3072100313479624e-05,
      "loss": 0.0253,
      "step": 2356
    },
    {
      "epoch": 7.39,
      "learning_rate": 1.304075235109718e-05,
      "loss": 0.0093,
      "step": 2358
    },
    {
      "epoch": 7.4,
      "learning_rate": 1.3009404388714735e-05,
      "loss": 0.1181,
      "step": 2360
    },
    {
      "epoch": 7.4,
      "learning_rate": 1.2978056426332289e-05,
      "loss": 0.1287,
      "step": 2362
    },
    {
      "epoch": 7.41,
      "learning_rate": 1.2946708463949845e-05,
      "loss": 0.2848,
      "step": 2364
    },
    {
      "epoch": 7.42,
      "learning_rate": 1.2915360501567401e-05,
      "loss": 0.2026,
      "step": 2366
    },
    {
      "epoch": 7.42,
      "learning_rate": 1.2884012539184954e-05,
      "loss": 0.0273,
      "step": 2368
    },
    {
      "epoch": 7.43,
      "learning_rate": 1.285266457680251e-05,
      "loss": 0.242,
      "step": 2370
    },
    {
      "epoch": 7.44,
      "learning_rate": 1.2821316614420062e-05,
      "loss": 0.0237,
      "step": 2372
    },
    {
      "epoch": 7.44,
      "learning_rate": 1.2789968652037618e-05,
      "loss": 0.0859,
      "step": 2374
    },
    {
      "epoch": 7.45,
      "learning_rate": 1.2758620689655174e-05,
      "loss": 0.0282,
      "step": 2376
    },
    {
      "epoch": 7.45,
      "learning_rate": 1.2727272727272727e-05,
      "loss": 0.0694,
      "step": 2378
    },
    {
      "epoch": 7.46,
      "learning_rate": 1.2695924764890283e-05,
      "loss": 0.4857,
      "step": 2380
    },
    {
      "epoch": 7.47,
      "learning_rate": 1.2664576802507839e-05,
      "loss": 0.246,
      "step": 2382
    },
    {
      "epoch": 7.47,
      "learning_rate": 1.2633228840125391e-05,
      "loss": 0.247,
      "step": 2384
    },
    {
      "epoch": 7.48,
      "learning_rate": 1.2601880877742947e-05,
      "loss": 0.4204,
      "step": 2386
    },
    {
      "epoch": 7.49,
      "learning_rate": 1.2570532915360503e-05,
      "loss": 0.3749,
      "step": 2388
    },
    {
      "epoch": 7.49,
      "learning_rate": 1.2539184952978056e-05,
      "loss": 0.1052,
      "step": 2390
    },
    {
      "epoch": 7.5,
      "learning_rate": 1.2507836990595612e-05,
      "loss": 0.0413,
      "step": 2392
    },
    {
      "epoch": 7.5,
      "learning_rate": 1.2476489028213166e-05,
      "loss": 0.078,
      "step": 2394
    },
    {
      "epoch": 7.51,
      "learning_rate": 1.2445141065830722e-05,
      "loss": 0.137,
      "step": 2396
    },
    {
      "epoch": 7.52,
      "learning_rate": 1.2413793103448277e-05,
      "loss": 0.0713,
      "step": 2398
    },
    {
      "epoch": 7.52,
      "learning_rate": 1.2382445141065831e-05,
      "loss": 0.3123,
      "step": 2400
    },
    {
      "epoch": 7.53,
      "learning_rate": 1.2351097178683387e-05,
      "loss": 0.0858,
      "step": 2402
    },
    {
      "epoch": 7.54,
      "learning_rate": 1.2319749216300941e-05,
      "loss": 0.0449,
      "step": 2404
    },
    {
      "epoch": 7.54,
      "learning_rate": 1.2288401253918496e-05,
      "loss": 0.3001,
      "step": 2406
    },
    {
      "epoch": 7.55,
      "learning_rate": 1.2257053291536052e-05,
      "loss": 0.2863,
      "step": 2408
    },
    {
      "epoch": 7.55,
      "learning_rate": 1.2225705329153606e-05,
      "loss": 0.0359,
      "step": 2410
    },
    {
      "epoch": 7.56,
      "learning_rate": 1.219435736677116e-05,
      "loss": 0.056,
      "step": 2412
    },
    {
      "epoch": 7.57,
      "learning_rate": 1.2163009404388716e-05,
      "loss": 0.3254,
      "step": 2414
    },
    {
      "epoch": 7.57,
      "learning_rate": 1.213166144200627e-05,
      "loss": 0.2419,
      "step": 2416
    },
    {
      "epoch": 7.58,
      "learning_rate": 1.2100313479623825e-05,
      "loss": 0.1454,
      "step": 2418
    },
    {
      "epoch": 7.59,
      "learning_rate": 1.206896551724138e-05,
      "loss": 0.0976,
      "step": 2420
    },
    {
      "epoch": 7.59,
      "learning_rate": 1.2037617554858935e-05,
      "loss": 0.0717,
      "step": 2422
    },
    {
      "epoch": 7.6,
      "learning_rate": 1.200626959247649e-05,
      "loss": 0.0459,
      "step": 2424
    },
    {
      "epoch": 7.61,
      "learning_rate": 1.1974921630094045e-05,
      "loss": 0.0737,
      "step": 2426
    },
    {
      "epoch": 7.61,
      "learning_rate": 1.19435736677116e-05,
      "loss": 0.1952,
      "step": 2428
    },
    {
      "epoch": 7.62,
      "learning_rate": 1.1912225705329154e-05,
      "loss": 0.1624,
      "step": 2430
    },
    {
      "epoch": 7.62,
      "learning_rate": 1.188087774294671e-05,
      "loss": 0.0889,
      "step": 2432
    },
    {
      "epoch": 7.63,
      "learning_rate": 1.1849529780564264e-05,
      "loss": 0.1397,
      "step": 2434
    },
    {
      "epoch": 7.64,
      "learning_rate": 1.1818181818181819e-05,
      "loss": 0.1465,
      "step": 2436
    },
    {
      "epoch": 7.64,
      "learning_rate": 1.1786833855799373e-05,
      "loss": 0.2865,
      "step": 2438
    },
    {
      "epoch": 7.65,
      "learning_rate": 1.1755485893416929e-05,
      "loss": 0.1885,
      "step": 2440
    },
    {
      "epoch": 7.66,
      "learning_rate": 1.1724137931034483e-05,
      "loss": 0.2347,
      "step": 2442
    },
    {
      "epoch": 7.66,
      "learning_rate": 1.1692789968652038e-05,
      "loss": 0.1205,
      "step": 2444
    },
    {
      "epoch": 7.67,
      "learning_rate": 1.1661442006269594e-05,
      "loss": 0.5764,
      "step": 2446
    },
    {
      "epoch": 7.67,
      "learning_rate": 1.1630094043887148e-05,
      "loss": 0.0105,
      "step": 2448
    },
    {
      "epoch": 7.68,
      "learning_rate": 1.1598746081504702e-05,
      "loss": 0.0425,
      "step": 2450
    },
    {
      "epoch": 7.69,
      "learning_rate": 1.1567398119122258e-05,
      "loss": 0.1366,
      "step": 2452
    },
    {
      "epoch": 7.69,
      "learning_rate": 1.1536050156739813e-05,
      "loss": 0.0739,
      "step": 2454
    },
    {
      "epoch": 7.7,
      "learning_rate": 1.1504702194357367e-05,
      "loss": 0.0658,
      "step": 2456
    },
    {
      "epoch": 7.71,
      "learning_rate": 1.1473354231974923e-05,
      "loss": 0.4401,
      "step": 2458
    },
    {
      "epoch": 7.71,
      "learning_rate": 1.1442006269592477e-05,
      "loss": 0.2505,
      "step": 2460
    },
    {
      "epoch": 7.72,
      "learning_rate": 1.1410658307210031e-05,
      "loss": 0.0542,
      "step": 2462
    },
    {
      "epoch": 7.72,
      "learning_rate": 1.1379310344827587e-05,
      "loss": 0.0149,
      "step": 2464
    },
    {
      "epoch": 7.73,
      "learning_rate": 1.1347962382445142e-05,
      "loss": 0.0453,
      "step": 2466
    },
    {
      "epoch": 7.74,
      "learning_rate": 1.1316614420062696e-05,
      "loss": 0.0549,
      "step": 2468
    },
    {
      "epoch": 7.74,
      "learning_rate": 1.1285266457680252e-05,
      "loss": 0.0744,
      "step": 2470
    },
    {
      "epoch": 7.75,
      "learning_rate": 1.1253918495297806e-05,
      "loss": 0.1326,
      "step": 2472
    },
    {
      "epoch": 7.76,
      "learning_rate": 1.122257053291536e-05,
      "loss": 0.0853,
      "step": 2474
    },
    {
      "epoch": 7.76,
      "learning_rate": 1.1191222570532917e-05,
      "loss": 0.0393,
      "step": 2476
    },
    {
      "epoch": 7.77,
      "learning_rate": 1.1159874608150471e-05,
      "loss": 0.3383,
      "step": 2478
    },
    {
      "epoch": 7.77,
      "learning_rate": 1.1128526645768025e-05,
      "loss": 0.2691,
      "step": 2480
    },
    {
      "epoch": 7.78,
      "learning_rate": 1.109717868338558e-05,
      "loss": 0.2155,
      "step": 2482
    },
    {
      "epoch": 7.79,
      "learning_rate": 1.1065830721003136e-05,
      "loss": 0.106,
      "step": 2484
    },
    {
      "epoch": 7.79,
      "learning_rate": 1.103448275862069e-05,
      "loss": 0.0392,
      "step": 2486
    },
    {
      "epoch": 7.8,
      "learning_rate": 1.1003134796238244e-05,
      "loss": 0.0198,
      "step": 2488
    },
    {
      "epoch": 7.81,
      "learning_rate": 1.09717868338558e-05,
      "loss": 0.3101,
      "step": 2490
    },
    {
      "epoch": 7.81,
      "learning_rate": 1.0940438871473355e-05,
      "loss": 0.0062,
      "step": 2492
    },
    {
      "epoch": 7.82,
      "learning_rate": 1.0909090909090909e-05,
      "loss": 0.0064,
      "step": 2494
    },
    {
      "epoch": 7.82,
      "learning_rate": 1.0877742946708465e-05,
      "loss": 0.263,
      "step": 2496
    },
    {
      "epoch": 7.83,
      "learning_rate": 1.084639498432602e-05,
      "loss": 0.5857,
      "step": 2498
    },
    {
      "epoch": 7.84,
      "learning_rate": 1.0815047021943574e-05,
      "loss": 0.3092,
      "step": 2500
    },
    {
      "epoch": 7.84,
      "learning_rate": 1.078369905956113e-05,
      "loss": 0.3466,
      "step": 2502
    },
    {
      "epoch": 7.85,
      "learning_rate": 1.0752351097178684e-05,
      "loss": 0.0788,
      "step": 2504
    },
    {
      "epoch": 7.86,
      "learning_rate": 1.0721003134796238e-05,
      "loss": 0.2137,
      "step": 2506
    },
    {
      "epoch": 7.86,
      "learning_rate": 1.0689655172413794e-05,
      "loss": 0.192,
      "step": 2508
    },
    {
      "epoch": 7.87,
      "learning_rate": 1.0658307210031348e-05,
      "loss": 0.0373,
      "step": 2510
    },
    {
      "epoch": 7.87,
      "learning_rate": 1.0626959247648903e-05,
      "loss": 0.0247,
      "step": 2512
    },
    {
      "epoch": 7.88,
      "learning_rate": 1.0595611285266459e-05,
      "loss": 0.0209,
      "step": 2514
    },
    {
      "epoch": 7.89,
      "learning_rate": 1.0564263322884013e-05,
      "loss": 0.0277,
      "step": 2516
    },
    {
      "epoch": 7.89,
      "learning_rate": 1.0532915360501567e-05,
      "loss": 0.0101,
      "step": 2518
    },
    {
      "epoch": 7.9,
      "learning_rate": 1.0501567398119123e-05,
      "loss": 0.1634,
      "step": 2520
    },
    {
      "epoch": 7.91,
      "learning_rate": 1.0470219435736678e-05,
      "loss": 0.1656,
      "step": 2522
    },
    {
      "epoch": 7.91,
      "learning_rate": 1.0438871473354232e-05,
      "loss": 0.3494,
      "step": 2524
    },
    {
      "epoch": 7.92,
      "learning_rate": 1.0407523510971786e-05,
      "loss": 0.0274,
      "step": 2526
    },
    {
      "epoch": 7.92,
      "learning_rate": 1.0376175548589342e-05,
      "loss": 0.3793,
      "step": 2528
    },
    {
      "epoch": 7.93,
      "learning_rate": 1.0344827586206897e-05,
      "loss": 0.0542,
      "step": 2530
    },
    {
      "epoch": 7.94,
      "learning_rate": 1.0313479623824451e-05,
      "loss": 0.0107,
      "step": 2532
    },
    {
      "epoch": 7.94,
      "learning_rate": 1.0282131661442007e-05,
      "loss": 0.0499,
      "step": 2534
    },
    {
      "epoch": 7.95,
      "learning_rate": 1.0250783699059561e-05,
      "loss": 0.1235,
      "step": 2536
    },
    {
      "epoch": 7.96,
      "learning_rate": 1.0219435736677116e-05,
      "loss": 0.3413,
      "step": 2538
    },
    {
      "epoch": 7.96,
      "learning_rate": 1.0188087774294672e-05,
      "loss": 0.4199,
      "step": 2540
    },
    {
      "epoch": 7.97,
      "learning_rate": 1.0156739811912226e-05,
      "loss": 0.0375,
      "step": 2542
    },
    {
      "epoch": 7.97,
      "learning_rate": 1.012539184952978e-05,
      "loss": 0.0116,
      "step": 2544
    },
    {
      "epoch": 7.98,
      "learning_rate": 1.0094043887147336e-05,
      "loss": 0.0703,
      "step": 2546
    },
    {
      "epoch": 7.99,
      "learning_rate": 1.006269592476489e-05,
      "loss": 0.4828,
      "step": 2548
    },
    {
      "epoch": 7.99,
      "learning_rate": 1.0031347962382445e-05,
      "loss": 0.1091,
      "step": 2550
    },
    {
      "epoch": 8.0,
      "learning_rate": 1e-05,
      "loss": 0.2584,
      "step": 2552
    },
    {
      "epoch": 8.0,
      "eval_bleu": 0.24766209564477684,
      "eval_loss": 0.868782639503479,
      "eval_runtime": 76.7494,
      "eval_samples_per_second": 4.195,
      "eval_steps_per_second": 1.055,
      "step": 2552
    }
  ],
  "max_steps": 3190,
  "num_train_epochs": 10,
  "total_flos": 2.3300870872080384e+16,
  "trial_name": null,
  "trial_params": null
}