{
  "best_metric": 6.38665771484375,
  "best_model_checkpoint": "/content/drive/MyDrive/AI\\ Camp/words-detector/checkpoint-800",
  "epoch": 3.0,
  "global_step": 876,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 4.9942922374429226e-05, "loss": 17.8686, "step": 2 },
    { "epoch": 0.01, "learning_rate": 4.9942922374429226e-05, "loss": 18.2644, "step": 4 },
    { "epoch": 0.02, "learning_rate": 4.9942922374429226e-05, "loss": 17.8102, "step": 6 },
    { "epoch": 0.03, "learning_rate": 4.9942922374429226e-05, "loss": 18.0033, "step": 8 },
    { "epoch": 0.03, "learning_rate": 4.9828767123287674e-05, "loss": 15.0071, "step": 10 },
    { "epoch": 0.04, "learning_rate": 4.971461187214612e-05, "loss": 10.0967, "step": 12 },
    { "epoch": 0.05, "learning_rate": 4.960045662100457e-05, "loss": 9.4344, "step": 14 },
    { "epoch": 0.05, "learning_rate": 4.948630136986301e-05, "loss": 9.3036, "step": 16 },
    { "epoch": 0.06, "learning_rate": 4.937214611872146e-05, "loss": 8.8597, "step": 18 },
    { "epoch": 0.07, "learning_rate": 4.9257990867579914e-05, "loss": 8.7451, "step": 20 },
    { "epoch": 0.08, "learning_rate": 4.914383561643836e-05, "loss": 9.0271, "step": 22 },
    { "epoch": 0.08, "learning_rate": 4.9029680365296804e-05, "loss": 8.3687, "step": 24 },
    { "epoch": 0.09, "learning_rate": 4.891552511415525e-05, "loss": 8.7046, "step": 26 },
    { "epoch": 0.1, "learning_rate": 4.88013698630137e-05, "loss": 8.2987, "step": 28 },
    { "epoch": 0.1, "learning_rate": 4.868721461187215e-05, "loss": 8.3789, "step": 30 },
    { "epoch": 0.11, "learning_rate": 4.8573059360730596e-05, "loss": 7.7792, "step": 32 },
    { "epoch": 0.12, "learning_rate": 4.8458904109589044e-05, "loss": 8.1005, "step": 34 },
    { "epoch": 0.12, "learning_rate": 4.834474885844749e-05, "loss": 8.0337, "step": 36 },
    { "epoch": 0.13, "learning_rate": 4.823059360730594e-05, "loss": 7.8095, "step": 38 },
    { "epoch": 0.14, "learning_rate": 4.811643835616438e-05, "loss": 7.5034, "step": 40 },
    { "epoch": 0.14, "learning_rate": 4.8002283105022836e-05, "loss": 7.6029, "step": 42 },
    { "epoch": 0.15, "learning_rate": 4.7888127853881284e-05, "loss": 7.7257, "step": 44 },
    { "epoch": 0.16, "learning_rate": 4.777397260273973e-05, "loss": 7.4601, "step": 46 },
    { "epoch": 0.16, "learning_rate": 4.765981735159817e-05, "loss": 7.7843, "step": 48 },
    { "epoch": 0.17, "learning_rate": 4.754566210045662e-05, "loss": 7.7357, "step": 50 },
    { "epoch": 0.18, "learning_rate": 4.743150684931507e-05, "loss": 7.1895, "step": 52 },
    { "epoch": 0.18, "learning_rate": 4.7317351598173523e-05, "loss": 7.2758, "step": 54 },
    { "epoch": 0.19, "learning_rate": 4.7203196347031965e-05, "loss": 7.818, "step": 56 },
    { "epoch": 0.2, "learning_rate": 4.708904109589041e-05, "loss": 7.4382, "step": 58 },
    { "epoch": 0.21, "learning_rate": 4.697488584474886e-05, "loss": 7.4169, "step": 60 },
    { "epoch": 0.21, "learning_rate": 4.686073059360731e-05, "loss": 7.5099, "step": 62 },
    { "epoch": 0.22, "learning_rate": 4.674657534246576e-05, "loss": 7.309, "step": 64 },
    { "epoch": 0.23, "learning_rate": 4.6632420091324205e-05, "loss": 7.4911, "step": 66 },
    { "epoch": 0.23, "learning_rate": 4.651826484018265e-05, "loss": 7.595, "step": 68 },
    { "epoch": 0.24, "learning_rate": 4.64041095890411e-05, "loss": 7.3767, "step": 70 },
    { "epoch": 0.25, "learning_rate": 4.628995433789954e-05, "loss": 7.3467, "step": 72 },
    { "epoch": 0.25, "learning_rate": 4.617579908675799e-05, "loss": 7.4988, "step": 74 },
    { "epoch": 0.26, "learning_rate": 4.6061643835616445e-05, "loss": 7.2302, "step": 76 },
    { "epoch": 0.27, "learning_rate": 4.594748858447489e-05, "loss": 7.0727, "step": 78 },
    { "epoch": 0.27, "learning_rate": 4.5833333333333334e-05, "loss": 7.5031, "step": 80 },
    { "epoch": 0.28, "learning_rate": 4.571917808219178e-05, "loss": 7.1357, "step": 82 },
    { "epoch": 0.29, "learning_rate": 4.560502283105023e-05, "loss": 7.1202, "step": 84 },
    { "epoch": 0.29, "learning_rate": 4.549086757990868e-05, "loss": 7.2538, "step": 86 },
    { "epoch": 0.3, "learning_rate": 4.5376712328767126e-05, "loss": 7.071, "step": 88 },
    { "epoch": 0.31, "learning_rate": 4.5262557077625574e-05, "loss": 7.5201, "step": 90 },
    { "epoch": 0.32, "learning_rate": 4.514840182648402e-05, "loss": 7.5082, "step": 92 },
    { "epoch": 0.32, "learning_rate": 4.503424657534247e-05, "loss": 7.1858, "step": 94 },
    { "epoch": 0.33, "learning_rate": 4.492009132420091e-05, "loss": 7.2599, "step": 96 },
    { "epoch": 0.34, "learning_rate": 4.4805936073059366e-05, "loss": 7.3367, "step": 98 },
    { "epoch": 0.34, "learning_rate": 4.4691780821917814e-05, "loss": 6.9713, "step": 100 },
    { "epoch": 0.34, "eval_cer": 0.7946802397175929, "eval_loss": 7.137301445007324, "eval_runtime": 208.567, "eval_samples_per_second": 2.795, "eval_steps_per_second": 0.35, "step": 100 },
    { "epoch": 0.35, "learning_rate": 4.457762557077626e-05, "loss": 6.8404, "step": 102 },
    { "epoch": 0.36, "learning_rate": 4.44634703196347e-05, "loss": 7.0133, "step": 104 },
    { "epoch": 0.36, "learning_rate": 4.434931506849315e-05, "loss": 7.2514, "step": 106 },
    { "epoch": 0.37, "learning_rate": 4.42351598173516e-05, "loss": 7.2344, "step": 108 },
    { "epoch": 0.38, "learning_rate": 4.412100456621005e-05, "loss": 7.1263, "step": 110 },
    { "epoch": 0.38, "learning_rate": 4.4006849315068495e-05, "loss": 7.1429, "step": 112 },
    { "epoch": 0.39, "learning_rate": 4.389269406392694e-05, "loss": 7.422, "step": 114 },
    { "epoch": 0.4, "learning_rate": 4.377853881278539e-05, "loss": 7.2147, "step": 116 },
    { "epoch": 0.4, "learning_rate": 4.366438356164384e-05, "loss": 6.9941, "step": 118 },
    { "epoch": 0.41, "learning_rate": 4.355022831050228e-05, "loss": 7.0812, "step": 120 },
    { "epoch": 0.42, "learning_rate": 4.3436073059360735e-05, "loss": 7.4399, "step": 122 },
    { "epoch": 0.42, "learning_rate": 4.332191780821918e-05, "loss": 6.9418, "step": 124 },
    { "epoch": 0.43, "learning_rate": 4.320776255707763e-05, "loss": 7.0305, "step": 126 },
    { "epoch": 0.44, "learning_rate": 4.309360730593607e-05, "loss": 7.262, "step": 128 },
    { "epoch": 0.45, "learning_rate": 4.297945205479452e-05, "loss": 7.092, "step": 130 },
    { "epoch": 0.45, "learning_rate": 4.286529680365297e-05, "loss": 6.9877, "step": 132 },
    { "epoch": 0.46, "learning_rate": 4.275114155251142e-05, "loss": 6.825, "step": 134 },
    { "epoch": 0.47, "learning_rate": 4.2636986301369864e-05, "loss": 6.6798, "step": 136 },
    { "epoch": 0.47, "learning_rate": 4.252283105022831e-05, "loss": 6.9598, "step": 138 },
    { "epoch": 0.48, "learning_rate": 4.240867579908676e-05, "loss": 6.6182, "step": 140 },
    { "epoch": 0.49, "learning_rate": 4.229452054794521e-05, "loss": 7.0775, "step": 142 },
    { "epoch": 0.49, "learning_rate": 4.2180365296803656e-05, "loss": 7.0996, "step": 144 },
    { "epoch": 0.5, "learning_rate": 4.2066210045662104e-05, "loss": 6.9547, "step": 146 },
    { "epoch": 0.51, "learning_rate": 4.195205479452055e-05, "loss": 6.8326, "step": 148 },
    { "epoch": 0.51, "learning_rate": 4.1837899543379e-05, "loss": 7.1501, "step": 150 },
    { "epoch": 0.52, "learning_rate": 4.172374429223744e-05, "loss": 7.0974, "step": 152 },
    { "epoch": 0.53, "learning_rate": 4.160958904109589e-05, "loss": 6.8626, "step": 154 },
    { "epoch": 0.53, "learning_rate": 4.1495433789954344e-05, "loss": 6.4989, "step": 156 },
    { "epoch": 0.54, "learning_rate": 4.138127853881279e-05, "loss": 6.7234, "step": 158 },
    { "epoch": 0.55, "learning_rate": 4.126712328767123e-05, "loss": 6.7773, "step": 160 },
    { "epoch": 0.55, "learning_rate": 4.115296803652968e-05, "loss": 6.9259, "step": 162 },
    { "epoch": 0.56, "learning_rate": 4.103881278538813e-05, "loss": 7.0744, "step": 164 },
    { "epoch": 0.57, "learning_rate": 4.092465753424658e-05, "loss": 6.8039, "step": 166 },
    { "epoch": 0.58, "learning_rate": 4.0810502283105025e-05, "loss": 6.8271, "step": 168 },
    { "epoch": 0.58, "learning_rate": 4.069634703196347e-05, "loss": 6.7427, "step": 170 },
    { "epoch": 0.59, "learning_rate": 4.058219178082192e-05, "loss": 6.5537, "step": 172 },
    { "epoch": 0.6, "learning_rate": 4.046803652968037e-05, "loss": 6.6001, "step": 174 },
    { "epoch": 0.6, "learning_rate": 4.035388127853881e-05, "loss": 6.6144, "step": 176 },
    { "epoch": 0.61, "learning_rate": 4.0239726027397265e-05, "loss": 6.8667, "step": 178 },
    { "epoch": 0.62, "learning_rate": 4.012557077625571e-05, "loss": 7.0148, "step": 180 },
    { "epoch": 0.62, "learning_rate": 4.001141552511416e-05, "loss": 6.7102, "step": 182 },
    { "epoch": 0.63, "learning_rate": 3.98972602739726e-05, "loss": 6.8212, "step": 184 },
    { "epoch": 0.64, "learning_rate": 3.978310502283105e-05, "loss": 6.6284, "step": 186 },
    { "epoch": 0.64, "learning_rate": 3.96689497716895e-05, "loss": 6.5087, "step": 188 },
    { "epoch": 0.65, "learning_rate": 3.9554794520547946e-05, "loss": 7.1345, "step": 190 },
    { "epoch": 0.66, "learning_rate": 3.9440639269406394e-05, "loss": 7.0863, "step": 192 },
    { "epoch": 0.66, "learning_rate": 3.932648401826484e-05, "loss": 6.8418, "step": 194 },
    { "epoch": 0.67, "learning_rate": 3.921232876712329e-05, "loss": 7.0545, "step": 196 },
    { "epoch": 0.68, "learning_rate": 3.909817351598174e-05, "loss": 7.1057, "step": 198 },
    { "epoch": 0.68, "learning_rate": 3.8984018264840186e-05, "loss": 6.6551, "step": 200 },
    { "epoch": 0.68, "eval_cer": 0.8836712913553896, "eval_loss": 6.700944900512695, "eval_runtime": 40.0981, "eval_samples_per_second": 14.539, "eval_steps_per_second": 1.821, "step": 200 },
    { "epoch": 0.69, "learning_rate": 3.8869863013698634e-05, "loss": 6.7477, "step": 202 },
    { "epoch": 0.7, "learning_rate": 3.875570776255708e-05, "loss": 6.9312, "step": 204 },
    { "epoch": 0.71, "learning_rate": 3.864155251141553e-05, "loss": 6.6784, "step": 206 },
    { "epoch": 0.71, "learning_rate": 3.852739726027397e-05, "loss": 6.9644, "step": 208 },
    { "epoch": 0.72, "learning_rate": 3.841324200913242e-05, "loss": 6.5752, "step": 210 },
    { "epoch": 0.73, "learning_rate": 3.829908675799087e-05, "loss": 7.0168, "step": 212 },
    { "epoch": 0.73, "learning_rate": 3.818493150684932e-05, "loss": 6.7026, "step": 214 },
    { "epoch": 0.74, "learning_rate": 3.8070776255707764e-05, "loss": 6.8185, "step": 216 },
    { "epoch": 0.75, "learning_rate": 3.795662100456621e-05, "loss": 6.7238, "step": 218 },
    { "epoch": 0.75, "learning_rate": 3.784246575342466e-05, "loss": 6.7133, "step": 220 },
    { "epoch": 0.76, "learning_rate": 3.772831050228311e-05, "loss": 6.771, "step": 222 },
    { "epoch": 0.77, "learning_rate": 3.7614155251141555e-05, "loss": 6.8149, "step": 224 },
    { "epoch": 0.77, "learning_rate": 3.7500000000000003e-05, "loss": 6.6512, "step": 226 },
    { "epoch": 0.78, "learning_rate": 3.738584474885845e-05, "loss": 6.213, "step": 228 },
    { "epoch": 0.79, "learning_rate": 3.727168949771689e-05, "loss": 6.7743, "step": 230 },
    { "epoch": 0.79, "learning_rate": 3.715753424657534e-05, "loss": 6.7843, "step": 232 },
    { "epoch": 0.8, "learning_rate": 3.704337899543379e-05, "loss": 6.6052, "step": 234 },
    { "epoch": 0.81, "learning_rate": 3.6929223744292243e-05, "loss": 6.5319, "step": 236 },
    { "epoch": 0.82, "learning_rate": 3.6815068493150685e-05, "loss": 6.4869, "step": 238 },
    { "epoch": 0.82, "learning_rate": 3.670091324200913e-05, "loss": 6.7245, "step": 240 },
    { "epoch": 0.83, "learning_rate": 3.658675799086758e-05, "loss": 6.8325, "step": 242 },
    { "epoch": 0.84, "learning_rate": 3.647260273972603e-05, "loss": 6.6019, "step": 244 },
    { "epoch": 0.84, "learning_rate": 3.635844748858448e-05, "loss": 6.2614, "step": 246 },
    { "epoch": 0.85, "learning_rate": 3.6244292237442925e-05, "loss": 7.0083, "step": 248 },
    { "epoch": 0.86, "learning_rate": 3.613013698630137e-05, "loss": 6.4853, "step": 250 },
    { "epoch": 0.86, "learning_rate": 3.601598173515982e-05, "loss": 6.7758, "step": 252 },
    { "epoch": 0.87, "learning_rate": 3.590182648401826e-05, "loss": 6.7792, "step": 254 },
    { "epoch": 0.88, "learning_rate": 3.578767123287671e-05, "loss": 6.6991, "step": 256 },
    { "epoch": 0.88, "learning_rate": 3.5673515981735165e-05, "loss": 6.5657, "step": 258 },
    { "epoch": 0.89, "learning_rate": 3.555936073059361e-05, "loss": 6.7322, "step": 260 },
    { "epoch": 0.9, "learning_rate": 3.5445205479452054e-05, "loss": 6.4798, "step": 262 },
    { "epoch": 0.9, "learning_rate": 3.53310502283105e-05, "loss": 6.7884, "step": 264 },
    { "epoch": 0.91, "learning_rate": 3.521689497716895e-05, "loss": 6.424, "step": 266 },
    { "epoch": 0.92, "learning_rate": 3.51027397260274e-05, "loss": 6.6925, "step": 268 },
    { "epoch": 0.92, "learning_rate": 3.4988584474885846e-05, "loss": 7.0121, "step": 270 },
    { "epoch": 0.93, "learning_rate": 3.4874429223744294e-05, "loss": 6.8289, "step": 272 },
    { "epoch": 0.94, "learning_rate": 3.476027397260274e-05, "loss": 6.7504, "step": 274 },
    { "epoch": 0.95, "learning_rate": 3.464611872146119e-05, "loss": 6.6898, "step": 276 },
    { "epoch": 0.95, "learning_rate": 3.453196347031963e-05, "loss": 6.7379, "step": 278 },
    { "epoch": 0.96, "learning_rate": 3.4417808219178086e-05, "loss": 6.4448, "step": 280 },
    { "epoch": 0.97, "learning_rate": 3.4303652968036534e-05, "loss": 6.678, "step": 282 },
    { "epoch": 0.97, "learning_rate": 3.418949771689498e-05, "loss": 6.4519, "step": 284 },
    { "epoch": 0.98, "learning_rate": 3.407534246575342e-05, "loss": 6.5809, "step": 286 },
    { "epoch": 0.99, "learning_rate": 3.396118721461187e-05, "loss": 6.5462, "step": 288 },
    { "epoch": 0.99, "learning_rate": 3.384703196347032e-05, "loss": 6.4562, "step": 290 },
    { "epoch": 1.0, "learning_rate": 3.373287671232877e-05, "loss": 6.5566, "step": 292 },
    { "epoch": 1.01, "learning_rate": 3.3618721461187215e-05, "loss": 6.4929, "step": 294 },
    { "epoch": 1.01, "learning_rate": 3.350456621004566e-05, "loss": 6.5228, "step": 296 },
    { "epoch": 1.02, "learning_rate": 3.339041095890411e-05, "loss": 6.6275, "step": 298 },
    { "epoch": 1.03, "learning_rate": 3.327625570776256e-05, "loss": 6.2089, "step": 300 },
    { "epoch": 1.03, "eval_cer": 0.7978408997619243, "eval_loss": 6.618570327758789, "eval_runtime": 34.5444, "eval_samples_per_second": 16.877, "eval_steps_per_second": 2.113, "step": 300 },
    { "epoch": 1.03, "learning_rate": 3.316210045662101e-05, "loss": 6.3685, "step": 302 },
    { "epoch": 1.04, "learning_rate": 3.3047945205479455e-05, "loss": 6.1731, "step": 304 },
    { "epoch": 1.05, "learning_rate": 3.29337899543379e-05, "loss": 6.359, "step": 306 },
    { "epoch": 1.05, "learning_rate": 3.281963470319635e-05, "loss": 6.2789, "step": 308 },
    { "epoch": 1.06, "learning_rate": 3.270547945205479e-05, "loss": 6.2938, "step": 310 },
    { "epoch": 1.07, "learning_rate": 3.259132420091324e-05, "loss": 6.3706, "step": 312 },
    { "epoch": 1.08, "learning_rate": 3.247716894977169e-05, "loss": 6.4628, "step": 314 },
    { "epoch": 1.08, "learning_rate": 3.236301369863014e-05, "loss": 5.9578, "step": 316 },
    { "epoch": 1.09, "learning_rate": 3.2248858447488584e-05, "loss": 6.3769, "step": 318 },
    { "epoch": 1.1, "learning_rate": 3.213470319634703e-05, "loss": 6.7954, "step": 320 },
    { "epoch": 1.1, "learning_rate": 3.202054794520548e-05, "loss": 6.7632, "step": 322 },
    { "epoch": 1.11, "learning_rate": 3.190639269406393e-05, "loss": 6.3459, "step": 324 },
    { "epoch": 1.12, "learning_rate": 3.1792237442922376e-05, "loss": 6.4654, "step": 326 },
    { "epoch": 1.12, "learning_rate": 3.1678082191780824e-05, "loss": 6.6141, "step": 328 },
    { "epoch": 1.13, "learning_rate": 3.156392694063927e-05, "loss": 6.2931, "step": 330 },
    { "epoch": 1.14, "learning_rate": 3.144977168949772e-05, "loss": 5.9657, "step": 332 },
    { "epoch": 1.14, "learning_rate": 3.133561643835616e-05, "loss": 6.6218, "step": 334 },
    { "epoch": 1.15, "learning_rate": 3.122146118721461e-05, "loss": 6.2199, "step": 336 },
    { "epoch": 1.16, "learning_rate": 3.1107305936073064e-05, "loss": 6.5139, "step": 338 },
    { "epoch": 1.16, "learning_rate": 3.099315068493151e-05, "loss": 6.5002, "step": 340 },
    { "epoch": 1.17, "learning_rate": 3.087899543378995e-05, "loss": 6.3457, "step": 342 },
    { "epoch": 1.18, "learning_rate": 3.07648401826484e-05, "loss": 6.1393, "step": 344 },
    { "epoch": 1.18, "learning_rate": 3.065068493150685e-05, "loss": 6.0003, "step": 346 },
    { "epoch": 1.19, "learning_rate": 3.05365296803653e-05, "loss": 6.237, "step": 348 },
    { "epoch": 1.2, "learning_rate": 3.0422374429223742e-05, "loss": 6.4162, "step": 350 },
    { "epoch": 1.21, "learning_rate": 3.0308219178082193e-05, "loss": 6.4468, "step": 352 },
    { "epoch": 1.21, "learning_rate": 3.019406392694064e-05, "loss": 6.6031, "step": 354 },
    { "epoch": 1.22, "learning_rate": 3.007990867579909e-05, "loss": 6.3145, "step": 356 },
    { "epoch": 1.23, "learning_rate": 2.9965753424657534e-05, "loss": 6.3375, "step": 358 },
    { "epoch": 1.23, "learning_rate": 2.9851598173515982e-05, "loss": 6.4769, "step": 360 },
    { "epoch": 1.24, "learning_rate": 2.973744292237443e-05, "loss": 6.0631, "step": 362 },
    { "epoch": 1.25, "learning_rate": 2.962328767123288e-05, "loss": 6.2347, "step": 364 },
    { "epoch": 1.25, "learning_rate": 2.9509132420091322e-05, "loss": 6.0945, "step": 366 },
    { "epoch": 1.26, "learning_rate": 2.9394977168949774e-05, "loss": 6.2858, "step": 368 },
    { "epoch": 1.27, "learning_rate": 2.9280821917808222e-05, "loss": 6.3601, "step": 370 },
    { "epoch": 1.27, "learning_rate": 2.916666666666667e-05, "loss": 6.3449, "step": 372 },
    { "epoch": 1.28, "learning_rate": 2.9052511415525114e-05, "loss": 6.3585, "step": 374 },
    { "epoch": 1.29, "learning_rate": 2.8938356164383562e-05, "loss": 6.0235, "step": 376 },
    { "epoch": 1.29, "learning_rate": 2.882420091324201e-05, "loss": 6.1927, "step": 378 },
    { "epoch": 1.3, "learning_rate": 2.871004566210046e-05, "loss": 6.4136, "step": 380 },
    { "epoch": 1.31, "learning_rate": 2.8595890410958903e-05, "loss": 6.4073, "step": 382 },
    { "epoch": 1.32, "learning_rate": 2.848173515981735e-05, "loss": 6.4599, "step": 384 },
    { "epoch": 1.32, "learning_rate": 2.8367579908675802e-05, "loss": 6.1341, "step": 386 },
    { "epoch": 1.33, "learning_rate": 2.825342465753425e-05, "loss": 6.0782, "step": 388 },
    { "epoch": 1.34, "learning_rate": 2.813926940639269e-05, "loss": 6.1504, "step": 390 },
    { "epoch": 1.34, "learning_rate": 2.8025114155251143e-05, "loss": 6.3133, "step": 392 },
    { "epoch": 1.35, "learning_rate": 2.791095890410959e-05, "loss": 6.4356, "step": 394 },
    { "epoch": 1.36, "learning_rate": 2.779680365296804e-05, "loss": 6.4857, "step": 396 },
    { "epoch": 1.36, "learning_rate": 2.7682648401826484e-05, "loss": 6.4011, "step": 398 },
    { "epoch": 1.37, "learning_rate": 2.756849315068493e-05, "loss": 6.1637, "step": 400 },
    { "epoch": 1.37, "eval_cer": 0.9765208110992529, "eval_loss": 6.528327941894531, "eval_runtime": 37.1998, "eval_samples_per_second": 15.672, "eval_steps_per_second": 1.962, "step": 400 },
    { "epoch": 1.38, "learning_rate": 2.745433789954338e-05, "loss": 6.3984, "step": 402 },
    { "epoch": 1.38, "learning_rate": 2.734018264840183e-05, "loss": 6.0221, "step": 404 },
    { "epoch": 1.39, "learning_rate": 2.7226027397260272e-05, "loss": 6.2472, "step": 406 },
    { "epoch": 1.4, "learning_rate": 2.7111872146118723e-05, "loss": 6.333, "step": 408 },
    { "epoch": 1.4, "learning_rate": 2.699771689497717e-05, "loss": 6.2516, "step": 410 },
    { "epoch": 1.41, "learning_rate": 2.688356164383562e-05, "loss": 6.2314, "step": 412 },
    { "epoch": 1.42, "learning_rate": 2.6769406392694064e-05, "loss": 6.1203, "step": 414 },
    { "epoch": 1.42, "learning_rate": 2.6655251141552512e-05, "loss": 6.311, "step": 416 },
    { "epoch": 1.43, "learning_rate": 2.654109589041096e-05, "loss": 6.1741, "step": 418 },
    { "epoch": 1.44, "learning_rate": 2.6426940639269408e-05, "loss": 6.0226, "step": 420 },
    { "epoch": 1.45, "learning_rate": 2.6312785388127853e-05, "loss": 6.1573, "step": 422 },
    { "epoch": 1.45, "learning_rate": 2.61986301369863e-05, "loss": 6.3298, "step": 424 },
    { "epoch": 1.46, "learning_rate": 2.6084474885844752e-05, "loss": 6.3197, "step": 426 },
    { "epoch": 1.47, "learning_rate": 2.59703196347032e-05, "loss": 6.3265, "step": 428 },
    { "epoch": 1.47, "learning_rate": 2.5856164383561645e-05, "loss": 6.5904, "step": 430 },
    { "epoch": 1.48, "learning_rate": 2.5742009132420093e-05, "loss": 6.0295, "step": 432 },
    { "epoch": 1.49, "learning_rate": 2.562785388127854e-05, "loss": 6.2887, "step": 434 },
    { "epoch": 1.49, "learning_rate": 2.551369863013699e-05, "loss": 6.2315, "step": 436 },
    { "epoch": 1.5, "learning_rate": 2.5399543378995433e-05, "loss": 6.1131, "step": 438 },
    { "epoch": 1.51, "learning_rate": 2.528538812785388e-05, "loss": 6.0897, "step": 440 },
    { "epoch": 1.51, "learning_rate": 2.517123287671233e-05, "loss": 6.2543, "step": 442 },
    { "epoch": 1.52, "learning_rate": 2.505707762557078e-05, "loss": 6.2021, "step": 444 },
    { "epoch": 1.53, "learning_rate": 2.4942922374429225e-05, "loss": 6.2414, "step": 446 },
    { "epoch": 1.53, "learning_rate": 2.4828767123287673e-05, "loss": 6.3754, "step": 448 },
    { "epoch": 1.54, "learning_rate": 2.471461187214612e-05, "loss": 6.4199, "step": 450 },
    { "epoch": 1.55, "learning_rate": 2.4600456621004566e-05, "loss": 6.1184, "step": 452 },
    { "epoch": 1.55, "learning_rate": 2.4486301369863017e-05, "loss": 6.308, "step": 454 },
    { "epoch": 1.56, "learning_rate": 2.4372146118721462e-05, "loss": 6.4365, "step": 456 },
    { "epoch": 1.57, "learning_rate": 2.425799086757991e-05, "loss": 6.0473, "step": 458 },
    { "epoch": 1.58, "learning_rate": 2.4143835616438358e-05, "loss": 6.055, "step": 460 },
    { "epoch": 1.58, "learning_rate": 2.4029680365296806e-05, "loss": 6.0705, "step": 462 },
    { "epoch": 1.59, "learning_rate": 2.391552511415525e-05, "loss": 6.0425, "step": 464 },
    { "epoch": 1.6, "learning_rate": 2.3801369863013702e-05, "loss": 5.9382, "step": 466 },
    { "epoch": 1.6, "learning_rate": 2.3687214611872146e-05, "loss": 6.0501, "step": 468 },
    { "epoch": 1.61, "learning_rate": 2.3573059360730594e-05, "loss": 6.1394, "step": 470 },
    { "epoch": 1.62, "learning_rate": 2.3458904109589042e-05, "loss": 6.4409, "step": 472 },
    { "epoch": 1.62, "learning_rate": 2.334474885844749e-05, "loss": 6.1642, "step": 474 },
    { "epoch": 1.63, "learning_rate": 2.3230593607305935e-05, "loss": 6.1099, "step": 476 },
    { "epoch": 1.64, "learning_rate": 2.3116438356164386e-05, "loss": 6.1933, "step": 478 },
    { "epoch": 1.64, "learning_rate": 2.300228310502283e-05, "loss": 6.2047, "step": 480 },
    { "epoch": 1.65, "learning_rate": 2.288812785388128e-05, "loss": 6.0044, "step": 482 },
    { "epoch": 1.66, "learning_rate": 2.2773972602739727e-05, "loss": 6.0892, "step": 484 },
    { "epoch": 1.66, "learning_rate": 2.2659817351598175e-05, "loss": 6.245, "step": 486 },
    { "epoch": 1.67, "learning_rate": 2.2545662100456623e-05, "loss": 6.1838, "step": 488 },
    { "epoch": 1.68, "learning_rate": 2.243150684931507e-05, "loss": 5.7336, "step": 490 },
    { "epoch": 1.68, "learning_rate": 2.2317351598173515e-05, "loss": 6.1649, "step": 492 },
    { "epoch": 1.69, "learning_rate": 2.2203196347031967e-05, "loss": 6.001, "step": 494 },
    { "epoch": 1.7, "learning_rate": 2.208904109589041e-05, "loss": 6.6607, "step": 496 },
    { "epoch": 1.71, "learning_rate": 2.197488584474886e-05, "loss": 5.9693, "step": 498 },
    { "epoch": 1.71, "learning_rate": 2.1860730593607307e-05, "loss": 6.2159, "step": 500 },
    { "epoch": 1.71, "eval_cer": 0.8505459321894754, "eval_loss": 6.468786716461182, "eval_runtime": 38.2571, "eval_samples_per_second": 15.239, "eval_steps_per_second": 1.908, "step": 500 },
    { "epoch": 1.72, "learning_rate": 2.1746575342465755e-05, "loss": 6.5233, "step": 502 },
    { "epoch": 1.73, "learning_rate": 2.16324200913242e-05, "loss": 6.0243, "step": 504 },
    { "epoch": 1.73, "learning_rate": 2.151826484018265e-05, "loss": 6.0025, "step": 506 },
    { "epoch": 1.74, "learning_rate": 2.1404109589041096e-05, "loss": 6.19, "step": 508 },
    { "epoch": 1.75, "learning_rate": 2.1289954337899544e-05, "loss": 6.1283, "step": 510 },
    { "epoch": 1.75, "learning_rate": 2.1175799086757992e-05, "loss": 5.9461, "step": 512 },
    { "epoch": 1.76, "learning_rate": 2.106164383561644e-05, "loss": 6.163, "step": 514 },
    { "epoch": 1.77, "learning_rate": 2.0947488584474888e-05, "loss": 5.9075, "step": 516 },
    { "epoch": 1.77, "learning_rate": 2.0833333333333336e-05, "loss": 6.061, "step": 518 },
    { "epoch": 1.78, "learning_rate": 2.071917808219178e-05, "loss": 5.8001, "step": 520 },
    { "epoch": 1.79, "learning_rate": 2.060502283105023e-05, "loss": 6.3543, "step": 522 },
    { "epoch": 1.79, "learning_rate": 2.0490867579908677e-05, "loss": 6.1044, "step": 524 },
    { "epoch": 1.8, "learning_rate": 2.0376712328767125e-05, "loss": 6.0392, "step": 526 },
    { "epoch": 1.81, "learning_rate": 2.0262557077625573e-05, "loss": 6.3613, "step": 528 },
    { "epoch": 1.82, "learning_rate": 2.014840182648402e-05, "loss": 6.1969, "step": 530 },
    { "epoch": 1.82, "learning_rate": 2.0034246575342465e-05, "loss": 6.2186, "step": 532 },
    { "epoch": 1.83, "learning_rate": 1.9920091324200917e-05, "loss": 6.5516, "step": 534 },
    { "epoch": 1.84, "learning_rate": 1.980593607305936e-05, "loss": 6.2319, "step": 536 },
    { "epoch": 1.84, "learning_rate": 1.969178082191781e-05, "loss": 6.1139, "step": 538 },
    { "epoch": 1.85, "learning_rate": 1.9577625570776257e-05, "loss": 6.6036, "step": 540 },
    { "epoch": 1.86, "learning_rate": 1.9463470319634705e-05, "loss": 6.3099, "step": 542 },
    { "epoch": 1.86, "learning_rate": 1.934931506849315e-05, "loss": 6.2986, "step": 544 },
    { "epoch": 1.87, "learning_rate": 1.92351598173516e-05, "loss": 6.0614, "step": 546 },
    { "epoch": 1.88, "learning_rate": 1.9121004566210046e-05, "loss": 5.8707, "step": 548 },
    { "epoch": 1.88, "learning_rate": 1.9006849315068494e-05, "loss": 6.4741, "step": 550 },
    { "epoch": 1.89, "learning_rate": 1.8892694063926942e-05, "loss": 5.9351, "step": 552 },
    { "epoch": 1.9, "learning_rate": 1.877853881278539e-05, "loss": 6.3108, "step": 554 },
    { "epoch": 1.9, "learning_rate": 1.8664383561643838e-05, "loss": 5.8719, "step": 556 },
    { "epoch": 1.91, "learning_rate": 1.8550228310502286e-05, "loss": 5.9978, "step": 558 },
    { "epoch": 1.92, "learning_rate": 1.843607305936073e-05, "loss": 5.9883, "step": 560 },
    { "epoch": 1.92, "learning_rate": 1.832191780821918e-05, "loss": 5.7932, "step": 562 },
    { "epoch": 1.93, "learning_rate": 1.8207762557077626e-05, "loss": 6.1634, "step": 564 },
    { "epoch": 1.94, "learning_rate": 1.8093607305936074e-05, "loss": 6.2713, "step": 566 },
    { "epoch": 1.95, "learning_rate": 1.7979452054794522e-05, "loss": 6.2679, "step": 568 },
    { "epoch": 1.95, "learning_rate": 1.786529680365297e-05, "loss": 6.2969, "step": 570 },
    { "epoch": 1.96, "learning_rate": 1.7751141552511415e-05, "loss": 6.0192, "step": 572 },
    { "epoch": 1.97, "learning_rate": 1.7636986301369866e-05, "loss": 5.9912, "step": 574 },
    { "epoch": 1.97, "learning_rate": 1.752283105022831e-05, "loss": 6.2804, "step": 576 },
    { "epoch": 1.98, "learning_rate": 1.740867579908676e-05, "loss": 6.1972, "step": 578 },
    { "epoch": 1.99, "learning_rate": 1.7294520547945207e-05, "loss": 6.0236, "step": 580 },
    { "epoch": 1.99, "learning_rate": 1.7180365296803655e-05, "loss": 5.9499, "step": 582 },
    { "epoch": 2.0, "learning_rate": 1.70662100456621e-05, "loss": 6.1688, "step": 584 },
    { "epoch": 2.01, "learning_rate": 1.695205479452055e-05, "loss": 5.9289, "step": 586 },
    { "epoch": 2.01, "learning_rate": 1.6837899543378995e-05, "loss": 5.5866, "step": 588 },
    { "epoch": 2.02, "learning_rate": 1.6723744292237443e-05, "loss": 6.2259, "step": 590 },
    { "epoch": 2.03, "learning_rate": 1.660958904109589e-05, "loss": 5.9349, "step": 592 },
    { "epoch": 2.03, "learning_rate": 1.649543378995434e-05, "loss": 6.0916, "step": 594 },
    { "epoch": 2.04, "learning_rate": 1.6381278538812787e-05, "loss": 5.8322, "step": 596 },
    { "epoch": 2.05, "learning_rate": 1.6267123287671235e-05, "loss": 6.1475, "step": 598 },
    { "epoch": 2.05, "learning_rate": 1.615296803652968e-05, "loss": 6.1342, "step": 600 },
    { "epoch": 2.05, "eval_cer": 0.8386831951399721, "eval_loss": 6.42148494720459, "eval_runtime": 43.5835, "eval_samples_per_second": 13.377, "eval_steps_per_second": 1.675, "step": 600 },
    { "epoch": 2.06, "learning_rate": 1.603881278538813e-05, "loss": 5.7958, "step": 602 },
    { "epoch": 2.07, "learning_rate": 1.5924657534246576e-05, "loss": 6.3319, "step": 604 },
    { "epoch": 2.08, "learning_rate": 1.5810502283105024e-05, "loss": 6.181, "step": 606 },
    { "epoch": 2.08, "learning_rate": 1.5696347031963472e-05, "loss": 6.1645, "step": 608 },
    { "epoch": 2.09, "learning_rate": 1.558219178082192e-05, "loss": 5.8628, "step": 610 },
    { "epoch": 2.1, "learning_rate": 1.5468036529680365e-05, "loss": 6.1039, "step": 612 },
    { "epoch": 2.1, "learning_rate": 1.5353881278538816e-05, "loss": 6.196, "step": 614 },
    { "epoch": 2.11, "learning_rate": 1.523972602739726e-05, "loss": 6.0791, "step": 616 },
    { "epoch": 2.12, "learning_rate": 1.512557077625571e-05, "loss": 6.2871, "step": 618 },
    { "epoch": 2.12, "learning_rate": 1.5011415525114157e-05, "loss": 5.8731, "step": 620 },
    { "epoch": 2.13, "learning_rate": 1.4897260273972605e-05, "loss": 6.1108, "step": 622 },
    { "epoch": 2.14, "learning_rate": 1.4783105022831051e-05, "loss": 5.9225, "step": 624 },
    { "epoch": 2.14, "learning_rate": 1.4668949771689499e-05, "loss": 6.1894, "step": 626 },
    { "epoch": 2.15, "learning_rate": 1.4554794520547945e-05, "loss": 5.7433, "step": 628 },
    { "epoch": 2.16, "learning_rate": 1.4440639269406395e-05, "loss": 6.0826, "step": 630 },
    { "epoch": 2.16, "learning_rate": 1.4326484018264841e-05, "loss": 6.0097, "step": 632 },
    { "epoch": 2.17, "learning_rate": 1.421232876712329e-05, "loss": 5.8454, "step": 634 },
    { "epoch": 2.18, "learning_rate": 1.4098173515981735e-05, "loss": 6.1941, "step": 636 },
    { "epoch": 2.18, "learning_rate": 1.3984018264840185e-05, "loss": 5.8484, "step": 638 },
    { "epoch": 2.19, "learning_rate": 1.3869863013698631e-05, "loss": 6.328, "step": 640 },
    { "epoch": 2.2, "learning_rate": 1.375570776255708e-05, "loss": 6.3417, "step": 642 },
    { "epoch": 2.21, "learning_rate": 1.3641552511415526e-05, "loss": 6.1993, "step": 644 },
    { "epoch": 2.21, "learning_rate": 1.3527397260273974e-05, "loss": 6.0958, "step": 646 },
    { "epoch": 2.22, "learning_rate": 1.341324200913242e-05, "loss": 6.0639, "step": 648 },
    { "epoch": 2.23, "learning_rate": 1.329908675799087e-05, "loss": 6.0299, "step": 650 },
    { "epoch": 2.23, "learning_rate": 1.3184931506849316e-05, "loss": 6.1234, "step": 652 },
    { "epoch": 2.24, "learning_rate": 1.3070776255707764e-05, "loss": 6.0303, "step": 654 },
    { "epoch": 2.25, "learning_rate": 1.295662100456621e-05, "loss": 6.0098, "step": 656 },
    { "epoch": 2.25, "learning_rate": 1.284246575342466e-05, "loss": 5.9058, "step": 658 },
    { "epoch": 2.26, "learning_rate": 1.2728310502283106e-05, "loss": 6.2086, "step": 660 },
    { "epoch": 2.27, "learning_rate": 1.2614155251141554e-05, "loss": 6.1667, "step": 662 },
    { "epoch": 2.27, "learning_rate": 1.25e-05, "loss": 6.26, "step": 664 },
    { "epoch": 2.28, "learning_rate": 1.2385844748858449e-05, "loss": 5.698, "step": 666 },
    { "epoch": 2.29, "learning_rate": 1.2271689497716895e-05, "loss": 6.0933, "step": 668 },
    { "epoch": 2.29, "learning_rate": 1.2157534246575343e-05, "loss": 6.0448, "step": 670 },
    { "epoch": 2.3, "learning_rate": 1.2043378995433791e-05, "loss": 6.1017, "step": 672 },
    { "epoch": 2.31, "learning_rate": 1.1929223744292239e-05, "loss": 6.2529, "step": 674 },
    { "epoch": 2.32, "learning_rate": 1.1815068493150685e-05, "loss": 5.9125, "step": 676 },
    { "epoch": 2.32, "learning_rate": 1.1700913242009133e-05, "loss": 6.0414, "step": 678 },
    { "epoch": 2.33, "learning_rate": 1.1586757990867581e-05, "loss": 6.1012, "step": 680 },
    { "epoch": 2.34, "learning_rate": 1.1472602739726027e-05, "loss": 6.1578, "step": 682 },
    { "epoch": 2.34, "learning_rate": 1.1358447488584475e-05, "loss": 6.1924, "step": 684 },
    { "epoch": 2.35, "learning_rate": 1.1244292237442923e-05, "loss": 5.9418, "step": 686 },
    { "epoch": 2.36, "learning_rate": 1.113013698630137e-05, "loss": 6.0778, "step": 688 },
    { "epoch": 2.36, "learning_rate": 1.1015981735159818e-05, "loss": 6.1784, "step": 690 },
    { "epoch": 2.37, "learning_rate": 1.0901826484018266e-05, "loss": 6.2346, "step": 692 },
    { "epoch": 2.38, "learning_rate": 1.0787671232876714e-05, "loss": 6.0849, "step": 694 },
    { "epoch": 2.38, "learning_rate": 1.067351598173516e-05, "loss": 6.0025, "step": 696 },
    { "epoch": 2.39, "learning_rate": 1.0559360730593608e-05, "loss": 6.2221, "step": 698 },
    { "epoch": 2.4, "learning_rate": 1.0445205479452056e-05, "loss": 5.8295, "step": 700 },
    { "epoch": 2.4, "eval_cer": 0.7918069124045645, "eval_loss": 6.3999199867248535, "eval_runtime": 40.7886, "eval_samples_per_second": 14.293, "eval_steps_per_second": 1.79, "step": 700 },
    { "epoch": 2.4, "learning_rate": 1.0331050228310502e-05, "loss": 5.8848, "step": 702 },
    { "epoch": 2.41, "learning_rate": 1.021689497716895e-05, "loss": 6.0501, "step": 704 },
    { "epoch": 2.42, "learning_rate": 1.0102739726027398e-05, "loss": 6.1242, "step": 706 },
    { "epoch": 2.42, "learning_rate": 9.988584474885845e-06, "loss": 5.9263, "step": 708 },
    { "epoch": 2.43, "learning_rate": 9.874429223744293e-06, "loss": 5.897, "step": 710 },
    { "epoch": 2.44, "learning_rate": 9.76027397260274e-06, "loss": 6.3139, "step": 712 },
    { "epoch": 2.45, "learning_rate": 9.646118721461189e-06, "loss": 5.9507, "step": 714 },
    { "epoch": 2.45, "learning_rate": 9.531963470319635e-06, "loss": 5.9616, "step": 716 },
    { "epoch": 2.46, "learning_rate": 9.417808219178083e-06, "loss": 6.0359, "step": 718 },
    { "epoch": 2.47, "learning_rate": 9.303652968036531e-06, "loss": 5.8949, "step": 720 },
    { "epoch": 2.47, "learning_rate": 9.189497716894977e-06, "loss": 6.1793, "step": 722 },
    { "epoch": 2.48, "learning_rate": 9.075342465753425e-06, "loss": 5.9515, "step": 724 },
    { "epoch": 2.49, "learning_rate": 8.961187214611873e-06, "loss": 5.8504, "step": 726 },
    { "epoch": 2.49, "learning_rate": 8.84703196347032e-06, "loss": 6.0409, "step": 728 },
    { "epoch": 2.5, "learning_rate": 8.732876712328767e-06, "loss": 6.2968, "step": 730 },
    { "epoch": 2.51, "learning_rate": 8.618721461187215e-06, "loss": 5.8559, "step": 732 },
    { "epoch": 2.51, "learning_rate": 8.504566210045663e-06, "loss": 5.7911, "step": 734 },
    { "epoch": 2.52, "learning_rate": 8.39041095890411e-06, "loss": 6.1524, "step": 736 },
    { "epoch": 2.53, "learning_rate": 8.276255707762558e-06, "loss": 5.7204, "step": 738 },
    { "epoch": 2.53, "learning_rate": 8.162100456621006e-06, "loss": 6.1016, "step": 740 },
    { "epoch": 2.54, "learning_rate": 8.047945205479452e-06, "loss": 5.9837, "step": 742 },
    { "epoch": 2.55, "learning_rate": 7.9337899543379e-06, "loss": 5.8908, "step": 744 },
    { "epoch": 2.55, "learning_rate": 7.819634703196348e-06, "loss": 5.843, "step": 746 },
    { "epoch": 2.56, "learning_rate": 7.705479452054794e-06, "loss": 5.9089, "step": 748 },
    { "epoch": 2.57, "learning_rate": 7.591324200913242e-06, "loss": 6.0063, "step": 750 },
    { "epoch": 2.58, "learning_rate": 7.47716894977169e-06, "loss": 5.9461, "step": 752 },
    { "epoch": 2.58, "learning_rate": 7.3630136986301374e-06, "loss": 5.6393, "step": 754 },
    { "epoch": 2.59, "learning_rate": 7.2488584474885854e-06, "loss": 6.0984, "step": 756 },
    { "epoch": 2.6, "learning_rate": 7.134703196347033e-06, "loss": 5.7783, "step": 758 },
    { "epoch": 2.6, "learning_rate": 7.02054794520548e-06, "loss": 5.8534, "step": 760 },
    { "epoch": 2.61, "learning_rate": 6.906392694063928e-06, "loss": 5.9853, "step": 762 },
    { "epoch": 2.62, "learning_rate": 6.792237442922375e-06, "loss": 6.2198, "step": 764 },
    { "epoch": 2.62, "learning_rate": 6.678082191780823e-06, "loss": 6.1982, "step": 766 },
    { "epoch": 2.63, "learning_rate": 6.56392694063927e-06, "loss": 5.9753, "step": 768 },
    { "epoch": 2.64, "learning_rate": 6.449771689497717e-06, "loss": 5.806, "step": 770 },
    { "epoch": 2.64, "learning_rate": 6.335616438356165e-06, "loss": 5.8563, "step": 772 },
    { "epoch": 2.65, "learning_rate": 6.2214611872146115e-06, "loss": 6.0158, "step": 774 },
    { "epoch": 2.66, "learning_rate": 6.1073059360730594e-06, "loss": 6.2569, "step": 776 },
    { "epoch": 2.66, "learning_rate": 5.993150684931507e-06, "loss": 5.9379, "step": 778 },
    { "epoch": 2.67, "learning_rate": 5.878995433789955e-06, "loss": 5.914, "step": 780 },
    { "epoch": 2.68, "learning_rate": 5.764840182648402e-06, "loss": 5.847, "step": 782 },
    { "epoch": 2.68, "learning_rate": 5.650684931506849e-06, "loss": 5.9925, "step": 784 },
    { "epoch": 2.69, "learning_rate": 5.536529680365297e-06, "loss": 5.7515, "step": 786 },
    { "epoch": 2.7, "learning_rate": 5.422374429223744e-06, "loss": 6.0283, "step": 788 },
    { "epoch": 2.71, "learning_rate": 5.308219178082192e-06, "loss": 5.8207, "step": 790 },
    { "epoch": 2.71, "learning_rate": 5.194063926940639e-06, "loss": 5.9529, "step": 792 },
    { "epoch": 2.72, "learning_rate": 5.079908675799086e-06, "loss": 6.3816, "step": 794 },
    { "epoch": 2.73, "learning_rate": 4.965753424657534e-06, "loss": 5.921, "step": 796 },
    { "epoch": 2.73, "learning_rate": 4.8515981735159814e-06, "loss": 5.9369, "step": 798 },
    { "epoch": 2.74, "learning_rate": 4.7374429223744294e-06, "loss": 6.0542, "step": 800 },
    { "epoch": 2.74, "eval_cer": 0.7485838601100074, "eval_loss": 6.38665771484375, "eval_runtime": 43.0264, "eval_samples_per_second": 13.55, "eval_steps_per_second": 1.697, "step": 800 },
    { "epoch": 2.75, "learning_rate": 4.623287671232877e-06, "loss": 5.6539, "step": 802 },
    { "epoch": 2.75, "learning_rate": 4.509132420091325e-06, "loss": 6.0837, "step": 804 },
    { "epoch": 2.76, "learning_rate": 4.394977168949772e-06, "loss": 5.5705, "step": 806 },
    { "epoch": 2.77, "learning_rate": 4.280821917808219e-06, "loss": 5.797, "step": 808 },
    { "epoch": 2.77, "learning_rate": 4.166666666666667e-06, "loss": 6.2038, "step": 810 },
    { "epoch": 2.78, "learning_rate": 4.052511415525114e-06, "loss": 5.8418, "step": 812 },
    { "epoch": 2.79, "learning_rate": 3.938356164383562e-06, "loss": 5.9495, "step": 814 },
    { "epoch": 2.79, "learning_rate": 3.824200913242009e-06, "loss": 6.2109, "step": 816 },
    { "epoch": 2.8, "learning_rate": 3.7100456621004567e-06, "loss": 5.8276, "step": 818 },
    { "epoch": 2.81, "learning_rate": 3.5958904109589043e-06, "loss": 5.8859, "step": 820 },
    { "epoch": 2.82, "learning_rate": 3.4817351598173514e-06, "loss": 5.9916, "step": 822 },
    { "epoch": 2.82, "learning_rate": 3.367579908675799e-06, "loss": 6.2421, "step": 824 },
    { "epoch": 2.83, "learning_rate": 3.2534246575342466e-06, "loss": 5.9774, "step": 826 },
    { "epoch": 2.84, "learning_rate": 3.139269406392694e-06, "loss": 5.6032, "step": 828 },
    { "epoch": 2.84, "learning_rate": 3.0251141552511417e-06, "loss": 6.0698, "step": 830 },
    { "epoch": 2.85, "learning_rate": 2.910958904109589e-06, "loss": 6.1411, "step": 832 },
    { "epoch": 2.86, "learning_rate": 2.7968036529680364e-06, "loss": 6.0411, "step": 834 },
    { "epoch": 2.86, "learning_rate": 2.682648401826484e-06, "loss": 5.9259, "step": 836 },
    { "epoch": 2.87, "learning_rate": 2.5684931506849316e-06, "loss": 6.1342, "step": 838 },
    { "epoch": 2.88, "learning_rate": 2.454337899543379e-06, "loss": 5.9385, "step": 840 },
    { "epoch": 2.88, "learning_rate": 2.3401826484018263e-06, "loss": 6.1046, "step": 842 },
    { "epoch": 2.89, "learning_rate": 2.226027397260274e-06, "loss": 5.5534, "step": 844 },
    { "epoch": 2.9, "learning_rate": 2.1118721461187214e-06, "loss": 6.0064, "step": 846 },
    { "epoch": 2.9, "learning_rate": 1.997716894977169e-06, "loss": 5.9357, "step": 848 },
    { "epoch": 2.91, "learning_rate": 1.8835616438356164e-06, "loss": 5.591, "step": 850 },
    { "epoch": 2.92, "learning_rate": 1.769406392694064e-06, "loss": 6.0179, "step": 852 },
    { "epoch": 2.92, "learning_rate": 1.6552511415525115e-06, "loss": 6.0487, "step": 854 },
    { "epoch": 2.93, "learning_rate": 1.5410958904109589e-06, "loss": 6.0967, "step": 856 },
    { "epoch": 2.94, "learning_rate": 1.4269406392694064e-06, "loss": 6.1703, "step": 858 },
    { "epoch": 2.95, "learning_rate": 1.3127853881278538e-06, "loss": 5.8776, "step": 860 },
    { "epoch": 2.95, "learning_rate": 1.1986301369863014e-06, "loss": 5.8851, "step": 862 },
    { "epoch": 2.96, "learning_rate": 1.084474885844749e-06, "loss": 5.7484, "step": 864 },
    { "epoch": 2.97, "learning_rate": 9.703196347031963e-07, "loss": 6.1194, "step": 866 },
    { "epoch": 2.97, "learning_rate": 8.561643835616439e-07, "loss": 5.8983, "step": 868 },
    { "epoch": 2.98, "learning_rate": 7.420091324200913e-07, "loss": 6.2007, "step": 870 },
    { "epoch": 2.99, "learning_rate": 6.278538812785388e-07, "loss": 5.9643, "step": 872 },
    { "epoch": 2.99, "learning_rate": 5.136986301369863e-07, "loss": 5.7904, "step": 874 },
    { "epoch": 3.0, "learning_rate": 3.9954337899543377e-07, "loss": 5.9321, "step": 876 },
    { "epoch": 3.0, "step": 876, "total_flos": 8.368943131860664e+17, "train_loss": 6.569673863720132, "train_runtime": 1471.7945, "train_samples_per_second": 4.753, "train_steps_per_second": 0.595 }
  ],
  "max_steps": 876,
  "num_train_epochs": 3,
  "total_flos": 8.368943131860664e+17,
  "trial_name": null,
  "trial_params": null
}