{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 265,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.02, "learning_rate": 4.9811320754716985e-05, "loss": 18.4602, "step": 1 },
    { "epoch": 0.04, "learning_rate": 4.962264150943397e-05, "loss": 16.3339, "step": 2 },
    { "epoch": 0.06, "learning_rate": 4.943396226415095e-05, "loss": 14.7128, "step": 3 },
    { "epoch": 0.08, "learning_rate": 4.9245283018867924e-05, "loss": 13.1405, "step": 4 },
    { "epoch": 0.09, "learning_rate": 4.9056603773584906e-05, "loss": 12.334, "step": 5 },
    { "epoch": 0.11, "learning_rate": 4.886792452830189e-05, "loss": 11.5351, "step": 6 },
    { "epoch": 0.13, "learning_rate": 4.867924528301887e-05, "loss": 11.2585, "step": 7 },
    { "epoch": 0.15, "learning_rate": 4.849056603773585e-05, "loss": 10.6678, "step": 8 },
    { "epoch": 0.17, "learning_rate": 4.8301886792452835e-05, "loss": 10.3116, "step": 9 },
    { "epoch": 0.19, "learning_rate": 4.811320754716982e-05, "loss": 10.1386, "step": 10 },
    { "epoch": 0.21, "learning_rate": 4.792452830188679e-05, "loss": 9.7791, "step": 11 },
    { "epoch": 0.23, "learning_rate": 4.7735849056603775e-05, "loss": 9.4858, "step": 12 },
    { "epoch": 0.25, "learning_rate": 4.754716981132076e-05, "loss": 9.4354, "step": 13 },
    { "epoch": 0.26, "learning_rate": 4.735849056603774e-05, "loss": 9.1703, "step": 14 },
    { "epoch": 0.28, "learning_rate": 4.716981132075472e-05, "loss": 9.0127, "step": 15 },
    { "epoch": 0.3, "learning_rate": 4.6981132075471704e-05, "loss": 8.978, "step": 16 },
    { "epoch": 0.32, "learning_rate": 4.679245283018868e-05, "loss": 8.8674, "step": 17 },
    { "epoch": 0.34, "learning_rate": 4.660377358490566e-05, "loss": 8.7663, "step": 18 },
    { "epoch": 0.36, "learning_rate": 4.641509433962264e-05, "loss": 8.8133, "step": 19 },
    { "epoch": 0.38, "learning_rate": 4.6226415094339625e-05, "loss": 8.5456, "step": 20 },
    { "epoch": 0.4, "learning_rate": 4.603773584905661e-05, "loss": 8.4904, "step": 21 },
    { "epoch": 0.42, "learning_rate": 4.584905660377359e-05, "loss": 8.4489, "step": 22 },
    { "epoch": 0.43, "learning_rate": 4.566037735849057e-05, "loss": 8.4346, "step": 23 },
    { "epoch": 0.45, "learning_rate": 4.547169811320755e-05, "loss": 8.1324, "step": 24 },
    { "epoch": 0.47, "learning_rate": 4.528301886792453e-05, "loss": 8.1567, "step": 25 },
    { "epoch": 0.49, "learning_rate": 4.509433962264151e-05, "loss": 8.1832, "step": 26 },
    { "epoch": 0.51, "learning_rate": 4.4905660377358494e-05, "loss": 8.1429, "step": 27 },
    { "epoch": 0.53, "learning_rate": 4.4716981132075476e-05, "loss": 7.9504, "step": 28 },
    { "epoch": 0.55, "learning_rate": 4.452830188679246e-05, "loss": 7.9522, "step": 29 },
    { "epoch": 0.57, "learning_rate": 4.433962264150944e-05, "loss": 7.7705, "step": 30 },
    { "epoch": 0.58, "learning_rate": 4.4150943396226416e-05, "loss": 7.7457, "step": 31 },
    { "epoch": 0.6, "learning_rate": 4.39622641509434e-05, "loss": 7.8053, "step": 32 },
    { "epoch": 0.62, "learning_rate": 4.377358490566038e-05, "loss": 7.7519, "step": 33 },
    { "epoch": 0.64, "learning_rate": 4.358490566037736e-05, "loss": 7.8011, "step": 34 },
    { "epoch": 0.66, "learning_rate": 4.3396226415094345e-05, "loss": 7.7119, "step": 35 },
    { "epoch": 0.68, "learning_rate": 4.320754716981133e-05, "loss": 7.4472, "step": 36 },
    { "epoch": 0.7, "learning_rate": 4.301886792452831e-05, "loss": 7.5688, "step": 37 },
    { "epoch": 0.72, "learning_rate": 4.2830188679245284e-05, "loss": 7.3592, "step": 38 },
    { "epoch": 0.74, "learning_rate": 4.2641509433962266e-05, "loss": 7.4869, "step": 39 },
    { "epoch": 0.75, "learning_rate": 4.245283018867925e-05, "loss": 7.3489, "step": 40 },
    { "epoch": 0.77, "learning_rate": 4.226415094339623e-05, "loss": 7.412, "step": 41 },
    { "epoch": 0.79, "learning_rate": 4.207547169811321e-05, "loss": 7.2706, "step": 42 },
    { "epoch": 0.81, "learning_rate": 4.1886792452830195e-05, "loss": 7.2465, "step": 43 },
    { "epoch": 0.83, "learning_rate": 4.169811320754717e-05, "loss": 7.337, "step": 44 },
    { "epoch": 0.85, "learning_rate": 4.150943396226415e-05, "loss": 7.2097, "step": 45 },
    { "epoch": 0.87, "learning_rate": 4.1320754716981135e-05, "loss": 7.2128, "step": 46 },
    { "epoch": 0.89, "learning_rate": 4.113207547169812e-05, "loss": 7.2417, "step": 47 },
    { "epoch": 0.91, "learning_rate": 4.09433962264151e-05, "loss": 7.2044, "step": 48 },
    { "epoch": 0.92, "learning_rate": 4.075471698113208e-05, "loss": 7.1435, "step": 49 },
    { "epoch": 0.94, "learning_rate": 4.0566037735849064e-05, "loss": 7.075, "step": 50 },
    { "epoch": 0.96, "learning_rate": 4.037735849056604e-05, "loss": 6.9571, "step": 51 },
    { "epoch": 0.98, "learning_rate": 4.018867924528302e-05, "loss": 6.7984, "step": 52 },
    { "epoch": 1.0, "learning_rate": 4e-05, "loss": 6.7736, "step": 53 },
    { "epoch": 1.02, "learning_rate": 3.9811320754716985e-05, "loss": 6.8625, "step": 54 },
    { "epoch": 1.04, "learning_rate": 3.962264150943397e-05, "loss": 7.0221, "step": 55 },
    { "epoch": 1.06, "learning_rate": 3.943396226415095e-05, "loss": 6.969, "step": 56 },
    { "epoch": 1.08, "learning_rate": 3.924528301886793e-05, "loss": 6.9754, "step": 57 },
    { "epoch": 1.09, "learning_rate": 3.905660377358491e-05, "loss": 6.9933, "step": 58 },
    { "epoch": 1.11, "learning_rate": 3.886792452830189e-05, "loss": 6.9458, "step": 59 },
    { "epoch": 1.13, "learning_rate": 3.867924528301887e-05, "loss": 6.9104, "step": 60 },
    { "epoch": 1.15, "learning_rate": 3.8490566037735854e-05, "loss": 6.809, "step": 61 },
    { "epoch": 1.17, "learning_rate": 3.8301886792452836e-05, "loss": 6.7712, "step": 62 },
    { "epoch": 1.19, "learning_rate": 3.811320754716982e-05, "loss": 6.671, "step": 63 },
    { "epoch": 1.21, "learning_rate": 3.7924528301886794e-05, "loss": 6.7017, "step": 64 },
    { "epoch": 1.23, "learning_rate": 3.7735849056603776e-05, "loss": 6.7152, "step": 65 },
    { "epoch": 1.25, "learning_rate": 3.754716981132076e-05, "loss": 6.7993, "step": 66 },
    { "epoch": 1.26, "learning_rate": 3.735849056603773e-05, "loss": 6.6646, "step": 67 },
    { "epoch": 1.28, "learning_rate": 3.7169811320754716e-05, "loss": 6.6046, "step": 68 },
    { "epoch": 1.3, "learning_rate": 3.69811320754717e-05, "loss": 6.6799, "step": 69 },
    { "epoch": 1.32, "learning_rate": 3.679245283018868e-05, "loss": 6.5709, "step": 70 },
    { "epoch": 1.34, "learning_rate": 3.660377358490566e-05, "loss": 6.5575, "step": 71 },
    { "epoch": 1.36, "learning_rate": 3.641509433962264e-05, "loss": 6.8022, "step": 72 },
    { "epoch": 1.38, "learning_rate": 3.622641509433962e-05, "loss": 6.6274, "step": 73 },
    { "epoch": 1.4, "learning_rate": 3.60377358490566e-05, "loss": 6.519, "step": 74 },
    { "epoch": 1.42, "learning_rate": 3.5849056603773584e-05, "loss": 6.4789, "step": 75 },
    { "epoch": 1.43, "learning_rate": 3.5660377358490566e-05, "loss": 6.5077, "step": 76 },
    { "epoch": 1.45, "learning_rate": 3.547169811320755e-05, "loss": 6.4625, "step": 77 },
    { "epoch": 1.47, "learning_rate": 3.528301886792453e-05, "loss": 6.6545, "step": 78 },
    { "epoch": 1.49, "learning_rate": 3.5094339622641506e-05, "loss": 6.58, "step": 79 },
    { "epoch": 1.51, "learning_rate": 3.490566037735849e-05, "loss": 6.7074, "step": 80 },
    { "epoch": 1.53, "learning_rate": 3.471698113207547e-05, "loss": 6.5293, "step": 81 },
    { "epoch": 1.55, "learning_rate": 3.452830188679245e-05, "loss": 6.4572, "step": 82 },
    { "epoch": 1.57, "learning_rate": 3.4339622641509435e-05, "loss": 6.4919, "step": 83 },
    { "epoch": 1.58, "learning_rate": 3.415094339622642e-05, "loss": 6.4721, "step": 84 },
    { "epoch": 1.6, "learning_rate": 3.39622641509434e-05, "loss": 6.4396, "step": 85 },
    { "epoch": 1.62, "learning_rate": 3.3773584905660374e-05, "loss": 6.4422, "step": 86 },
    { "epoch": 1.64, "learning_rate": 3.3584905660377356e-05, "loss": 6.6386, "step": 87 },
    { "epoch": 1.66, "learning_rate": 3.339622641509434e-05, "loss": 6.3216, "step": 88 },
    { "epoch": 1.68, "learning_rate": 3.320754716981132e-05, "loss": 6.5511, "step": 89 },
    { "epoch": 1.7, "learning_rate": 3.30188679245283e-05, "loss": 6.3281, "step": 90 },
    { "epoch": 1.72, "learning_rate": 3.2830188679245285e-05, "loss": 6.3153, "step": 91 },
    { "epoch": 1.74, "learning_rate": 3.264150943396227e-05, "loss": 6.3444, "step": 92 },
    { "epoch": 1.75, "learning_rate": 3.245283018867924e-05, "loss": 6.4761, "step": 93 },
    { "epoch": 1.77, "learning_rate": 3.2264150943396225e-05, "loss": 6.4824, "step": 94 },
    { "epoch": 1.79, "learning_rate": 3.207547169811321e-05, "loss": 6.4817, "step": 95 },
    { "epoch": 1.81, "learning_rate": 3.188679245283019e-05, "loss": 6.4784, "step": 96 },
    { "epoch": 1.83, "learning_rate": 3.169811320754717e-05, "loss": 6.3177, "step": 97 },
    { "epoch": 1.85, "learning_rate": 3.1509433962264154e-05, "loss": 6.304, "step": 98 },
    { "epoch": 1.87, "learning_rate": 3.132075471698113e-05, "loss": 6.176, "step": 99 },
    { "epoch": 1.89, "learning_rate": 3.113207547169811e-05, "loss": 6.3501, "step": 100 },
    { "epoch": 1.91, "learning_rate": 3.094339622641509e-05, "loss": 6.1981, "step": 101 },
    { "epoch": 1.92, "learning_rate": 3.0754716981132075e-05, "loss": 6.2568, "step": 102 },
    { "epoch": 1.94, "learning_rate": 3.056603773584906e-05, "loss": 6.3171, "step": 103 },
    { "epoch": 1.96, "learning_rate": 3.0377358490566036e-05, "loss": 6.2815, "step": 104 },
    { "epoch": 1.98, "learning_rate": 3.018867924528302e-05, "loss": 6.2513, "step": 105 },
    { "epoch": 2.0, "learning_rate": 3e-05, "loss": 6.0677, "step": 106 },
    { "epoch": 2.02, "learning_rate": 2.9811320754716983e-05, "loss": 6.3859, "step": 107 },
    { "epoch": 2.04, "learning_rate": 2.9622641509433962e-05, "loss": 6.2803, "step": 108 },
    { "epoch": 2.06, "learning_rate": 2.9433962264150944e-05, "loss": 6.4167, "step": 109 },
    { "epoch": 2.08, "learning_rate": 2.9245283018867926e-05, "loss": 6.234, "step": 110 },
    { "epoch": 2.09, "learning_rate": 2.9056603773584905e-05, "loss": 6.3275, "step": 111 },
    { "epoch": 2.11, "learning_rate": 2.8867924528301887e-05, "loss": 6.2766, "step": 112 },
    { "epoch": 2.13, "learning_rate": 2.867924528301887e-05, "loss": 6.3146, "step": 113 },
    { "epoch": 2.15, "learning_rate": 2.8490566037735848e-05, "loss": 6.1047, "step": 114 },
    { "epoch": 2.17, "learning_rate": 2.830188679245283e-05, "loss": 6.1752, "step": 115 },
    { "epoch": 2.19, "learning_rate": 2.8113207547169812e-05, "loss": 6.2028, "step": 116 },
    { "epoch": 2.21, "learning_rate": 2.7924528301886794e-05, "loss": 6.0047, "step": 117 },
    { "epoch": 2.23, "learning_rate": 2.7735849056603773e-05, "loss": 6.0241, "step": 118 },
    { "epoch": 2.25, "learning_rate": 2.7547169811320755e-05, "loss": 6.25, "step": 119 },
    { "epoch": 2.26, "learning_rate": 2.7358490566037738e-05, "loss": 6.2054, "step": 120 },
    { "epoch": 2.28, "learning_rate": 2.7169811320754716e-05, "loss": 6.1963, "step": 121 },
    { "epoch": 2.3, "learning_rate": 2.69811320754717e-05, "loss": 6.1233, "step": 122 },
    { "epoch": 2.32, "learning_rate": 2.679245283018868e-05, "loss": 6.1879, "step": 123 },
    { "epoch": 2.34, "learning_rate": 2.6603773584905663e-05, "loss": 6.1532, "step": 124 },
    { "epoch": 2.36, "learning_rate": 2.641509433962264e-05, "loss": 5.8332, "step": 125 },
    { "epoch": 2.38, "learning_rate": 2.6226415094339624e-05, "loss": 5.9921, "step": 126 },
    { "epoch": 2.4, "learning_rate": 2.6037735849056606e-05, "loss": 6.08, "step": 127 },
    { "epoch": 2.42, "learning_rate": 2.5849056603773585e-05, "loss": 5.9691, "step": 128 },
    { "epoch": 2.43, "learning_rate": 2.5660377358490567e-05, "loss": 5.8631, "step": 129 },
    { "epoch": 2.45, "learning_rate": 2.547169811320755e-05, "loss": 5.9265, "step": 130 },
    { "epoch": 2.47, "learning_rate": 2.5283018867924528e-05, "loss": 6.0456, "step": 131 },
    { "epoch": 2.49, "learning_rate": 2.509433962264151e-05, "loss": 6.0374, "step": 132 },
    { "epoch": 2.51, "learning_rate": 2.4905660377358492e-05, "loss": 6.0419, "step": 133 },
    { "epoch": 2.53, "learning_rate": 2.4716981132075474e-05, "loss": 5.9543, "step": 134 },
    { "epoch": 2.55, "learning_rate": 2.4528301886792453e-05, "loss": 6.0268, "step": 135 },
    { "epoch": 2.57, "learning_rate": 2.4339622641509435e-05, "loss": 5.9925, "step": 136 },
    { "epoch": 2.58, "learning_rate": 2.4150943396226418e-05, "loss": 6.16, "step": 137 },
    { "epoch": 2.6, "learning_rate": 2.3962264150943396e-05, "loss": 5.9924, "step": 138 },
    { "epoch": 2.62, "learning_rate": 2.377358490566038e-05, "loss": 5.8427, "step": 139 },
    { "epoch": 2.64, "learning_rate": 2.358490566037736e-05, "loss": 5.9679, "step": 140 },
    { "epoch": 2.66, "learning_rate": 2.339622641509434e-05, "loss": 6.1985, "step": 141 },
    { "epoch": 2.68, "learning_rate": 2.320754716981132e-05, "loss": 5.9773, "step": 142 },
    { "epoch": 2.7, "learning_rate": 2.3018867924528304e-05, "loss": 5.9543, "step": 143 },
    { "epoch": 2.72, "learning_rate": 2.2830188679245286e-05, "loss": 6.0227, "step": 144 },
    { "epoch": 2.74, "learning_rate": 2.2641509433962265e-05, "loss": 5.9294, "step": 145 },
    { "epoch": 2.75, "learning_rate": 2.2452830188679247e-05, "loss": 6.1774, "step": 146 },
    { "epoch": 2.77, "learning_rate": 2.226415094339623e-05, "loss": 6.0009, "step": 147 },
    { "epoch": 2.79, "learning_rate": 2.2075471698113208e-05, "loss": 5.9207, "step": 148 },
    { "epoch": 2.81, "learning_rate": 2.188679245283019e-05, "loss": 5.9735, "step": 149 },
    { "epoch": 2.83, "learning_rate": 2.1698113207547172e-05, "loss": 5.9751, "step": 150 },
    { "epoch": 2.85, "learning_rate": 2.1509433962264154e-05, "loss": 5.8208, "step": 151 },
    { "epoch": 2.87, "learning_rate": 2.1320754716981133e-05, "loss": 5.8645, "step": 152 },
    { "epoch": 2.89, "learning_rate": 2.1132075471698115e-05, "loss": 5.9207, "step": 153 },
    { "epoch": 2.91, "learning_rate": 2.0943396226415098e-05, "loss": 5.972, "step": 154 },
    { "epoch": 2.92, "learning_rate": 2.0754716981132076e-05, "loss": 5.8784, "step": 155 },
    { "epoch": 2.94, "learning_rate": 2.056603773584906e-05, "loss": 6.0364, "step": 156 },
    { "epoch": 2.96, "learning_rate": 2.037735849056604e-05, "loss": 6.0571, "step": 157 },
    { "epoch": 2.98, "learning_rate": 2.018867924528302e-05, "loss": 5.8623, "step": 158 },
    { "epoch": 3.0, "learning_rate": 2e-05, "loss": 5.6061, "step": 159 },
    { "epoch": 3.02, "learning_rate": 1.9811320754716984e-05, "loss": 5.8306, "step": 160 },
    { "epoch": 3.04, "learning_rate": 1.9622641509433966e-05, "loss": 6.0213, "step": 161 },
    { "epoch": 3.06, "learning_rate": 1.9433962264150945e-05, "loss": 6.022, "step": 162 },
    { "epoch": 3.08, "learning_rate": 1.9245283018867927e-05, "loss": 5.9523, "step": 163 },
    { "epoch": 3.09, "learning_rate": 1.905660377358491e-05, "loss": 5.8842, "step": 164 },
    { "epoch": 3.11, "learning_rate": 1.8867924528301888e-05, "loss": 5.9051, "step": 165 },
    { "epoch": 3.13, "learning_rate": 1.8679245283018867e-05, "loss": 5.91, "step": 166 },
    { "epoch": 3.15, "learning_rate": 1.849056603773585e-05, "loss": 5.7862, "step": 167 },
    { "epoch": 3.17, "learning_rate": 1.830188679245283e-05, "loss": 5.8941, "step": 168 },
    { "epoch": 3.19, "learning_rate": 1.811320754716981e-05, "loss": 6.0136, "step": 169 },
    { "epoch": 3.21, "learning_rate": 1.7924528301886792e-05, "loss": 5.7909, "step": 170 },
    { "epoch": 3.23, "learning_rate": 1.7735849056603774e-05, "loss": 5.8893, "step": 171 },
    { "epoch": 3.25, "learning_rate": 1.7547169811320753e-05, "loss": 5.8309, "step": 172 },
    { "epoch": 3.26, "learning_rate": 1.7358490566037735e-05, "loss": 5.6179, "step": 173 },
    { "epoch": 3.28, "learning_rate": 1.7169811320754717e-05, "loss": 5.8054, "step": 174 },
    { "epoch": 3.3, "learning_rate": 1.69811320754717e-05, "loss": 5.8038, "step": 175 },
    { "epoch": 3.32, "learning_rate": 1.6792452830188678e-05, "loss": 5.8717, "step": 176 },
    { "epoch": 3.34, "learning_rate": 1.660377358490566e-05, "loss": 5.7068, "step": 177 },
    { "epoch": 3.36, "learning_rate": 1.6415094339622643e-05, "loss": 5.8516, "step": 178 },
    { "epoch": 3.38, "learning_rate": 1.622641509433962e-05, "loss": 5.7989, "step": 179 },
    { "epoch": 3.4, "learning_rate": 1.6037735849056604e-05, "loss": 5.9828, "step": 180 },
    { "epoch": 3.42, "learning_rate": 1.5849056603773586e-05, "loss": 5.8526, "step": 181 },
    { "epoch": 3.43, "learning_rate": 1.5660377358490564e-05, "loss": 5.9012, "step": 182 },
    { "epoch": 3.45, "learning_rate": 1.5471698113207547e-05, "loss": 5.9608, "step": 183 },
    { "epoch": 3.47, "learning_rate": 1.528301886792453e-05, "loss": 5.8062, "step": 184 },
    { "epoch": 3.49, "learning_rate": 1.509433962264151e-05, "loss": 5.7624, "step": 185 },
    { "epoch": 3.51, "learning_rate": 1.4905660377358491e-05, "loss": 5.8388, "step": 186 },
    { "epoch": 3.53, "learning_rate": 1.4716981132075472e-05, "loss": 5.8675, "step": 187 },
    { "epoch": 3.55, "learning_rate": 1.4528301886792452e-05, "loss": 5.971, "step": 188 },
    { "epoch": 3.57, "learning_rate": 1.4339622641509435e-05, "loss": 5.8333, "step": 189 },
    { "epoch": 3.58, "learning_rate": 1.4150943396226415e-05, "loss": 5.9516, "step": 190 },
    { "epoch": 3.6, "learning_rate": 1.3962264150943397e-05, "loss": 5.8128, "step": 191 },
    { "epoch": 3.62, "learning_rate": 1.3773584905660378e-05, "loss": 5.6509, "step": 192 },
    { "epoch": 3.64, "learning_rate": 1.3584905660377358e-05, "loss": 5.7938, "step": 193 },
    { "epoch": 3.66, "learning_rate": 1.339622641509434e-05, "loss": 5.736, "step": 194 },
    { "epoch": 3.68, "learning_rate": 1.320754716981132e-05, "loss": 5.6844, "step": 195 },
    { "epoch": 3.7, "learning_rate": 1.3018867924528303e-05, "loss": 5.7359, "step": 196 },
    { "epoch": 3.72, "learning_rate": 1.2830188679245283e-05, "loss": 5.8941, "step": 197 },
    { "epoch": 3.74, "learning_rate": 1.2641509433962264e-05, "loss": 5.8553, "step": 198 },
    { "epoch": 3.75, "learning_rate": 1.2452830188679246e-05, "loss": 5.6521, "step": 199 },
    { "epoch": 3.77, "learning_rate": 1.2264150943396227e-05, "loss": 5.921, "step": 200 },
    { "epoch": 3.79, "learning_rate": 1.2075471698113209e-05, "loss": 5.7235, "step": 201 },
    { "epoch": 3.81, "learning_rate": 1.188679245283019e-05, "loss": 5.8019, "step": 202 },
    { "epoch": 3.83, "learning_rate": 1.169811320754717e-05, "loss": 5.7486, "step": 203 },
    { "epoch": 3.85, "learning_rate": 1.1509433962264152e-05, "loss": 5.7429, "step": 204 },
    { "epoch": 3.87, "learning_rate": 1.1320754716981132e-05, "loss": 5.8486, "step": 205 },
    { "epoch": 3.89, "learning_rate": 1.1132075471698115e-05, "loss": 5.7921, "step": 206 },
    { "epoch": 3.91, "learning_rate": 1.0943396226415095e-05, "loss": 5.8232, "step": 207 },
    { "epoch": 3.92, "learning_rate": 1.0754716981132077e-05, "loss": 6.083, "step": 208 },
    { "epoch": 3.94, "learning_rate": 1.0566037735849058e-05, "loss": 5.7263, "step": 209 },
    { "epoch": 3.96, "learning_rate": 1.0377358490566038e-05, "loss": 5.6783, "step": 210 },
    { "epoch": 3.98, "learning_rate": 1.018867924528302e-05, "loss": 5.7754, "step": 211 },
    { "epoch": 4.0, "learning_rate": 1e-05, "loss": 6.1204, "step": 212 },
    { "epoch": 4.02, "learning_rate": 9.811320754716983e-06, "loss": 5.6473, "step": 213 },
    { "epoch": 4.04, "learning_rate": 9.622641509433963e-06, "loss": 5.8676, "step": 214 },
    { "epoch": 4.06, "learning_rate": 9.433962264150944e-06, "loss": 5.7221, "step": 215 },
    { "epoch": 4.08, "learning_rate": 9.245283018867924e-06, "loss": 5.8749, "step": 216 },
    { "epoch": 4.09, "learning_rate": 9.056603773584905e-06, "loss": 5.7737, "step": 217 },
    { "epoch": 4.11, "learning_rate": 8.867924528301887e-06, "loss": 5.7663, "step": 218 },
    { "epoch": 4.13, "learning_rate": 8.679245283018868e-06, "loss": 5.6642, "step": 219 },
    { "epoch": 4.15, "learning_rate": 8.49056603773585e-06, "loss": 5.815, "step": 220 },
    { "epoch": 4.17, "learning_rate": 8.30188679245283e-06, "loss": 5.6594, "step": 221 },
    { "epoch": 4.19, "learning_rate": 8.11320754716981e-06, "loss": 6.0083, "step": 222 },
    { "epoch": 4.21, "learning_rate": 7.924528301886793e-06, "loss": 5.7564, "step": 223 },
    { "epoch": 4.23, "learning_rate": 7.735849056603773e-06, "loss": 5.677, "step": 224 },
    { "epoch": 4.25, "learning_rate": 7.547169811320755e-06, "loss": 5.8026, "step": 225 },
    { "epoch": 4.26, "learning_rate": 7.358490566037736e-06, "loss": 5.9558, "step": 226 },
    { "epoch": 4.28, "learning_rate": 7.169811320754717e-06, "loss": 5.6896, "step": 227 },
    { "epoch": 4.3, "learning_rate": 6.981132075471699e-06, "loss": 5.8383, "step": 228 },
    { "epoch": 4.32, "learning_rate": 6.792452830188679e-06, "loss": 5.8355, "step": 229 },
    { "epoch": 4.34, "learning_rate": 6.60377358490566e-06, "loss": 5.6567, "step": 230 },
    { "epoch": 4.36, "learning_rate": 6.415094339622642e-06, "loss": 5.7913, "step": 231 },
    { "epoch": 4.38, "learning_rate": 6.226415094339623e-06, "loss": 5.9586, "step": 232 },
    { "epoch": 4.4, "learning_rate": 6.037735849056604e-06, "loss": 5.7501, "step": 233 },
    { "epoch": 4.42, "learning_rate": 5.849056603773585e-06, "loss": 5.6516, "step": 234 },
    { "epoch": 4.43, "learning_rate": 5.660377358490566e-06, "loss": 5.7582, "step": 235 },
    { "epoch": 4.45, "learning_rate": 5.4716981132075475e-06, "loss": 5.6714, "step": 236 },
    { "epoch": 4.47, "learning_rate": 5.283018867924529e-06, "loss": 5.7091, "step": 237 },
    { "epoch": 4.49, "learning_rate": 5.09433962264151e-06, "loss": 5.6222, "step": 238 },
    { "epoch": 4.51, "learning_rate": 4.9056603773584915e-06, "loss": 5.7866, "step": 239 },
    { "epoch": 4.53, "learning_rate": 4.716981132075472e-06, "loss": 5.6759, "step": 240 },
    { "epoch": 4.55, "learning_rate": 4.5283018867924524e-06, "loss": 5.679, "step": 241 },
    { "epoch": 4.57, "learning_rate": 4.339622641509434e-06, "loss": 5.7489, "step": 242 },
    { "epoch": 4.58, "learning_rate": 4.150943396226415e-06, "loss": 5.6387, "step": 243 },
    { "epoch": 4.6, "learning_rate": 3.962264150943396e-06, "loss": 5.6617, "step": 244 },
    { "epoch": 4.62, "learning_rate": 3.7735849056603773e-06, "loss": 5.6338, "step": 245 },
    { "epoch": 4.64, "learning_rate": 3.5849056603773586e-06, "loss": 5.6264, "step": 246 },
    { "epoch": 4.66, "learning_rate": 3.3962264150943395e-06, "loss": 5.8117, "step": 247 },
    { "epoch": 4.68, "learning_rate": 3.207547169811321e-06, "loss": 5.8644, "step": 248 },
    { "epoch": 4.7, "learning_rate": 3.018867924528302e-06, "loss": 5.7927, "step": 249 },
    { "epoch": 4.72, "learning_rate": 2.830188679245283e-06, "loss": 5.7877, "step": 250 },
    { "epoch": 4.74, "learning_rate": 2.6415094339622644e-06, "loss": 5.7415, "step": 251 },
    { "epoch": 4.75, "learning_rate": 2.4528301886792457e-06, "loss": 5.5958, "step": 252 },
    { "epoch": 4.77, "learning_rate": 2.2641509433962262e-06, "loss": 5.4249, "step": 253 },
    { "epoch": 4.79, "learning_rate": 2.0754716981132075e-06, "loss": 5.7735, "step": 254 },
    { "epoch": 4.81, "learning_rate": 1.8867924528301887e-06, "loss": 5.672, "step": 255 },
    { "epoch": 4.83, "learning_rate": 1.6981132075471698e-06, "loss": 5.5705, "step": 256 },
    { "epoch": 4.85, "learning_rate": 1.509433962264151e-06, "loss": 5.8487, "step": 257 },
    { "epoch": 4.87, "learning_rate": 1.3207547169811322e-06, "loss": 5.8151, "step": 258 },
    { "epoch": 4.89, "learning_rate": 1.1320754716981131e-06, "loss": 5.6904, "step": 259 },
    { "epoch": 4.91, "learning_rate": 9.433962264150943e-07, "loss": 5.7021, "step": 260 },
    { "epoch": 4.92, "learning_rate": 7.547169811320755e-07, "loss": 5.7196, "step": 261 },
    { "epoch": 4.94, "learning_rate": 5.660377358490566e-07, "loss": 5.8667, "step": 262 },
    { "epoch": 4.96, "learning_rate": 3.773584905660378e-07, "loss": 5.8458, "step": 263 },
    { "epoch": 4.98, "learning_rate": 1.886792452830189e-07, "loss": 5.7733, "step": 264 },
    { "epoch": 5.0, "learning_rate": 0.0, "loss": 5.8065, "step": 265 },
    { "epoch": 5.0, "step": 265, "total_flos": 209285213159424.0, "train_loss": 6.612753483034529, "train_runtime": 33.2807, "train_samples_per_second": 503.444, "train_steps_per_second": 7.963 }
  ],
  "max_steps": 265,
  "num_train_epochs": 5,
  "total_flos": 209285213159424.0,
  "trial_name": null,
  "trial_params": null
}