|
{ |
|
"best_metric": 0.677589476108551, |
|
"best_model_checkpoint": "ast-finetuned-audioset-10-10-0.4593-finetuning1-bp-v2/checkpoint-626", |
|
"epoch": 5.0, |
|
"eval_steps": 500, |
|
"global_step": 1565, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.9961661341853036e-05, |
|
"loss": 0.6441, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.992332268370607e-05, |
|
"loss": 0.6289, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.9884984025559106e-05, |
|
"loss": 0.6572, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.984664536741214e-05, |
|
"loss": 0.6638, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.9808306709265177e-05, |
|
"loss": 0.6765, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.9769968051118212e-05, |
|
"loss": 0.6938, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.9731629392971247e-05, |
|
"loss": 0.6715, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 2.9693290734824282e-05, |
|
"loss": 0.7168, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.9654952076677317e-05, |
|
"loss": 0.5864, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2.961661341853035e-05, |
|
"loss": 0.6593, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 2.9578274760383384e-05, |
|
"loss": 0.6444, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.9539936102236423e-05, |
|
"loss": 0.7718, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.9501597444089458e-05, |
|
"loss": 0.6247, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 2.9463258785942493e-05, |
|
"loss": 0.7347, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.9424920127795528e-05, |
|
"loss": 0.7123, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 2.9386581469648563e-05, |
|
"loss": 0.6798, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 2.93482428115016e-05, |
|
"loss": 0.6984, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.9309904153354634e-05, |
|
"loss": 0.7256, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 2.927156549520767e-05, |
|
"loss": 0.7119, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.9233226837060704e-05, |
|
"loss": 0.666, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 2.919488817891374e-05, |
|
"loss": 0.6492, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 2.9156549520766774e-05, |
|
"loss": 0.701, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.911821086261981e-05, |
|
"loss": 0.7348, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 2.9079872204472845e-05, |
|
"loss": 0.6921, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 2.904153354632588e-05, |
|
"loss": 0.7379, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.9003194888178915e-05, |
|
"loss": 0.666, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 2.896485623003195e-05, |
|
"loss": 0.5985, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 2.8926517571884985e-05, |
|
"loss": 0.7178, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.888817891373802e-05, |
|
"loss": 0.6295, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 2.8849840255591052e-05, |
|
"loss": 0.7068, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.8811501597444087e-05, |
|
"loss": 0.7146, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 2.8773162939297123e-05, |
|
"loss": 0.6619, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 2.8734824281150158e-05, |
|
"loss": 0.6854, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.8696485623003196e-05, |
|
"loss": 0.6631, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 2.865814696485623e-05, |
|
"loss": 0.668, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.8619808306709267e-05, |
|
"loss": 0.7906, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.8581469648562302e-05, |
|
"loss": 0.7034, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 2.8543130990415337e-05, |
|
"loss": 0.7359, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 2.8504792332268372e-05, |
|
"loss": 0.6615, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 2.8466453674121407e-05, |
|
"loss": 0.6664, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 2.8428115015974443e-05, |
|
"loss": 0.6841, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 2.8389776357827478e-05, |
|
"loss": 0.7221, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 2.8351437699680513e-05, |
|
"loss": 0.6912, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 2.8313099041533548e-05, |
|
"loss": 0.7097, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 2.8274760383386583e-05, |
|
"loss": 0.6062, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 2.823642172523962e-05, |
|
"loss": 0.6829, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 2.8198083067092653e-05, |
|
"loss": 0.6543, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 2.815974440894569e-05, |
|
"loss": 0.6051, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 2.8121405750798724e-05, |
|
"loss": 0.6734, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 2.8083067092651756e-05, |
|
"loss": 0.6618, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.804472843450479e-05, |
|
"loss": 0.6416, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 2.8006389776357826e-05, |
|
"loss": 0.6395, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 2.796805111821086e-05, |
|
"loss": 0.6068, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.7929712460063896e-05, |
|
"loss": 0.687, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 2.789137380191693e-05, |
|
"loss": 0.6594, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.7853035143769967e-05, |
|
"loss": 0.6232, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 2.7814696485623005e-05, |
|
"loss": 0.6892, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 2.777635782747604e-05, |
|
"loss": 0.6637, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.7738019169329075e-05, |
|
"loss": 0.7069, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 2.769968051118211e-05, |
|
"loss": 0.7056, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 2.7661341853035146e-05, |
|
"loss": 0.6551, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.762300319488818e-05, |
|
"loss": 0.6612, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 2.7584664536741216e-05, |
|
"loss": 0.6477, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 2.754632587859425e-05, |
|
"loss": 0.6746, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.7507987220447286e-05, |
|
"loss": 0.635, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.746964856230032e-05, |
|
"loss": 0.6964, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.7431309904153357e-05, |
|
"loss": 0.705, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 2.7392971246006392e-05, |
|
"loss": 0.6138, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.7354632587859427e-05, |
|
"loss": 0.6568, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.731629392971246e-05, |
|
"loss": 0.6016, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.7277955271565494e-05, |
|
"loss": 0.6527, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.723961661341853e-05, |
|
"loss": 0.6552, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7201277955271564e-05, |
|
"loss": 0.693, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.71629392971246e-05, |
|
"loss": 0.6139, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.7124600638977635e-05, |
|
"loss": 0.6431, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.708626198083067e-05, |
|
"loss": 0.6479, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.7047923322683705e-05, |
|
"loss": 0.6459, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.700958466453674e-05, |
|
"loss": 0.6273, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.6971246006389775e-05, |
|
"loss": 0.6341, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.6932907348242814e-05, |
|
"loss": 0.6776, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.689456869009585e-05, |
|
"loss": 0.6508, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.6856230031948884e-05, |
|
"loss": 0.6779, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.681789137380192e-05, |
|
"loss": 0.5938, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.6779552715654955e-05, |
|
"loss": 0.6443, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.674121405750799e-05, |
|
"loss": 0.6245, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.6702875399361025e-05, |
|
"loss": 0.6271, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.666453674121406e-05, |
|
"loss": 0.6683, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.6626198083067095e-05, |
|
"loss": 0.6095, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 2.658785942492013e-05, |
|
"loss": 0.6363, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 2.6549520766773162e-05, |
|
"loss": 0.6222, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 2.6511182108626197e-05, |
|
"loss": 0.6411, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 2.6472843450479233e-05, |
|
"loss": 0.6477, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 2.6434504792332268e-05, |
|
"loss": 0.6633, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 2.6396166134185303e-05, |
|
"loss": 0.6431, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 2.6357827476038338e-05, |
|
"loss": 0.611, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 2.6319488817891373e-05, |
|
"loss": 0.6429, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 2.628115015974441e-05, |
|
"loss": 0.6201, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 2.6242811501597443e-05, |
|
"loss": 0.6753, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 2.620447284345048e-05, |
|
"loss": 0.6661, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 2.6166134185303514e-05, |
|
"loss": 0.6058, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.612779552715655e-05, |
|
"loss": 0.6615, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.6089456869009584e-05, |
|
"loss": 0.6526, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 2.6051118210862623e-05, |
|
"loss": 0.6457, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 2.6012779552715658e-05, |
|
"loss": 0.697, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 2.5974440894568693e-05, |
|
"loss": 0.6243, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.5936102236421728e-05, |
|
"loss": 0.6342, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.5897763578274763e-05, |
|
"loss": 0.6882, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 2.58594249201278e-05, |
|
"loss": 0.608, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.582108626198083e-05, |
|
"loss": 0.6117, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.5782747603833865e-05, |
|
"loss": 0.6661, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 2.57444089456869e-05, |
|
"loss": 0.6656, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.5706070287539936e-05, |
|
"loss": 0.6372, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 2.566773162939297e-05, |
|
"loss": 0.5845, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.5629392971246006e-05, |
|
"loss": 0.6524, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.559105431309904e-05, |
|
"loss": 0.6034, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 2.5552715654952076e-05, |
|
"loss": 0.7123, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.551437699680511e-05, |
|
"loss": 0.6529, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.5476038338658147e-05, |
|
"loss": 0.6544, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.5437699680511182e-05, |
|
"loss": 0.6274, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.5399361022364217e-05, |
|
"loss": 0.7083, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.5361022364217252e-05, |
|
"loss": 0.6452, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.5322683706070287e-05, |
|
"loss": 0.5892, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.5284345047923323e-05, |
|
"loss": 0.6619, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.5246006389776358e-05, |
|
"loss": 0.5389, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.5207667731629396e-05, |
|
"loss": 0.6233, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.516932907348243e-05, |
|
"loss": 0.6089, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.5130990415335467e-05, |
|
"loss": 0.6321, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5092651757188502e-05, |
|
"loss": 0.6448, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5054313099041534e-05, |
|
"loss": 0.6556, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.501597444089457e-05, |
|
"loss": 0.6326, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.4977635782747604e-05, |
|
"loss": 0.6171, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.493929712460064e-05, |
|
"loss": 0.6909, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.4900958466453674e-05, |
|
"loss": 0.6293, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.486261980830671e-05, |
|
"loss": 0.7043, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.4824281150159745e-05, |
|
"loss": 0.6922, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 2.478594249201278e-05, |
|
"loss": 0.5919, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 2.4747603833865815e-05, |
|
"loss": 0.5831, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 2.470926517571885e-05, |
|
"loss": 0.6656, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 2.4670926517571885e-05, |
|
"loss": 0.6388, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 2.463258785942492e-05, |
|
"loss": 0.647, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 2.4594249201277956e-05, |
|
"loss": 0.6672, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.455591054313099e-05, |
|
"loss": 0.6203, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 2.4517571884984026e-05, |
|
"loss": 0.6143, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.447923322683706e-05, |
|
"loss": 0.6173, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.4440894568690096e-05, |
|
"loss": 0.6131, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.440255591054313e-05, |
|
"loss": 0.6613, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.4364217252396167e-05, |
|
"loss": 0.6702, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.4325878594249205e-05, |
|
"loss": 0.6413, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.4287539936102237e-05, |
|
"loss": 0.5822, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.4249201277955272e-05, |
|
"loss": 0.6207, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.4210862619808307e-05, |
|
"loss": 0.5617, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.4172523961661342e-05, |
|
"loss": 0.6764, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.4134185303514378e-05, |
|
"loss": 0.5974, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.4095846645367413e-05, |
|
"loss": 0.7435, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.4057507987220448e-05, |
|
"loss": 0.6443, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.4019169329073483e-05, |
|
"loss": 0.6656, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.5688009262084961, |
|
"eval_loss": 0.7357929348945618, |
|
"eval_runtime": 3203.0791, |
|
"eval_samples_per_second": 1.341, |
|
"eval_steps_per_second": 0.042, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.3980830670926518e-05, |
|
"loss": 0.6123, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 2.3942492012779553e-05, |
|
"loss": 0.6665, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 2.390415335463259e-05, |
|
"loss": 0.6232, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 2.3865814696485624e-05, |
|
"loss": 0.6205, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 2.382747603833866e-05, |
|
"loss": 0.631, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 2.3789137380191694e-05, |
|
"loss": 0.6061, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 2.375079872204473e-05, |
|
"loss": 0.6014, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 2.3712460063897764e-05, |
|
"loss": 0.6176, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 2.36741214057508e-05, |
|
"loss": 0.6523, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 2.3635782747603835e-05, |
|
"loss": 0.6511, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 2.359744408945687e-05, |
|
"loss": 0.6543, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 2.3559105431309905e-05, |
|
"loss": 0.6375, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 2.3520766773162937e-05, |
|
"loss": 0.6704, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 2.3482428115015975e-05, |
|
"loss": 0.5776, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 2.344408945686901e-05, |
|
"loss": 0.6643, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 2.3405750798722046e-05, |
|
"loss": 0.643, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 2.336741214057508e-05, |
|
"loss": 0.6681, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 2.3329073482428116e-05, |
|
"loss": 0.5931, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.329073482428115e-05, |
|
"loss": 0.6165, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.3252396166134186e-05, |
|
"loss": 0.6805, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.321405750798722e-05, |
|
"loss": 0.6017, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.3175718849840257e-05, |
|
"loss": 0.6375, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.3137380191693292e-05, |
|
"loss": 0.6365, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.3099041533546327e-05, |
|
"loss": 0.6069, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 2.3060702875399362e-05, |
|
"loss": 0.6181, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 2.3022364217252397e-05, |
|
"loss": 0.7057, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 2.2984025559105432e-05, |
|
"loss": 0.6879, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 2.2945686900958468e-05, |
|
"loss": 0.6772, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 2.2907348242811503e-05, |
|
"loss": 0.6226, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 2.2869009584664538e-05, |
|
"loss": 0.6852, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 2.2830670926517573e-05, |
|
"loss": 0.6546, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.2792332268370608e-05, |
|
"loss": 0.6118, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.275399361022364e-05, |
|
"loss": 0.6018, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.2715654952076675e-05, |
|
"loss": 0.6146, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.267731629392971e-05, |
|
"loss": 0.6137, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.2638977635782746e-05, |
|
"loss": 0.5415, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.2600638977635784e-05, |
|
"loss": 0.6223, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.256230031948882e-05, |
|
"loss": 0.6453, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.2523961661341854e-05, |
|
"loss": 0.666, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.248562300319489e-05, |
|
"loss": 0.6117, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.2447284345047925e-05, |
|
"loss": 0.5793, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.240894568690096e-05, |
|
"loss": 0.6421, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.2370607028753995e-05, |
|
"loss": 0.6118, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.233226837060703e-05, |
|
"loss": 0.7204, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.2293929712460065e-05, |
|
"loss": 0.6333, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.22555910543131e-05, |
|
"loss": 0.6228, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.2217252396166136e-05, |
|
"loss": 0.6447, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.217891373801917e-05, |
|
"loss": 0.5787, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.2140575079872206e-05, |
|
"loss": 0.6639, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.210223642172524e-05, |
|
"loss": 0.5804, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.2063897763578276e-05, |
|
"loss": 0.5911, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.202555910543131e-05, |
|
"loss": 0.703, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.1987220447284343e-05, |
|
"loss": 0.679, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.194888178913738e-05, |
|
"loss": 0.7204, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.1910543130990414e-05, |
|
"loss": 0.6614, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.187220447284345e-05, |
|
"loss": 0.6194, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.1833865814696484e-05, |
|
"loss": 0.6539, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.179552715654952e-05, |
|
"loss": 0.6258, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.1757188498402554e-05, |
|
"loss": 0.5879, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.1718849840255593e-05, |
|
"loss": 0.5875, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.1680511182108628e-05, |
|
"loss": 0.6386, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.1642172523961663e-05, |
|
"loss": 0.6548, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.16038338658147e-05, |
|
"loss": 0.5866, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.1565495207667734e-05, |
|
"loss": 0.6489, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.152715654952077e-05, |
|
"loss": 0.6885, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.1488817891373804e-05, |
|
"loss": 0.6796, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.145047923322684e-05, |
|
"loss": 0.5876, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.1412140575079874e-05, |
|
"loss": 0.6219, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.137380191693291e-05, |
|
"loss": 0.5989, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.1335463258785945e-05, |
|
"loss": 0.5825, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.129712460063898e-05, |
|
"loss": 0.5712, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.125878594249201e-05, |
|
"loss": 0.6612, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.1220447284345047e-05, |
|
"loss": 0.5837, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.1182108626198082e-05, |
|
"loss": 0.612, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.1143769968051117e-05, |
|
"loss": 0.6559, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.1105431309904152e-05, |
|
"loss": 0.6591, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.1067092651757187e-05, |
|
"loss": 0.6288, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.1028753993610222e-05, |
|
"loss": 0.6524, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.0990415335463258e-05, |
|
"loss": 0.6079, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.0952076677316293e-05, |
|
"loss": 0.6039, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.0913738019169328e-05, |
|
"loss": 0.5888, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.0875399361022363e-05, |
|
"loss": 0.6099, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.08370607028754e-05, |
|
"loss": 0.5547, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.0798722044728437e-05, |
|
"loss": 0.6258, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.0760383386581472e-05, |
|
"loss": 0.5904, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.0722044728434507e-05, |
|
"loss": 0.5921, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.0683706070287542e-05, |
|
"loss": 0.581, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.0645367412140577e-05, |
|
"loss": 0.6627, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.0607028753993613e-05, |
|
"loss": 0.6303, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.0568690095846648e-05, |
|
"loss": 0.6454, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.0530351437699683e-05, |
|
"loss": 0.6391, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.0492012779552715e-05, |
|
"loss": 0.5063, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.045367412140575e-05, |
|
"loss": 0.5964, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.0415335463258785e-05, |
|
"loss": 0.5932, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.037699680511182e-05, |
|
"loss": 0.6138, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.0338658146964855e-05, |
|
"loss": 0.5428, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.030031948881789e-05, |
|
"loss": 0.6472, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.0261980830670926e-05, |
|
"loss": 0.6685, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.022364217252396e-05, |
|
"loss": 0.6025, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.0185303514376996e-05, |
|
"loss": 0.6466, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.014696485623003e-05, |
|
"loss": 0.5887, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.0108626198083066e-05, |
|
"loss": 0.7098, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.00702875399361e-05, |
|
"loss": 0.6376, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.0031948881789137e-05, |
|
"loss": 0.5713, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.9993610223642175e-05, |
|
"loss": 0.6579, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.995527156549521e-05, |
|
"loss": 0.6616, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.9916932907348246e-05, |
|
"loss": 0.652, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.987859424920128e-05, |
|
"loss": 0.634, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.9840255591054316e-05, |
|
"loss": 0.6714, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.980191693290735e-05, |
|
"loss": 0.6627, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.9763578274760386e-05, |
|
"loss": 0.6686, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.9725239616613418e-05, |
|
"loss": 0.6024, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.9686900958466453e-05, |
|
"loss": 0.5986, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.964856230031949e-05, |
|
"loss": 0.4928, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.9610223642172524e-05, |
|
"loss": 0.5281, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.957188498402556e-05, |
|
"loss": 0.5763, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.9533546325878594e-05, |
|
"loss": 0.6374, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.949520766773163e-05, |
|
"loss": 0.6591, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.9456869009584664e-05, |
|
"loss": 0.5999, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.94185303514377e-05, |
|
"loss": 0.7151, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.9380191693290735e-05, |
|
"loss": 0.5627, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.934185303514377e-05, |
|
"loss": 0.6144, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.9303514376996805e-05, |
|
"loss": 0.6194, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.926517571884984e-05, |
|
"loss": 0.6374, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.9226837060702875e-05, |
|
"loss": 0.6647, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.918849840255591e-05, |
|
"loss": 0.6892, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9150159744408946e-05, |
|
"loss": 0.6928, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9111821086261984e-05, |
|
"loss": 0.698, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.907348242811502e-05, |
|
"loss": 0.5687, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.9035143769968054e-05, |
|
"loss": 0.6076, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.899680511182109e-05, |
|
"loss": 0.584, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.895846645367412e-05, |
|
"loss": 0.681, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.8920127795527157e-05, |
|
"loss": 0.5812, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.888178913738019e-05, |
|
"loss": 0.6194, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.8843450479233227e-05, |
|
"loss": 0.5849, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.8805111821086262e-05, |
|
"loss": 0.6218, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.8766773162939297e-05, |
|
"loss": 0.6273, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8728434504792332e-05, |
|
"loss": 0.6592, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8690095846645367e-05, |
|
"loss": 0.6193, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8651757188498403e-05, |
|
"loss": 0.5703, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8613418530351438e-05, |
|
"loss": 0.501, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8575079872204473e-05, |
|
"loss": 0.6547, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.8536741214057508e-05, |
|
"loss": 0.594, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.8498402555910543e-05, |
|
"loss": 0.6213, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.846006389776358e-05, |
|
"loss": 0.6633, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.8421725239616614e-05, |
|
"loss": 0.6737, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.838338658146965e-05, |
|
"loss": 0.6047, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.8345047923322684e-05, |
|
"loss": 0.6795, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.830670926517572e-05, |
|
"loss": 0.6357, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.8268370607028754e-05, |
|
"loss": 0.6206, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.8230031948881793e-05, |
|
"loss": 0.7065, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.8191693290734825e-05, |
|
"loss": 0.6732, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.815335463258786e-05, |
|
"loss": 0.5882, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.8115015974440895e-05, |
|
"loss": 0.5762, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.807667731629393e-05, |
|
"loss": 0.5851, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.8038338658146965e-05, |
|
"loss": 0.6224, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.8e-05, |
|
"loss": 0.6934, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.6025611162185669, |
|
"eval_loss": 0.677589476108551, |
|
"eval_runtime": 3149.7711, |
|
"eval_samples_per_second": 1.364, |
|
"eval_steps_per_second": 0.043, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.7961661341853036e-05, |
|
"loss": 0.615, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.792332268370607e-05, |
|
"loss": 0.6546, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.7884984025559106e-05, |
|
"loss": 0.5759, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.784664536741214e-05, |
|
"loss": 0.7322, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.7808306709265176e-05, |
|
"loss": 0.629, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.776996805111821e-05, |
|
"loss": 0.6263, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.7731629392971247e-05, |
|
"loss": 0.6228, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.7693290734824282e-05, |
|
"loss": 0.5817, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.7654952076677317e-05, |
|
"loss": 0.6373, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.7616613418530352e-05, |
|
"loss": 0.5643, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.7578274760383387e-05, |
|
"loss": 0.6174, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.7539936102236422e-05, |
|
"loss": 0.6153, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.7501597444089458e-05, |
|
"loss": 0.6134, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.746325878594249e-05, |
|
"loss": 0.5695, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.7424920127795525e-05, |
|
"loss": 0.6241, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.7386581469648563e-05, |
|
"loss": 0.569, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.7348242811501598e-05, |
|
"loss": 0.6013, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.7309904153354633e-05, |
|
"loss": 0.5607, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.727156549520767e-05, |
|
"loss": 0.6161, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.7233226837060704e-05, |
|
"loss": 0.5929, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.719488817891374e-05, |
|
"loss": 0.6489, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.7156549520766774e-05, |
|
"loss": 0.5791, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.711821086261981e-05, |
|
"loss": 0.5774, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.7079872204472844e-05, |
|
"loss": 0.6839, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.704153354632588e-05, |
|
"loss": 0.5733, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.7003194888178915e-05, |
|
"loss": 0.5827, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.696485623003195e-05, |
|
"loss": 0.5698, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.6926517571884985e-05, |
|
"loss": 0.5815, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.688817891373802e-05, |
|
"loss": 0.6066, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.6849840255591055e-05, |
|
"loss": 0.6197, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.681150159744409e-05, |
|
"loss": 0.5718, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.6773162939297126e-05, |
|
"loss": 0.5479, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.673482428115016e-05, |
|
"loss": 0.6439, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.6696485623003193e-05, |
|
"loss": 0.6525, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.6658146964856228e-05, |
|
"loss": 0.5945, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.6619808306709263e-05, |
|
"loss": 0.5785, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.6581469648562298e-05, |
|
"loss": 0.6794, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.6543130990415333e-05, |
|
"loss": 0.5943, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.6504792332268372e-05, |
|
"loss": 0.5477, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.6466453674121407e-05, |
|
"loss": 0.609, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.6428115015974442e-05, |
|
"loss": 0.6088, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.6389776357827477e-05, |
|
"loss": 0.5371, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.6351437699680513e-05, |
|
"loss": 0.5081, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.6313099041533548e-05, |
|
"loss": 0.6044, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.6274760383386583e-05, |
|
"loss": 0.6805, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.6236421725239618e-05, |
|
"loss": 0.5758, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.6198083067092653e-05, |
|
"loss": 0.5951, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.615974440894569e-05, |
|
"loss": 0.5904, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.6121405750798724e-05, |
|
"loss": 0.6482, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.608306709265176e-05, |
|
"loss": 0.6821, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.6044728434504794e-05, |
|
"loss": 0.5993, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.600638977635783e-05, |
|
"loss": 0.6405, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.5968051118210864e-05, |
|
"loss": 0.5977, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.5929712460063896e-05, |
|
"loss": 0.6535, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.589137380191693e-05, |
|
"loss": 0.6182, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.5853035143769966e-05, |
|
"loss": 0.5556, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.5814696485623e-05, |
|
"loss": 0.6626, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.5776357827476037e-05, |
|
"loss": 0.6106, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.5738019169329072e-05, |
|
"loss": 0.5401, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.5699680511182107e-05, |
|
"loss": 0.5346, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.5661341853035142e-05, |
|
"loss": 0.6638, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.562300319488818e-05, |
|
"loss": 0.5947, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.5584664536741216e-05, |
|
"loss": 0.5658, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.554632587859425e-05, |
|
"loss": 0.6878, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.5507987220447286e-05, |
|
"loss": 0.6821, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.546964856230032e-05, |
|
"loss": 0.7108, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.5431309904153356e-05, |
|
"loss": 0.5898, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.539297124600639e-05, |
|
"loss": 0.5722, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.5354632587859427e-05, |
|
"loss": 0.5794, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.5316293929712462e-05, |
|
"loss": 0.6522, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.5277955271565497e-05, |
|
"loss": 0.6789, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.5239616613418532e-05, |
|
"loss": 0.6487, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5201277955271567e-05, |
|
"loss": 0.6435, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.51629392971246e-05, |
|
"loss": 0.5946, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5124600638977634e-05, |
|
"loss": 0.6257, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.508626198083067e-05, |
|
"loss": 0.7132, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.5047923322683705e-05, |
|
"loss": 0.5461, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.500958466453674e-05, |
|
"loss": 0.5636, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4971246006389777e-05, |
|
"loss": 0.6666, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.4932907348242812e-05, |
|
"loss": 0.6291, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.4894568690095847e-05, |
|
"loss": 0.5995, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.4856230031948882e-05, |
|
"loss": 0.5494, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.4817891373801917e-05, |
|
"loss": 0.6506, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.4779552715654953e-05, |
|
"loss": 0.583, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.4741214057507988e-05, |
|
"loss": 0.6563, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.4702875399361023e-05, |
|
"loss": 0.5912, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.4664536741214058e-05, |
|
"loss": 0.6305, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.4626198083067093e-05, |
|
"loss": 0.572, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.4587859424920127e-05, |
|
"loss": 0.6084, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.4549520766773164e-05, |
|
"loss": 0.5786, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.4511182108626199e-05, |
|
"loss": 0.614, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.4472843450479234e-05, |
|
"loss": 0.6951, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.4434504792332269e-05, |
|
"loss": 0.5806, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.4396166134185304e-05, |
|
"loss": 0.6374, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.435782747603834e-05, |
|
"loss": 0.5693, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.4319488817891375e-05, |
|
"loss": 0.7299, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.428115015974441e-05, |
|
"loss": 0.5376, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.4242811501597445e-05, |
|
"loss": 0.6866, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.4204472843450478e-05, |
|
"loss": 0.6129, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.4166134185303514e-05, |
|
"loss": 0.6229, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.412779552715655e-05, |
|
"loss": 0.6042, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.4089456869009586e-05, |
|
"loss": 0.6098, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.405111821086262e-05, |
|
"loss": 0.576, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.4012779552715656e-05, |
|
"loss": 0.6003, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.3974440894568691e-05, |
|
"loss": 0.6046, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.3936102236421726e-05, |
|
"loss": 0.6067, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.3897763578274761e-05, |
|
"loss": 0.5585, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.3859424920127795e-05, |
|
"loss": 0.6436, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.382108626198083e-05, |
|
"loss": 0.591, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.3782747603833865e-05, |
|
"loss": 0.5908, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.37444089456869e-05, |
|
"loss": 0.5671, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 1.3706070287539935e-05, |
|
"loss": 0.6422, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 1.3667731629392972e-05, |
|
"loss": 0.6385, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 1.3629392971246008e-05, |
|
"loss": 0.6338, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 1.3591054313099043e-05, |
|
"loss": 0.6053, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 1.3552715654952078e-05, |
|
"loss": 0.5647, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 1.3514376996805113e-05, |
|
"loss": 0.5518, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 1.3476038338658146e-05, |
|
"loss": 0.6357, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 1.3437699680511182e-05, |
|
"loss": 0.6535, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 1.3399361022364217e-05, |
|
"loss": 0.6778, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 1.3361022364217252e-05, |
|
"loss": 0.5676, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 1.3322683706070287e-05, |
|
"loss": 0.6081, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 1.3284345047923322e-05, |
|
"loss": 0.6059, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 1.324600638977636e-05, |
|
"loss": 0.6292, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 1.3207667731629394e-05, |
|
"loss": 0.618, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 1.316932907348243e-05, |
|
"loss": 0.5128, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 1.3130990415335465e-05, |
|
"loss": 0.5484, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.3092651757188498e-05, |
|
"loss": 0.5384, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.3054313099041533e-05, |
|
"loss": 0.7096, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.3015974440894568e-05, |
|
"loss": 0.6335, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.2977635782747604e-05, |
|
"loss": 0.625, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.2939297124600639e-05, |
|
"loss": 0.7057, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.2900958466453674e-05, |
|
"loss": 0.6188, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.2862619808306709e-05, |
|
"loss": 0.5762, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.2824281150159746e-05, |
|
"loss": 0.5937, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.2785942492012781e-05, |
|
"loss": 0.6604, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.2747603833865816e-05, |
|
"loss": 0.6576, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.270926517571885e-05, |
|
"loss": 0.6, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.2670926517571885e-05, |
|
"loss": 0.5268, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.263258785942492e-05, |
|
"loss": 0.6061, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.2594249201277955e-05, |
|
"loss": 0.5935, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.255591054313099e-05, |
|
"loss": 0.5937, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.2517571884984026e-05, |
|
"loss": 0.5415, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.247923322683706e-05, |
|
"loss": 0.5544, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.2440894568690096e-05, |
|
"loss": 0.5166, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.2402555910543131e-05, |
|
"loss": 0.5572, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.2364217252396168e-05, |
|
"loss": 0.6462, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.2325878594249201e-05, |
|
"loss": 0.5443, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.2287539936102237e-05, |
|
"loss": 0.6136, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.2249201277955272e-05, |
|
"loss": 0.6082, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.2210862619808307e-05, |
|
"loss": 0.6294, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 1.2172523961661342e-05, |
|
"loss": 0.6205, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.2134185303514377e-05, |
|
"loss": 0.6345, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.2095846645367412e-05, |
|
"loss": 0.6088, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.2057507987220448e-05, |
|
"loss": 0.6511, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.2019169329073483e-05, |
|
"loss": 0.7065, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.584400475025177, |
|
"eval_loss": 0.7319782972335815, |
|
"eval_runtime": 3278.4387, |
|
"eval_samples_per_second": 1.31, |
|
"eval_steps_per_second": 0.041, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.1980830670926518e-05, |
|
"loss": 0.5738, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 3.01, |
|
"learning_rate": 1.1942492012779553e-05, |
|
"loss": 0.6859, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 1.1904153354632588e-05, |
|
"loss": 0.6642, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 1.1865814696485623e-05, |
|
"loss": 0.5589, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 3.03, |
|
"learning_rate": 1.1827476038338659e-05, |
|
"loss": 0.598, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 1.1789137380191694e-05, |
|
"loss": 0.6001, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 1.1750798722044729e-05, |
|
"loss": 0.6187, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 1.1712460063897764e-05, |
|
"loss": 0.5764, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 1.16741214057508e-05, |
|
"loss": 0.6732, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 1.1635782747603834e-05, |
|
"loss": 0.6053, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 1.159744408945687e-05, |
|
"loss": 0.6293, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 1.1559105431309903e-05, |
|
"loss": 0.5773, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 3.08, |
|
"learning_rate": 1.152076677316294e-05, |
|
"loss": 0.5236, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 1.1482428115015975e-05, |
|
"loss": 0.59, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 1.144408945686901e-05, |
|
"loss": 0.5683, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 3.1, |
|
"learning_rate": 1.1405750798722045e-05, |
|
"loss": 0.6164, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 1.136741214057508e-05, |
|
"loss": 0.6331, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 1.1329073482428116e-05, |
|
"loss": 0.6055, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 1.129073482428115e-05, |
|
"loss": 0.6308, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 1.1252396166134186e-05, |
|
"loss": 0.4942, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 3.13, |
|
"learning_rate": 1.1214057507987221e-05, |
|
"loss": 0.5791, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 1.1175718849840255e-05, |
|
"loss": 0.6238, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 1.113738019169329e-05, |
|
"loss": 0.6092, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 1.1099041533546325e-05, |
|
"loss": 0.5716, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 1.1060702875399362e-05, |
|
"loss": 0.677, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 1.1022364217252397e-05, |
|
"loss": 0.5756, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 1.0984025559105432e-05, |
|
"loss": 0.6124, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 1.0945686900958467e-05, |
|
"loss": 0.6069, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 1.0907348242811502e-05, |
|
"loss": 0.6916, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 1.0869009584664538e-05, |
|
"loss": 0.6452, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 1.0830670926517573e-05, |
|
"loss": 0.7066, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 1.0792332268370606e-05, |
|
"loss": 0.6098, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 1.0753993610223641e-05, |
|
"loss": 0.6323, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 1.0715654952076677e-05, |
|
"loss": 0.6131, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 1.0677316293929712e-05, |
|
"loss": 0.6357, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 1.0638977635782749e-05, |
|
"loss": 0.5846, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 1.0600638977635784e-05, |
|
"loss": 0.5387, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 1.0562300319488819e-05, |
|
"loss": 0.5785, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 1.0523961661341854e-05, |
|
"loss": 0.6075, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 1.048562300319489e-05, |
|
"loss": 0.6335, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 3.26, |
|
"learning_rate": 1.0447284345047924e-05, |
|
"loss": 0.6515, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 1.0408945686900958e-05, |
|
"loss": 0.5713, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 1.0370607028753993e-05, |
|
"loss": 0.556, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 1.0332268370607028e-05, |
|
"loss": 0.6185, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 3.28, |
|
"learning_rate": 1.0293929712460063e-05, |
|
"loss": 0.4983, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 1.0255591054313099e-05, |
|
"loss": 0.5656, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 1.0217252396166134e-05, |
|
"loss": 0.4931, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 1.017891373801917e-05, |
|
"loss": 0.6462, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 3.31, |
|
"learning_rate": 1.0140575079872206e-05, |
|
"loss": 0.6749, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 1.0102236421725241e-05, |
|
"loss": 0.5857, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 1.0063897763578276e-05, |
|
"loss": 0.5665, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 1.002555910543131e-05, |
|
"loss": 0.7366, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 9.987220447284345e-06, |
|
"loss": 0.6487, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 9.94888178913738e-06, |
|
"loss": 0.5507, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 9.910543130990415e-06, |
|
"loss": 0.5857, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 3.35, |
|
"learning_rate": 9.87220447284345e-06, |
|
"loss": 0.5882, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 9.833865814696485e-06, |
|
"loss": 0.6147, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 9.79552715654952e-06, |
|
"loss": 0.6745, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 9.757188498402557e-06, |
|
"loss": 0.5957, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 9.718849840255593e-06, |
|
"loss": 0.644, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 9.680511182108626e-06, |
|
"loss": 0.6858, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 9.642172523961661e-06, |
|
"loss": 0.5156, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 3.4, |
|
"learning_rate": 9.603833865814696e-06, |
|
"loss": 0.5538, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 9.565495207667732e-06, |
|
"loss": 0.6102, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 9.527156549520767e-06, |
|
"loss": 0.5974, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 9.488817891373802e-06, |
|
"loss": 0.5425, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 9.450479233226837e-06, |
|
"loss": 0.5704, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 9.412140575079872e-06, |
|
"loss": 0.68, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 9.373801916932907e-06, |
|
"loss": 0.6226, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 9.335463258785944e-06, |
|
"loss": 0.6597, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 9.297124600638978e-06, |
|
"loss": 0.6329, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 9.258785942492013e-06, |
|
"loss": 0.5686, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 3.46, |
|
"learning_rate": 9.220447284345048e-06, |
|
"loss": 0.5101, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 9.182108626198083e-06, |
|
"loss": 0.595, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 9.143769968051118e-06, |
|
"loss": 0.6005, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 9.105431309904154e-06, |
|
"loss": 0.5928, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 9.067092651757189e-06, |
|
"loss": 0.6318, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 9.028753993610224e-06, |
|
"loss": 0.6383, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 8.990415335463259e-06, |
|
"loss": 0.5876, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 8.952076677316294e-06, |
|
"loss": 0.6298, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 8.91373801916933e-06, |
|
"loss": 0.5772, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 8.875399361022365e-06, |
|
"loss": 0.7086, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 8.8370607028754e-06, |
|
"loss": 0.5835, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 3.53, |
|
"learning_rate": 8.798722044728435e-06, |
|
"loss": 0.596, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 8.76038338658147e-06, |
|
"loss": 0.6359, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 8.722044728434505e-06, |
|
"loss": 0.5388, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 3.55, |
|
"learning_rate": 8.68370607028754e-06, |
|
"loss": 0.6212, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 8.645367412140575e-06, |
|
"loss": 0.6414, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 8.60702875399361e-06, |
|
"loss": 0.6626, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 8.568690095846646e-06, |
|
"loss": 0.6268, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 8.53035143769968e-06, |
|
"loss": 0.6053, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 3.58, |
|
"learning_rate": 8.492012779552714e-06, |
|
"loss": 0.592, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 8.453674121405751e-06, |
|
"loss": 0.5316, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 8.415335463258786e-06, |
|
"loss": 0.6674, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 8.376996805111822e-06, |
|
"loss": 0.6909, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 8.338658146964857e-06, |
|
"loss": 0.6631, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 8.300319488817892e-06, |
|
"loss": 0.5916, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 8.261980830670927e-06, |
|
"loss": 0.6679, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 8.223642172523962e-06, |
|
"loss": 0.6809, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 8.185303514376997e-06, |
|
"loss": 0.592, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 8.146964856230031e-06, |
|
"loss": 0.6234, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 8.108626198083066e-06, |
|
"loss": 0.6471, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 8.070287539936101e-06, |
|
"loss": 0.5573, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 8.031948881789138e-06, |
|
"loss": 0.5794, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 7.993610223642173e-06, |
|
"loss": 0.5485, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 7.955271565495208e-06, |
|
"loss": 0.5733, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 7.916932907348244e-06, |
|
"loss": 0.6405, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 7.878594249201279e-06, |
|
"loss": 0.5716, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 7.840255591054314e-06, |
|
"loss": 0.5821, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 7.801916932907349e-06, |
|
"loss": 0.5737, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 7.763578274760383e-06, |
|
"loss": 0.652, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 3.71, |
|
"learning_rate": 7.725239616613418e-06, |
|
"loss": 0.5798, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 7.686900958466453e-06, |
|
"loss": 0.7334, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 7.648562300319488e-06, |
|
"loss": 0.6044, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 3.73, |
|
"learning_rate": 7.610223642172524e-06, |
|
"loss": 0.6463, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 7.571884984025559e-06, |
|
"loss": 0.5535, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 7.533546325878594e-06, |
|
"loss": 0.6259, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 7.49520766773163e-06, |
|
"loss": 0.5918, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 7.456869009584665e-06, |
|
"loss": 0.5644, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 7.4185303514377e-06, |
|
"loss": 0.583, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 7.380191693290735e-06, |
|
"loss": 0.6453, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 7.34185303514377e-06, |
|
"loss": 0.6408, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 7.3035143769968046e-06, |
|
"loss": 0.5768, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 7.2651757188498406e-06, |
|
"loss": 0.5839, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 7.226837060702876e-06, |
|
"loss": 0.5691, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 3.8, |
|
"learning_rate": 7.188498402555911e-06, |
|
"loss": 0.5965, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 7.150159744408946e-06, |
|
"loss": 0.5495, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 7.11182108626198e-06, |
|
"loss": 0.5967, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 7.0734824281150155e-06, |
|
"loss": 0.6737, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 7.0351437699680516e-06, |
|
"loss": 0.5666, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 6.996805111821087e-06, |
|
"loss": 0.6234, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 6.958466453674122e-06, |
|
"loss": 0.5168, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 6.920127795527156e-06, |
|
"loss": 0.578, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 3.85, |
|
"learning_rate": 6.881789137380191e-06, |
|
"loss": 0.7034, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 6.843450479233227e-06, |
|
"loss": 0.6059, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 6.8051118210862625e-06, |
|
"loss": 0.5167, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 6.766773162939298e-06, |
|
"loss": 0.5822, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 6.728434504792332e-06, |
|
"loss": 0.632, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 6.690095846645367e-06, |
|
"loss": 0.5465, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 6.651757188498402e-06, |
|
"loss": 0.5827, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 6.613418530351438e-06, |
|
"loss": 0.612, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 3.9, |
|
"learning_rate": 6.5750798722044735e-06, |
|
"loss": 0.6313, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 6.536741214057508e-06, |
|
"loss": 0.607, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 6.498402555910543e-06, |
|
"loss": 0.5467, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 6.460063897763578e-06, |
|
"loss": 0.6058, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 6.421725239616613e-06, |
|
"loss": 0.586, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 6.383386581469649e-06, |
|
"loss": 0.631, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 6.345047923322684e-06, |
|
"loss": 0.5772, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 6.306709265175719e-06, |
|
"loss": 0.5529, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 6.268370607028754e-06, |
|
"loss": 0.6076, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 6.230031948881789e-06, |
|
"loss": 0.5828, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 6.191693290734825e-06, |
|
"loss": 0.5867, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 6.1533546325878595e-06, |
|
"loss": 0.6618, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 3.98, |
|
"learning_rate": 6.115015974440895e-06, |
|
"loss": 0.6218, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 6.07667731629393e-06, |
|
"loss": 0.6347, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 6.038338658146965e-06, |
|
"loss": 0.6346, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 6e-06, |
|
"loss": 0.6353, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy": 0.5885913968086243, |
|
"eval_loss": 0.723468542098999, |
|
"eval_runtime": 3297.4284, |
|
"eval_samples_per_second": 1.303, |
|
"eval_steps_per_second": 0.041, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 5.961661341853035e-06, |
|
"loss": 0.6546, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 5.9233226837060705e-06, |
|
"loss": 0.5473, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 5.884984025559106e-06, |
|
"loss": 0.6159, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 5.846645367412141e-06, |
|
"loss": 0.5975, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 4.03, |
|
"learning_rate": 5.808306709265176e-06, |
|
"loss": 0.5331, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 5.76996805111821e-06, |
|
"loss": 0.5184, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 5.731629392971246e-06, |
|
"loss": 0.5705, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 5.6932907348242815e-06, |
|
"loss": 0.6562, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 5.654952076677317e-06, |
|
"loss": 0.5881, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 5.616613418530352e-06, |
|
"loss": 0.6068, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 5.578274760383386e-06, |
|
"loss": 0.5763, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 5.539936102236422e-06, |
|
"loss": 0.6508, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"learning_rate": 5.501597444089457e-06, |
|
"loss": 0.6145, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 5.4632587859424925e-06, |
|
"loss": 0.632, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 5.424920127795528e-06, |
|
"loss": 0.6083, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 5.386581469648562e-06, |
|
"loss": 0.4836, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 5.348242811501597e-06, |
|
"loss": 0.5213, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 5.309904153354633e-06, |
|
"loss": 0.6166, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 5.271565495207668e-06, |
|
"loss": 0.6271, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 5.2332268370607034e-06, |
|
"loss": 0.5967, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 5.194888178913738e-06, |
|
"loss": 0.6001, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 5.156549520766773e-06, |
|
"loss": 0.5936, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 5.118210862619808e-06, |
|
"loss": 0.6392, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 5.079872204472844e-06, |
|
"loss": 0.6299, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"learning_rate": 5.041533546325879e-06, |
|
"loss": 0.5395, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 5.003194888178914e-06, |
|
"loss": 0.6005, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 4.964856230031949e-06, |
|
"loss": 0.5863, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 4.926517571884984e-06, |
|
"loss": 0.5981, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 4.88817891373802e-06, |
|
"loss": 0.5974, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 4.849840255591055e-06, |
|
"loss": 0.6026, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 4.811501597444089e-06, |
|
"loss": 0.5244, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 4.7731629392971246e-06, |
|
"loss": 0.6554, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 4.21, |
|
"learning_rate": 4.73482428115016e-06, |
|
"loss": 0.6832, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 4.696485623003195e-06, |
|
"loss": 0.5667, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 4.658146964856231e-06, |
|
"loss": 0.6229, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 4.619808306709265e-06, |
|
"loss": 0.6106, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 4.5814696485623e-06, |
|
"loss": 0.6337, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 4.5431309904153356e-06, |
|
"loss": 0.5657, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 4.504792332268371e-06, |
|
"loss": 0.5111, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 4.466453674121406e-06, |
|
"loss": 0.5832, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 4.26, |
|
"learning_rate": 4.428115015974441e-06, |
|
"loss": 0.6349, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 4.389776357827476e-06, |
|
"loss": 0.6079, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 4.351437699680511e-06, |
|
"loss": 0.6025, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 4.3130990415335465e-06, |
|
"loss": 0.6749, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 4.274760383386582e-06, |
|
"loss": 0.4955, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 4.236421725239617e-06, |
|
"loss": 0.6526, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 4.198083067092652e-06, |
|
"loss": 0.5981, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 4.159744408945687e-06, |
|
"loss": 0.5532, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 4.121405750798722e-06, |
|
"loss": 0.5531, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 4.0830670926517575e-06, |
|
"loss": 0.7127, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 4.044728434504792e-06, |
|
"loss": 0.5255, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 4.006389776357828e-06, |
|
"loss": 0.5767, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 4.34, |
|
"learning_rate": 3.968051118210863e-06, |
|
"loss": 0.5786, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 3.929712460063898e-06, |
|
"loss": 0.612, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 3.8913738019169325e-06, |
|
"loss": 0.5656, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 3.853035143769968e-06, |
|
"loss": 0.5889, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 3.8146964856230033e-06, |
|
"loss": 0.5623, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 3.7763578274760384e-06, |
|
"loss": 0.5682, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 3.7380191693290736e-06, |
|
"loss": 0.6046, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 3.6996805111821087e-06, |
|
"loss": 0.5539, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 4.39, |
|
"learning_rate": 3.661341853035144e-06, |
|
"loss": 0.5942, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 3.623003194888179e-06, |
|
"loss": 0.5913, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 3.5846645367412142e-06, |
|
"loss": 0.6728, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 3.546325878594249e-06, |
|
"loss": 0.6059, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 3.5079872204472846e-06, |
|
"loss": 0.5953, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 3.4696485623003197e-06, |
|
"loss": 0.514, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 3.4313099041533545e-06, |
|
"loss": 0.5795, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 4.43, |
|
"learning_rate": 3.39297124600639e-06, |
|
"loss": 0.5996, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 4.44, |
|
"learning_rate": 3.354632587859425e-06, |
|
"loss": 0.7118, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 3.31629392971246e-06, |
|
"loss": 0.5451, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 3.2779552715654956e-06, |
|
"loss": 0.6372, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 3.2396166134185303e-06, |
|
"loss": 0.5097, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 3.201277955271566e-06, |
|
"loss": 0.7025, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 3.1629392971246006e-06, |
|
"loss": 0.5575, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 4.48, |
|
"learning_rate": 3.124600638977636e-06, |
|
"loss": 0.6481, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 3.0862619808306714e-06, |
|
"loss": 0.6648, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 3.047923322683706e-06, |
|
"loss": 0.6175, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 3.0095846645367413e-06, |
|
"loss": 0.6202, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 2.9712460063897764e-06, |
|
"loss": 0.6417, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 2.9329073482428116e-06, |
|
"loss": 0.5969, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 2.8945686900958464e-06, |
|
"loss": 0.6495, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 2.856230031948882e-06, |
|
"loss": 0.5732, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 4.53, |
|
"learning_rate": 2.817891373801917e-06, |
|
"loss": 0.6285, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 2.779552715654952e-06, |
|
"loss": 0.5838, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 2.7412140575079874e-06, |
|
"loss": 0.5399, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 2.702875399361022e-06, |
|
"loss": 0.6276, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 2.6645367412140573e-06, |
|
"loss": 0.6091, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 2.626198083067093e-06, |
|
"loss": 0.6779, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 2.5878594249201277e-06, |
|
"loss": 0.6196, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 2.5495207667731633e-06, |
|
"loss": 0.5829, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 2.511182108626198e-06, |
|
"loss": 0.6665, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 2.472843450479233e-06, |
|
"loss": 0.6329, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 2.4345047923322688e-06, |
|
"loss": 0.6559, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 2.3961661341853035e-06, |
|
"loss": 0.6463, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 2.3578274760383387e-06, |
|
"loss": 0.6325, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 4.61, |
|
"learning_rate": 2.319488817891374e-06, |
|
"loss": 0.6303, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 2.281150159744409e-06, |
|
"loss": 0.661, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 2.242811501597444e-06, |
|
"loss": 0.5913, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 2.2044728434504793e-06, |
|
"loss": 0.5916, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 2.1661341853035145e-06, |
|
"loss": 0.6081, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.1277955271565492e-06, |
|
"loss": 0.6071, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 2.089456869009585e-06, |
|
"loss": 0.6329, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 2.05111821086262e-06, |
|
"loss": 0.6528, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 4.66, |
|
"learning_rate": 2.0127795527156547e-06, |
|
"loss": 0.5985, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 1.9744408945686903e-06, |
|
"loss": 0.56, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 1.936102236421725e-06, |
|
"loss": 0.5896, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 1.8977635782747604e-06, |
|
"loss": 0.5485, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 4.69, |
|
"learning_rate": 1.8594249201277956e-06, |
|
"loss": 0.5567, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 1.8210862619808305e-06, |
|
"loss": 0.5926, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 1.782747603833866e-06, |
|
"loss": 0.5978, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 4.71, |
|
"learning_rate": 1.744408945686901e-06, |
|
"loss": 0.6255, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.7060702875399362e-06, |
|
"loss": 0.6374, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.6677316293929712e-06, |
|
"loss": 0.5902, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.6293929712460064e-06, |
|
"loss": 0.5465, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 1.5910543130990417e-06, |
|
"loss": 0.6076, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 1.5527156549520767e-06, |
|
"loss": 0.5938, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.5143769968051119e-06, |
|
"loss": 0.6822, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.476038338658147e-06, |
|
"loss": 0.6091, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 1.4376996805111822e-06, |
|
"loss": 0.6528, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.3993610223642171e-06, |
|
"loss": 0.4949, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.3610223642172525e-06, |
|
"loss": 0.564, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 4.78, |
|
"learning_rate": 1.3226837060702877e-06, |
|
"loss": 0.5988, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 1.2843450479233226e-06, |
|
"loss": 0.6042, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 1.2460063897763578e-06, |
|
"loss": 0.6425, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 1.207667731629393e-06, |
|
"loss": 0.6112, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 1.1693290734824281e-06, |
|
"loss": 0.6148, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 1.1309904153354633e-06, |
|
"loss": 0.6198, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 1.0926517571884984e-06, |
|
"loss": 0.5613, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 1.0543130990415336e-06, |
|
"loss": 0.6095, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 1.0159744408945686e-06, |
|
"loss": 0.5758, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 9.77635782747604e-07, |
|
"loss": 0.5765, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 4.84, |
|
"learning_rate": 9.39297124600639e-07, |
|
"loss": 0.5575, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 9.009584664536742e-07, |
|
"loss": 0.6533, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 8.626198083067092e-07, |
|
"loss": 0.5958, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 8.242811501597445e-07, |
|
"loss": 0.6021, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 7.859424920127796e-07, |
|
"loss": 0.6594, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 7.476038338658146e-07, |
|
"loss": 0.6509, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 7.092651757188499e-07, |
|
"loss": 0.5746, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 6.709265175718849e-07, |
|
"loss": 0.6054, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 4.89, |
|
"learning_rate": 6.325878594249202e-07, |
|
"loss": 0.6509, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 5.942492012779553e-07, |
|
"loss": 0.6549, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 5.559105431309904e-07, |
|
"loss": 0.6603, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 5.175718849840256e-07, |
|
"loss": 0.5868, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 4.792332268370607e-07, |
|
"loss": 0.5623, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 4.408945686900959e-07, |
|
"loss": 0.5934, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 4.93, |
|
"learning_rate": 4.02555910543131e-07, |
|
"loss": 0.5603, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 3.6421725239616615e-07, |
|
"loss": 0.6026, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 3.2587859424920126e-07, |
|
"loss": 0.6624, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 2.875399361022364e-07, |
|
"loss": 0.5714, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 2.492012779552716e-07, |
|
"loss": 0.649, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 4.96, |
|
"learning_rate": 2.1086261980830673e-07, |
|
"loss": 0.6734, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 1.7252396166134187e-07, |
|
"loss": 0.566, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 1.34185303514377e-07, |
|
"loss": 0.4991, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 4.98, |
|
"learning_rate": 9.584664536741214e-08, |
|
"loss": 0.5879, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 5.7507987220447286e-08, |
|
"loss": 0.7251, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 1.9169329073482428e-08, |
|
"loss": 0.6032, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy": 0.5890570282936096, |
|
"eval_loss": 0.7185935974121094, |
|
"eval_runtime": 3283.9525, |
|
"eval_samples_per_second": 1.308, |
|
"eval_steps_per_second": 0.041, |
|
"step": 1565 |
|
} |
|
], |
|
"logging_steps": 2, |
|
"max_steps": 1565, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"total_flos": 3.4123046191104e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|