|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.5011305585796835, |
|
"eval_steps": 500, |
|
"global_step": 1496, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.3170796398874254, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 2.7671, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.29680852396718344, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 2.6668, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.2873156967491437, |
|
"learning_rate": 3e-06, |
|
"loss": 2.6609, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.32279830604138887, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 2.6938, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.3053378475916063, |
|
"learning_rate": 5e-06, |
|
"loss": 2.7157, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.31356199532358553, |
|
"learning_rate": 6e-06, |
|
"loss": 2.6557, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.31222297787622016, |
|
"learning_rate": 7e-06, |
|
"loss": 2.7756, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.31394414996683867, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 2.6484, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.30526319548839004, |
|
"learning_rate": 9e-06, |
|
"loss": 2.6737, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.2841622783139327, |
|
"learning_rate": 1e-05, |
|
"loss": 2.6651, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.3046154890794536, |
|
"learning_rate": 9.999997212173297e-06, |
|
"loss": 2.6241, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.3035503343057213, |
|
"learning_rate": 9.9999888486963e-06, |
|
"loss": 2.6725, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.32373433520739703, |
|
"learning_rate": 9.999974909578329e-06, |
|
"loss": 2.6226, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.31263516602351327, |
|
"learning_rate": 9.999955394834935e-06, |
|
"loss": 2.6546, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.3200414604098111, |
|
"learning_rate": 9.999930304487874e-06, |
|
"loss": 2.703, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2733658015391629, |
|
"learning_rate": 9.99989963856513e-06, |
|
"loss": 2.6281, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.29070953298516844, |
|
"learning_rate": 9.999863397100895e-06, |
|
"loss": 2.6899, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2688747648684131, |
|
"learning_rate": 9.999821580135584e-06, |
|
"loss": 2.6487, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.30758393123030825, |
|
"learning_rate": 9.999774187715831e-06, |
|
"loss": 2.7497, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2828363345308586, |
|
"learning_rate": 9.999721219894482e-06, |
|
"loss": 2.5763, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2663942725881501, |
|
"learning_rate": 9.999662676730606e-06, |
|
"loss": 2.6937, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2689492534938777, |
|
"learning_rate": 9.999598558289483e-06, |
|
"loss": 2.6607, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2721982530512938, |
|
"learning_rate": 9.999528864642613e-06, |
|
"loss": 2.6965, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.30924763996084126, |
|
"learning_rate": 9.999453595867717e-06, |
|
"loss": 2.7285, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.31069570849542394, |
|
"learning_rate": 9.999372752048729e-06, |
|
"loss": 2.7327, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.28105572772546, |
|
"learning_rate": 9.999286333275797e-06, |
|
"loss": 2.7391, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.29124281802756785, |
|
"learning_rate": 9.999194339645292e-06, |
|
"loss": 2.736, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.28911610699083024, |
|
"learning_rate": 9.999096771259797e-06, |
|
"loss": 2.7244, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.3218998422572818, |
|
"learning_rate": 9.998993628228117e-06, |
|
"loss": 2.6511, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.29829283998012357, |
|
"learning_rate": 9.998884910665267e-06, |
|
"loss": 2.6505, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2871946091545111, |
|
"learning_rate": 9.998770618692483e-06, |
|
"loss": 2.672, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2921996511900815, |
|
"learning_rate": 9.998650752437214e-06, |
|
"loss": 2.7574, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2882670075948252, |
|
"learning_rate": 9.998525312033127e-06, |
|
"loss": 2.6553, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.29239646966364624, |
|
"learning_rate": 9.998394297620104e-06, |
|
"loss": 2.7093, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.27417884873271275, |
|
"learning_rate": 9.998257709344246e-06, |
|
"loss": 2.7521, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2841800938763941, |
|
"learning_rate": 9.998115547357862e-06, |
|
"loss": 2.6787, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2856884783984889, |
|
"learning_rate": 9.997967811819485e-06, |
|
"loss": 2.6779, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2683793206277986, |
|
"learning_rate": 9.997814502893857e-06, |
|
"loss": 2.6535, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.26118210674650083, |
|
"learning_rate": 9.997655620751939e-06, |
|
"loss": 2.543, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.284127574743679, |
|
"learning_rate": 9.997491165570907e-06, |
|
"loss": 2.6393, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.28201563255626655, |
|
"learning_rate": 9.997321137534146e-06, |
|
"loss": 2.6337, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.26033949026291303, |
|
"learning_rate": 9.997145536831262e-06, |
|
"loss": 2.6164, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2988779986281759, |
|
"learning_rate": 9.996964363658072e-06, |
|
"loss": 2.5932, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.2759490337531855, |
|
"learning_rate": 9.996777618216608e-06, |
|
"loss": 2.684, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.28293313936126274, |
|
"learning_rate": 9.996585300715117e-06, |
|
"loss": 2.6431, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2847363633557033, |
|
"learning_rate": 9.996387411368053e-06, |
|
"loss": 2.6603, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.28381392284071094, |
|
"learning_rate": 9.996183950396094e-06, |
|
"loss": 2.6464, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2916519709131871, |
|
"learning_rate": 9.995974918026125e-06, |
|
"loss": 2.6885, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2700571651342325, |
|
"learning_rate": 9.995760314491243e-06, |
|
"loss": 2.6409, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.277172183309257, |
|
"learning_rate": 9.995540140030759e-06, |
|
"loss": 2.6321, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2721933123803388, |
|
"learning_rate": 9.995314394890196e-06, |
|
"loss": 2.6912, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2859308096097758, |
|
"learning_rate": 9.99508307932129e-06, |
|
"loss": 2.692, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2988143743206093, |
|
"learning_rate": 9.99484619358199e-06, |
|
"loss": 2.6676, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.287084630302098, |
|
"learning_rate": 9.994603737936448e-06, |
|
"loss": 2.6356, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.28178162133982554, |
|
"learning_rate": 9.99435571265504e-06, |
|
"loss": 2.5701, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.26758913727535316, |
|
"learning_rate": 9.994102118014346e-06, |
|
"loss": 2.5164, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.27606618820293444, |
|
"learning_rate": 9.993842954297155e-06, |
|
"loss": 2.5834, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.27242713636520005, |
|
"learning_rate": 9.993578221792469e-06, |
|
"loss": 2.7215, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2839211405332648, |
|
"learning_rate": 9.9933079207955e-06, |
|
"loss": 2.635, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2685892394986992, |
|
"learning_rate": 9.99303205160767e-06, |
|
"loss": 2.7163, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2697296518070164, |
|
"learning_rate": 9.992750614536606e-06, |
|
"loss": 2.5452, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2854401338053572, |
|
"learning_rate": 9.99246360989615e-06, |
|
"loss": 2.5883, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.29011457596920015, |
|
"learning_rate": 9.992171038006346e-06, |
|
"loss": 2.6617, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2712048800249517, |
|
"learning_rate": 9.991872899193455e-06, |
|
"loss": 2.6344, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.29054941045481053, |
|
"learning_rate": 9.991569193789938e-06, |
|
"loss": 2.7087, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.26950023306799387, |
|
"learning_rate": 9.991259922134466e-06, |
|
"loss": 2.6186, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2612400944442634, |
|
"learning_rate": 9.99094508457192e-06, |
|
"loss": 2.6211, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2714769376106506, |
|
"learning_rate": 9.99062468145338e-06, |
|
"loss": 2.708, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2896534832484691, |
|
"learning_rate": 9.99029871313614e-06, |
|
"loss": 2.6488, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2734636975408946, |
|
"learning_rate": 9.989967179983699e-06, |
|
"loss": 2.617, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.29347749407064544, |
|
"learning_rate": 9.989630082365757e-06, |
|
"loss": 2.5147, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.26482740031050994, |
|
"learning_rate": 9.989287420658224e-06, |
|
"loss": 2.5581, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.2795723576861998, |
|
"learning_rate": 9.98893919524321e-06, |
|
"loss": 2.5844, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.3024349947315377, |
|
"learning_rate": 9.988585406509035e-06, |
|
"loss": 2.6604, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.3063626335259863, |
|
"learning_rate": 9.988226054850218e-06, |
|
"loss": 2.5631, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.29619182230702906, |
|
"learning_rate": 9.987861140667483e-06, |
|
"loss": 2.7107, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.2863355575783403, |
|
"learning_rate": 9.987490664367758e-06, |
|
"loss": 2.7283, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.29317077029265626, |
|
"learning_rate": 9.987114626364172e-06, |
|
"loss": 2.6591, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.27596698806471515, |
|
"learning_rate": 9.986733027076054e-06, |
|
"loss": 2.6256, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.27656717781378126, |
|
"learning_rate": 9.98634586692894e-06, |
|
"loss": 2.656, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.28757181359132244, |
|
"learning_rate": 9.985953146354566e-06, |
|
"loss": 2.7137, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.2853580962365858, |
|
"learning_rate": 9.985554865790862e-06, |
|
"loss": 2.7061, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.29503022029015374, |
|
"learning_rate": 9.985151025681964e-06, |
|
"loss": 2.6384, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.28368753243458983, |
|
"learning_rate": 9.98474162647821e-06, |
|
"loss": 2.6624, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.28280698760335077, |
|
"learning_rate": 9.984326668636131e-06, |
|
"loss": 2.6416, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.2860258768386144, |
|
"learning_rate": 9.983906152618458e-06, |
|
"loss": 2.5637, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.284560220784243, |
|
"learning_rate": 9.983480078894125e-06, |
|
"loss": 2.6423, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.30396814279935847, |
|
"learning_rate": 9.983048447938255e-06, |
|
"loss": 2.8193, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.2767383522832132, |
|
"learning_rate": 9.982611260232177e-06, |
|
"loss": 2.6299, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.2930929175146555, |
|
"learning_rate": 9.98216851626341e-06, |
|
"loss": 2.5475, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.30608721318647636, |
|
"learning_rate": 9.981720216525675e-06, |
|
"loss": 2.5872, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.3003738914561364, |
|
"learning_rate": 9.98126636151888e-06, |
|
"loss": 2.5907, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.3013039295708162, |
|
"learning_rate": 9.980806951749134e-06, |
|
"loss": 2.7558, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.3142726332807136, |
|
"learning_rate": 9.98034198772874e-06, |
|
"loss": 2.5396, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.29862717497789243, |
|
"learning_rate": 9.979871469976197e-06, |
|
"loss": 2.6472, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.28702408957648073, |
|
"learning_rate": 9.979395399016186e-06, |
|
"loss": 2.6004, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.3240509173310664, |
|
"learning_rate": 9.978913775379595e-06, |
|
"loss": 2.6406, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.28422751794327694, |
|
"learning_rate": 9.978426599603492e-06, |
|
"loss": 2.6116, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.29107866308539426, |
|
"learning_rate": 9.977933872231148e-06, |
|
"loss": 2.7328, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.2952731989387591, |
|
"learning_rate": 9.977435593812013e-06, |
|
"loss": 2.6816, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.29516315064408405, |
|
"learning_rate": 9.976931764901733e-06, |
|
"loss": 2.6957, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.30826242985921637, |
|
"learning_rate": 9.976422386062147e-06, |
|
"loss": 2.6688, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.31006008358094334, |
|
"learning_rate": 9.975907457861276e-06, |
|
"loss": 2.6719, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.3066420585453031, |
|
"learning_rate": 9.975386980873332e-06, |
|
"loss": 2.7008, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.2927576957883402, |
|
"learning_rate": 9.974860955678715e-06, |
|
"loss": 2.6419, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.29451095298628577, |
|
"learning_rate": 9.974329382864013e-06, |
|
"loss": 2.5173, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.2950560536107638, |
|
"learning_rate": 9.973792263021998e-06, |
|
"loss": 2.5954, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.30334538972041447, |
|
"learning_rate": 9.97324959675163e-06, |
|
"loss": 2.698, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.295569951999041, |
|
"learning_rate": 9.972701384658052e-06, |
|
"loss": 2.6575, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.2779283004801193, |
|
"learning_rate": 9.972147627352593e-06, |
|
"loss": 2.5847, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.29050023569534117, |
|
"learning_rate": 9.971588325452764e-06, |
|
"loss": 2.632, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.28944011458219093, |
|
"learning_rate": 9.971023479582258e-06, |
|
"loss": 2.6059, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.30361043855717296, |
|
"learning_rate": 9.970453090370955e-06, |
|
"loss": 2.6696, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.2903070720744162, |
|
"learning_rate": 9.969877158454912e-06, |
|
"loss": 2.6237, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.2947711559457182, |
|
"learning_rate": 9.96929568447637e-06, |
|
"loss": 2.5382, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.3135860050045197, |
|
"learning_rate": 9.968708669083744e-06, |
|
"loss": 2.4986, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.28678359148213384, |
|
"learning_rate": 9.96811611293164e-06, |
|
"loss": 2.4735, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.2916898789138187, |
|
"learning_rate": 9.967518016680828e-06, |
|
"loss": 2.6435, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.2972152291286803, |
|
"learning_rate": 9.966914380998267e-06, |
|
"loss": 2.6284, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.3188049030703794, |
|
"learning_rate": 9.966305206557092e-06, |
|
"loss": 2.6791, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.2735280976751751, |
|
"learning_rate": 9.965690494036608e-06, |
|
"loss": 2.5355, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.3085365242350297, |
|
"learning_rate": 9.965070244122302e-06, |
|
"loss": 2.6272, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.28189250618902306, |
|
"learning_rate": 9.964444457505833e-06, |
|
"loss": 2.6136, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.2909831086496679, |
|
"learning_rate": 9.963813134885035e-06, |
|
"loss": 2.5946, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.32772962273481576, |
|
"learning_rate": 9.963176276963916e-06, |
|
"loss": 2.5512, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.29674052656232275, |
|
"learning_rate": 9.962533884452654e-06, |
|
"loss": 2.6141, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.32953053258652276, |
|
"learning_rate": 9.961885958067604e-06, |
|
"loss": 2.5961, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.296659441540278, |
|
"learning_rate": 9.961232498531283e-06, |
|
"loss": 2.5345, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.29525949719018313, |
|
"learning_rate": 9.960573506572391e-06, |
|
"loss": 2.6226, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.3157983285097481, |
|
"learning_rate": 9.959908982925783e-06, |
|
"loss": 2.7319, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.28286217521963397, |
|
"learning_rate": 9.959238928332494e-06, |
|
"loss": 2.7099, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.2989054935005055, |
|
"learning_rate": 9.958563343539723e-06, |
|
"loss": 2.5254, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.31605549196587157, |
|
"learning_rate": 9.95788222930083e-06, |
|
"loss": 2.63, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 0.2942778521800686, |
|
"learning_rate": 9.957195586375352e-06, |
|
"loss": 2.6654, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.2841294889913612, |
|
"learning_rate": 9.956503415528984e-06, |
|
"loss": 2.5773, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.33126003232194934, |
|
"learning_rate": 9.955805717533586e-06, |
|
"loss": 2.4466, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.29995460365464693, |
|
"learning_rate": 9.955102493167184e-06, |
|
"loss": 2.6908, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3097272168432938, |
|
"learning_rate": 9.954393743213965e-06, |
|
"loss": 2.6364, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.29472319118829043, |
|
"learning_rate": 9.953679468464274e-06, |
|
"loss": 2.6355, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.30533560268177895, |
|
"learning_rate": 9.952959669714627e-06, |
|
"loss": 2.642, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.30490767031205196, |
|
"learning_rate": 9.952234347767688e-06, |
|
"loss": 2.625, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.30108111160509854, |
|
"learning_rate": 9.95150350343229e-06, |
|
"loss": 2.5336, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.35556322456241785, |
|
"learning_rate": 9.950767137523417e-06, |
|
"loss": 2.532, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.2878834805061441, |
|
"learning_rate": 9.950025250862214e-06, |
|
"loss": 2.5405, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3164131493463764, |
|
"learning_rate": 9.94927784427598e-06, |
|
"loss": 2.6325, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3003951728387244, |
|
"learning_rate": 9.948524918598175e-06, |
|
"loss": 2.6077, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3006003690530792, |
|
"learning_rate": 9.947766474668407e-06, |
|
"loss": 2.6045, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3033727253017958, |
|
"learning_rate": 9.947002513332441e-06, |
|
"loss": 2.6125, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.28736676466741445, |
|
"learning_rate": 9.946233035442191e-06, |
|
"loss": 2.598, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.29270217211382965, |
|
"learning_rate": 9.945458041855732e-06, |
|
"loss": 2.5943, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3048173187700944, |
|
"learning_rate": 9.944677533437274e-06, |
|
"loss": 2.6109, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.31512121937325066, |
|
"learning_rate": 9.943891511057193e-06, |
|
"loss": 2.5965, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.32931439227554016, |
|
"learning_rate": 9.943099975592003e-06, |
|
"loss": 2.7177, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3125427693663409, |
|
"learning_rate": 9.942302927924372e-06, |
|
"loss": 2.7429, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.31511491830731564, |
|
"learning_rate": 9.941500368943111e-06, |
|
"loss": 2.6877, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3110005885008245, |
|
"learning_rate": 9.940692299543177e-06, |
|
"loss": 2.6596, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.33338215837921104, |
|
"learning_rate": 9.939878720625675e-06, |
|
"loss": 2.574, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3057233369045081, |
|
"learning_rate": 9.939059633097852e-06, |
|
"loss": 2.6993, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3180182331769005, |
|
"learning_rate": 9.938235037873094e-06, |
|
"loss": 2.6828, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.29025477598221844, |
|
"learning_rate": 9.937404935870938e-06, |
|
"loss": 2.6985, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.2984082132718923, |
|
"learning_rate": 9.936569328017052e-06, |
|
"loss": 2.6061, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3062414255693518, |
|
"learning_rate": 9.935728215243248e-06, |
|
"loss": 2.7174, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3246654168330274, |
|
"learning_rate": 9.934881598487478e-06, |
|
"loss": 2.5767, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 0.3024015011695911, |
|
"learning_rate": 9.934029478693832e-06, |
|
"loss": 2.7215, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.29285564782553547, |
|
"learning_rate": 9.933171856812533e-06, |
|
"loss": 2.6268, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.30639638710780326, |
|
"learning_rate": 9.93230873379994e-06, |
|
"loss": 2.5962, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.30924253228683957, |
|
"learning_rate": 9.931440110618551e-06, |
|
"loss": 2.5709, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.3072053847437663, |
|
"learning_rate": 9.930565988236992e-06, |
|
"loss": 2.6555, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.3092824759025904, |
|
"learning_rate": 9.929686367630025e-06, |
|
"loss": 2.5353, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.2986363811791144, |
|
"learning_rate": 9.92880124977854e-06, |
|
"loss": 2.5062, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.31033745188051043, |
|
"learning_rate": 9.927910635669563e-06, |
|
"loss": 2.4532, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.31699134746635294, |
|
"learning_rate": 9.92701452629624e-06, |
|
"loss": 2.7133, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.30793109197125706, |
|
"learning_rate": 9.926112922657857e-06, |
|
"loss": 2.6064, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.31386250063346854, |
|
"learning_rate": 9.925205825759813e-06, |
|
"loss": 2.6756, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.320593710482426, |
|
"learning_rate": 9.924293236613643e-06, |
|
"loss": 2.702, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.3156087384897636, |
|
"learning_rate": 9.923375156237001e-06, |
|
"loss": 2.6653, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.31854517610852556, |
|
"learning_rate": 9.922451585653668e-06, |
|
"loss": 2.5921, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.29132946424311973, |
|
"learning_rate": 9.921522525893548e-06, |
|
"loss": 2.5643, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.3053782409475688, |
|
"learning_rate": 9.92058797799266e-06, |
|
"loss": 2.6313, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.3212158908250996, |
|
"learning_rate": 9.91964794299315e-06, |
|
"loss": 2.5354, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.31759359549627775, |
|
"learning_rate": 9.918702421943279e-06, |
|
"loss": 2.6066, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.35379468325614266, |
|
"learning_rate": 9.917751415897424e-06, |
|
"loss": 2.6648, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.309402095205423, |
|
"learning_rate": 9.916794925916084e-06, |
|
"loss": 2.6547, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.3200272243437652, |
|
"learning_rate": 9.915832953065872e-06, |
|
"loss": 2.6266, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.33823480881963486, |
|
"learning_rate": 9.91486549841951e-06, |
|
"loss": 2.5839, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.30035503955484993, |
|
"learning_rate": 9.913892563055838e-06, |
|
"loss": 2.5857, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.29683903426101027, |
|
"learning_rate": 9.912914148059805e-06, |
|
"loss": 2.5867, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.3271230659325653, |
|
"learning_rate": 9.91193025452247e-06, |
|
"loss": 2.5397, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.30470387096928925, |
|
"learning_rate": 9.910940883541009e-06, |
|
"loss": 2.5745, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.31498884686525297, |
|
"learning_rate": 9.909946036218694e-06, |
|
"loss": 2.5992, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.3418035670034064, |
|
"learning_rate": 9.908945713664912e-06, |
|
"loss": 2.6089, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.34197561767453183, |
|
"learning_rate": 9.907939916995154e-06, |
|
"loss": 2.6431, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.32498858871967207, |
|
"learning_rate": 9.906928647331011e-06, |
|
"loss": 2.6333, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 0.31437972497517663, |
|
"learning_rate": 9.905911905800186e-06, |
|
"loss": 2.6326, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3133510801919385, |
|
"learning_rate": 9.904889693536475e-06, |
|
"loss": 2.6042, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3507289329205512, |
|
"learning_rate": 9.903862011679781e-06, |
|
"loss": 2.6468, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.31804925054207894, |
|
"learning_rate": 9.902828861376101e-06, |
|
"loss": 2.5869, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.32612924811219846, |
|
"learning_rate": 9.901790243777534e-06, |
|
"loss": 2.5671, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3120656810058216, |
|
"learning_rate": 9.900746160042273e-06, |
|
"loss": 2.5271, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3010484944903533, |
|
"learning_rate": 9.899696611334612e-06, |
|
"loss": 2.6452, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3239853259961015, |
|
"learning_rate": 9.89864159882493e-06, |
|
"loss": 2.7043, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.31146310204052247, |
|
"learning_rate": 9.897581123689705e-06, |
|
"loss": 2.6428, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.32324911380964977, |
|
"learning_rate": 9.896515187111508e-06, |
|
"loss": 2.5365, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3035841060643951, |
|
"learning_rate": 9.895443790278995e-06, |
|
"loss": 2.5286, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3122895366531592, |
|
"learning_rate": 9.894366934386913e-06, |
|
"loss": 2.7649, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3139971157354563, |
|
"learning_rate": 9.8932846206361e-06, |
|
"loss": 2.5452, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.32275803833132877, |
|
"learning_rate": 9.892196850233474e-06, |
|
"loss": 2.6, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.31242104963540335, |
|
"learning_rate": 9.891103624392042e-06, |
|
"loss": 2.5098, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3068416325400618, |
|
"learning_rate": 9.890004944330895e-06, |
|
"loss": 2.5743, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.4426076961648037, |
|
"learning_rate": 9.888900811275205e-06, |
|
"loss": 2.6019, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.32038220368233306, |
|
"learning_rate": 9.887791226456223e-06, |
|
"loss": 2.6093, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3189093163589122, |
|
"learning_rate": 9.886676191111283e-06, |
|
"loss": 2.5176, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.2975276499008464, |
|
"learning_rate": 9.885555706483792e-06, |
|
"loss": 2.6061, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3416785116972448, |
|
"learning_rate": 9.884429773823238e-06, |
|
"loss": 2.6776, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3324805417498029, |
|
"learning_rate": 9.883298394385186e-06, |
|
"loss": 2.4792, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.30942312887340606, |
|
"learning_rate": 9.88216156943127e-06, |
|
"loss": 2.636, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3331045418589145, |
|
"learning_rate": 9.881019300229196e-06, |
|
"loss": 2.6341, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.36589877238848295, |
|
"learning_rate": 9.879871588052748e-06, |
|
"loss": 2.4853, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.32143496183446163, |
|
"learning_rate": 9.87871843418177e-06, |
|
"loss": 2.6916, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3367329207268536, |
|
"learning_rate": 9.877559839902185e-06, |
|
"loss": 2.6412, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.32310923351581927, |
|
"learning_rate": 9.876395806505972e-06, |
|
"loss": 2.6126, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3148934971054658, |
|
"learning_rate": 9.875226335291181e-06, |
|
"loss": 2.6381, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 0.3074710317085622, |
|
"learning_rate": 9.874051427561929e-06, |
|
"loss": 2.7002, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.34735533732290175, |
|
"learning_rate": 9.872871084628387e-06, |
|
"loss": 2.6666, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3058697188419932, |
|
"learning_rate": 9.871685307806796e-06, |
|
"loss": 2.6453, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3263148148780988, |
|
"learning_rate": 9.870494098419448e-06, |
|
"loss": 2.5654, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.32436238979164406, |
|
"learning_rate": 9.869297457794698e-06, |
|
"loss": 2.6129, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3428513310711877, |
|
"learning_rate": 9.868095387266961e-06, |
|
"loss": 2.5645, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3228143817148822, |
|
"learning_rate": 9.866887888176697e-06, |
|
"loss": 2.5796, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.31185480509183455, |
|
"learning_rate": 9.865674961870428e-06, |
|
"loss": 2.5712, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.33915296042223153, |
|
"learning_rate": 9.864456609700726e-06, |
|
"loss": 2.7518, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.30521825876252084, |
|
"learning_rate": 9.86323283302621e-06, |
|
"loss": 2.6188, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3312279239531816, |
|
"learning_rate": 9.862003633211555e-06, |
|
"loss": 2.6723, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3155802081211641, |
|
"learning_rate": 9.860769011627476e-06, |
|
"loss": 2.5617, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.33185307827260807, |
|
"learning_rate": 9.859528969650739e-06, |
|
"loss": 2.5231, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.32247823002252857, |
|
"learning_rate": 9.858283508664153e-06, |
|
"loss": 2.6013, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.36392929357978193, |
|
"learning_rate": 9.857032630056569e-06, |
|
"loss": 2.6207, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.35286768129497353, |
|
"learning_rate": 9.85577633522288e-06, |
|
"loss": 2.6654, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3451232141077575, |
|
"learning_rate": 9.854514625564018e-06, |
|
"loss": 2.5755, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.345284537223161, |
|
"learning_rate": 9.853247502486957e-06, |
|
"loss": 2.5712, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3135899846602162, |
|
"learning_rate": 9.851974967404703e-06, |
|
"loss": 2.6604, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.38832041890168123, |
|
"learning_rate": 9.850697021736299e-06, |
|
"loss": 2.6048, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.4225646492630186, |
|
"learning_rate": 9.849413666906824e-06, |
|
"loss": 2.5395, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.32642912856178463, |
|
"learning_rate": 9.84812490434738e-06, |
|
"loss": 2.6747, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.31796660504566643, |
|
"learning_rate": 9.846830735495112e-06, |
|
"loss": 2.5409, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.35431307216240776, |
|
"learning_rate": 9.845531161793185e-06, |
|
"loss": 2.6876, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3346496670529943, |
|
"learning_rate": 9.844226184690793e-06, |
|
"loss": 2.6058, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.32803919215153093, |
|
"learning_rate": 9.842915805643156e-06, |
|
"loss": 2.6223, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3174652696747853, |
|
"learning_rate": 9.84160002611152e-06, |
|
"loss": 2.5318, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3464534397356932, |
|
"learning_rate": 9.840278847563147e-06, |
|
"loss": 2.6256, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3603315063519056, |
|
"learning_rate": 9.83895227147133e-06, |
|
"loss": 2.5634, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.37082730817007653, |
|
"learning_rate": 9.837620299315367e-06, |
|
"loss": 2.5691, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 0.3204057604236187, |
|
"learning_rate": 9.836282932580588e-06, |
|
"loss": 2.6175, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3185505322361351, |
|
"learning_rate": 9.834940172758327e-06, |
|
"loss": 2.6166, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3384771933662536, |
|
"learning_rate": 9.833592021345938e-06, |
|
"loss": 2.6049, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3593085061629509, |
|
"learning_rate": 9.832238479846788e-06, |
|
"loss": 2.7394, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3396962602147287, |
|
"learning_rate": 9.83087954977025e-06, |
|
"loss": 2.5878, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.33152125965893986, |
|
"learning_rate": 9.82951523263171e-06, |
|
"loss": 2.6987, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.328453399171747, |
|
"learning_rate": 9.828145529952558e-06, |
|
"loss": 2.6115, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3257263132699668, |
|
"learning_rate": 9.826770443260193e-06, |
|
"loss": 2.5727, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3215557023897131, |
|
"learning_rate": 9.825389974088019e-06, |
|
"loss": 2.6047, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.33394984016557416, |
|
"learning_rate": 9.824004123975434e-06, |
|
"loss": 2.5551, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3332665073444441, |
|
"learning_rate": 9.822612894467847e-06, |
|
"loss": 2.6769, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.31294573942582393, |
|
"learning_rate": 9.821216287116658e-06, |
|
"loss": 2.6919, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.36571617782823884, |
|
"learning_rate": 9.819814303479268e-06, |
|
"loss": 2.6413, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3238280366499427, |
|
"learning_rate": 9.81840694511907e-06, |
|
"loss": 2.5427, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3252099752700349, |
|
"learning_rate": 9.816994213605453e-06, |
|
"loss": 2.5705, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.32854133260432167, |
|
"learning_rate": 9.8155761105138e-06, |
|
"loss": 2.6458, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.37397526762013245, |
|
"learning_rate": 9.814152637425478e-06, |
|
"loss": 2.6586, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3376369075010002, |
|
"learning_rate": 9.812723795927848e-06, |
|
"loss": 2.5644, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3446187615353997, |
|
"learning_rate": 9.811289587614252e-06, |
|
"loss": 2.5545, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3244205903665975, |
|
"learning_rate": 9.809850014084022e-06, |
|
"loss": 2.5556, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.337581890379183, |
|
"learning_rate": 9.808405076942472e-06, |
|
"loss": 2.6887, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.33290044670235436, |
|
"learning_rate": 9.806954777800891e-06, |
|
"loss": 2.6148, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3186843594113733, |
|
"learning_rate": 9.805499118276555e-06, |
|
"loss": 2.6586, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3361466444273861, |
|
"learning_rate": 9.804038099992716e-06, |
|
"loss": 2.611, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3402845987243979, |
|
"learning_rate": 9.8025717245786e-06, |
|
"loss": 2.5221, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3417382980288246, |
|
"learning_rate": 9.801099993669403e-06, |
|
"loss": 2.6768, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3719689739322445, |
|
"learning_rate": 9.7996229089063e-06, |
|
"loss": 2.6276, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3350098243383065, |
|
"learning_rate": 9.798140471936437e-06, |
|
"loss": 2.5812, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3365657498619843, |
|
"learning_rate": 9.79665268441292e-06, |
|
"loss": 2.7137, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3517854831112496, |
|
"learning_rate": 9.79515954799483e-06, |
|
"loss": 2.7186, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 0.3609844532271263, |
|
"learning_rate": 9.793661064347205e-06, |
|
"loss": 2.619, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3440434547093879, |
|
"learning_rate": 9.792157235141054e-06, |
|
"loss": 2.6764, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.34983063201707953, |
|
"learning_rate": 9.790648062053341e-06, |
|
"loss": 2.5175, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3400967412647235, |
|
"learning_rate": 9.789133546766992e-06, |
|
"loss": 2.7174, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.34364284665946243, |
|
"learning_rate": 9.787613690970889e-06, |
|
"loss": 2.5042, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3193387372655362, |
|
"learning_rate": 9.78608849635987e-06, |
|
"loss": 2.6363, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3548903947858929, |
|
"learning_rate": 9.784557964634729e-06, |
|
"loss": 2.6435, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.352340261776187, |
|
"learning_rate": 9.783022097502204e-06, |
|
"loss": 2.5075, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3219062651106595, |
|
"learning_rate": 9.78148089667499e-06, |
|
"loss": 2.4898, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.447074149512361, |
|
"learning_rate": 9.77993436387173e-06, |
|
"loss": 2.6164, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.32502987522582333, |
|
"learning_rate": 9.778382500817006e-06, |
|
"loss": 2.6231, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.35790774148123833, |
|
"learning_rate": 9.776825309241347e-06, |
|
"loss": 2.572, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.33509687106667574, |
|
"learning_rate": 9.77526279088123e-06, |
|
"loss": 2.5331, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3425946543760854, |
|
"learning_rate": 9.773694947479064e-06, |
|
"loss": 2.5942, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3611654791904134, |
|
"learning_rate": 9.772121780783202e-06, |
|
"loss": 2.6132, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3333364079254133, |
|
"learning_rate": 9.770543292547928e-06, |
|
"loss": 2.6903, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3496197107588038, |
|
"learning_rate": 9.768959484533461e-06, |
|
"loss": 2.6104, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3482956524862094, |
|
"learning_rate": 9.767370358505958e-06, |
|
"loss": 2.5578, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3540980562435214, |
|
"learning_rate": 9.7657759162375e-06, |
|
"loss": 2.6823, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3378288565389089, |
|
"learning_rate": 9.764176159506097e-06, |
|
"loss": 2.6466, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3476414623758776, |
|
"learning_rate": 9.762571090095692e-06, |
|
"loss": 2.6817, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3468363770470217, |
|
"learning_rate": 9.760960709796141e-06, |
|
"loss": 2.7139, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.32968773412470165, |
|
"learning_rate": 9.759345020403233e-06, |
|
"loss": 2.6294, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.32461622302088416, |
|
"learning_rate": 9.757724023718671e-06, |
|
"loss": 2.5907, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.32724000019534877, |
|
"learning_rate": 9.756097721550078e-06, |
|
"loss": 2.5887, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.31378490221572475, |
|
"learning_rate": 9.754466115710992e-06, |
|
"loss": 2.6506, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3389812794104657, |
|
"learning_rate": 9.75282920802087e-06, |
|
"loss": 2.6872, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.3410772104137017, |
|
"learning_rate": 9.751187000305076e-06, |
|
"loss": 2.5604, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.32674524313857833, |
|
"learning_rate": 9.749539494394887e-06, |
|
"loss": 2.538, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.33858326387385124, |
|
"learning_rate": 9.747886692127486e-06, |
|
"loss": 2.6238, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 0.34589766292819163, |
|
"learning_rate": 9.746228595345965e-06, |
|
"loss": 2.6378, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.35188753149779034, |
|
"learning_rate": 9.744565205899317e-06, |
|
"loss": 2.5601, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.31652413044373107, |
|
"learning_rate": 9.742896525642442e-06, |
|
"loss": 2.5816, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3409957215748307, |
|
"learning_rate": 9.741222556436132e-06, |
|
"loss": 2.6467, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3960835352075213, |
|
"learning_rate": 9.739543300147085e-06, |
|
"loss": 2.665, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3287165509275585, |
|
"learning_rate": 9.737858758647889e-06, |
|
"loss": 2.6474, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3798162995387399, |
|
"learning_rate": 9.73616893381703e-06, |
|
"loss": 2.61, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3342253836017593, |
|
"learning_rate": 9.734473827538881e-06, |
|
"loss": 2.6278, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3555154455346524, |
|
"learning_rate": 9.73277344170371e-06, |
|
"loss": 2.6153, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.4386835777780206, |
|
"learning_rate": 9.731067778207665e-06, |
|
"loss": 2.6937, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.36322755314204486, |
|
"learning_rate": 9.729356838952788e-06, |
|
"loss": 2.6125, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3824261253050603, |
|
"learning_rate": 9.727640625847e-06, |
|
"loss": 2.6399, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3561152981527507, |
|
"learning_rate": 9.7259191408041e-06, |
|
"loss": 2.6995, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.31872571450635934, |
|
"learning_rate": 9.724192385743767e-06, |
|
"loss": 2.5594, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3456621738468028, |
|
"learning_rate": 9.722460362591565e-06, |
|
"loss": 2.6167, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.31936564875982554, |
|
"learning_rate": 9.720723073278922e-06, |
|
"loss": 2.6844, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.34576928319275374, |
|
"learning_rate": 9.718980519743142e-06, |
|
"loss": 2.6024, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.34636170754314316, |
|
"learning_rate": 9.717232703927402e-06, |
|
"loss": 2.5263, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.364738168081352, |
|
"learning_rate": 9.715479627780744e-06, |
|
"loss": 2.6529, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.33147850027819137, |
|
"learning_rate": 9.713721293258079e-06, |
|
"loss": 2.5856, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.36853607567398494, |
|
"learning_rate": 9.711957702320176e-06, |
|
"loss": 2.6117, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.35143052909025574, |
|
"learning_rate": 9.710188856933671e-06, |
|
"loss": 2.5388, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.346783453227663, |
|
"learning_rate": 9.70841475907106e-06, |
|
"loss": 2.5964, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3547742157857605, |
|
"learning_rate": 9.70663541071069e-06, |
|
"loss": 2.6319, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3767870601456973, |
|
"learning_rate": 9.70485081383677e-06, |
|
"loss": 2.6695, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.31099203580367296, |
|
"learning_rate": 9.703060970439357e-06, |
|
"loss": 2.5908, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3563365195434926, |
|
"learning_rate": 9.701265882514362e-06, |
|
"loss": 2.6798, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.34726289934734256, |
|
"learning_rate": 9.69946555206354e-06, |
|
"loss": 2.5322, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.3516286787687523, |
|
"learning_rate": 9.697659981094498e-06, |
|
"loss": 2.5811, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.34894053451732737, |
|
"learning_rate": 9.695849171620681e-06, |
|
"loss": 2.6177, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.33347703647479304, |
|
"learning_rate": 9.694033125661382e-06, |
|
"loss": 2.4915, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3762769415705139, |
|
"learning_rate": 9.692211845241724e-06, |
|
"loss": 2.6065, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3542465956570787, |
|
"learning_rate": 9.690385332392676e-06, |
|
"loss": 2.6482, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.33894973760786873, |
|
"learning_rate": 9.688553589151038e-06, |
|
"loss": 2.5544, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3646766361556834, |
|
"learning_rate": 9.686716617559442e-06, |
|
"loss": 2.6876, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.33444582420815583, |
|
"learning_rate": 9.684874419666355e-06, |
|
"loss": 2.6771, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3313405650870024, |
|
"learning_rate": 9.683026997526063e-06, |
|
"loss": 2.59, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.352783646632777, |
|
"learning_rate": 9.681174353198687e-06, |
|
"loss": 2.628, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3651460142908725, |
|
"learning_rate": 9.679316488750165e-06, |
|
"loss": 2.8014, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3512770871751507, |
|
"learning_rate": 9.677453406252263e-06, |
|
"loss": 2.6069, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.33320113133396406, |
|
"learning_rate": 9.675585107782557e-06, |
|
"loss": 2.5503, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3303586794567089, |
|
"learning_rate": 9.673711595424445e-06, |
|
"loss": 2.6388, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3487581727564022, |
|
"learning_rate": 9.67183287126714e-06, |
|
"loss": 2.5335, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3255407886746363, |
|
"learning_rate": 9.669948937405662e-06, |
|
"loss": 2.5475, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3361925664939444, |
|
"learning_rate": 9.668059795940846e-06, |
|
"loss": 2.666, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 0.3897837927203283, |
|
"learning_rate": 9.66616544897933e-06, |
|
"loss": 2.6835, |
|
"step": 358 |
|
}, |
|
{
"epoch": 0.12,
"grad_norm": 0.33831534401082114,
"learning_rate": 9.664265898633558e-06,
"loss": 2.4319,
"step": 359
},
{
"epoch": 0.12,
"grad_norm": 0.33799887450160604,
"learning_rate": 9.66236114702178e-06,
"loss": 2.619,
"step": 360
},
{
"epoch": 0.12,
"grad_norm": 0.35036876189237753,
"learning_rate": 9.66045119626804e-06,
"loss": 2.6624,
"step": 361
},
{
"epoch": 0.12,
"grad_norm": 0.36932320091198245,
"learning_rate": 9.658536048502183e-06,
"loss": 2.538,
"step": 362
},
{
"epoch": 0.12,
"grad_norm": 0.36526823651834606,
"learning_rate": 9.65661570585985e-06,
"loss": 2.7111,
"step": 363
},
{
"epoch": 0.12,
"grad_norm": 0.34134738248746543,
"learning_rate": 9.654690170482474e-06,
"loss": 2.6082,
"step": 364
},
{
"epoch": 0.12,
"grad_norm": 0.3318367107667496,
"learning_rate": 9.652759444517276e-06,
"loss": 2.5756,
"step": 365
},
{
"epoch": 0.12,
"grad_norm": 0.3658073133775175,
"learning_rate": 9.650823530117273e-06,
"loss": 2.5643,
"step": 366
},
{
"epoch": 0.12,
"grad_norm": 0.3764878796606095,
"learning_rate": 9.648882429441258e-06,
"loss": 2.5856,
"step": 367
},
{
"epoch": 0.12,
"grad_norm": 0.3520585057949475,
"learning_rate": 9.646936144653813e-06,
"loss": 2.5729,
"step": 368
},
{
"epoch": 0.12,
"grad_norm": 0.352936370229019,
"learning_rate": 9.6449846779253e-06,
"loss": 2.618,
"step": 369
},
{
"epoch": 0.12,
"grad_norm": 0.3505048312671855,
"learning_rate": 9.64302803143186e-06,
"loss": 2.6206,
"step": 370
},
{
"epoch": 0.12,
"grad_norm": 0.3679090066668967,
"learning_rate": 9.64106620735541e-06,
"loss": 2.5719,
"step": 371
},
{
"epoch": 0.12,
"grad_norm": 0.34289501892250546,
"learning_rate": 9.63909920788364e-06,
"loss": 2.5773,
"step": 372
},
{
"epoch": 0.12,
"grad_norm": 0.4083631648954588,
"learning_rate": 9.63712703521001e-06,
"loss": 2.5552,
"step": 373
},
{
"epoch": 0.13,
"grad_norm": 0.3623205217464116,
"learning_rate": 9.635149691533749e-06,
"loss": 2.5894,
"step": 374
},
{
"epoch": 0.13,
"grad_norm": 0.38387843054900256,
"learning_rate": 9.633167179059859e-06,
"loss": 2.6483,
"step": 375
},
{
"epoch": 0.13,
"grad_norm": 0.3531254253553681,
"learning_rate": 9.631179499999094e-06,
"loss": 2.6182,
"step": 376
},
{
"epoch": 0.13,
"grad_norm": 0.382022470634699,
"learning_rate": 9.629186656567981e-06,
"loss": 2.7027,
"step": 377
},
{
"epoch": 0.13,
"grad_norm": 0.35043041424351157,
"learning_rate": 9.6271886509888e-06,
"loss": 2.5979,
"step": 378
},
{
"epoch": 0.13,
"grad_norm": 0.501125499455809,
"learning_rate": 9.625185485489586e-06,
"loss": 2.5574,
"step": 379
},
{
"epoch": 0.13,
"grad_norm": 0.34841115834861225,
"learning_rate": 9.623177162304132e-06,
"loss": 2.6124,
"step": 380
},
{
"epoch": 0.13,
"grad_norm": 0.36414263747845266,
"learning_rate": 9.62116368367198e-06,
"loss": 2.6047,
"step": 381
},
{
"epoch": 0.13,
"grad_norm": 0.3621790789002891,
"learning_rate": 9.619145051838424e-06,
"loss": 2.6431,
"step": 382
},
{
"epoch": 0.13,
"grad_norm": 0.3423299610454867,
"learning_rate": 9.617121269054497e-06,
"loss": 2.6453,
"step": 383
},
{
"epoch": 0.13,
"grad_norm": 0.34946611335508165,
"learning_rate": 9.615092337576987e-06,
"loss": 2.5827,
"step": 384
},
{
"epoch": 0.13,
"grad_norm": 0.3795298920344452,
"learning_rate": 9.613058259668416e-06,
"loss": 2.5943,
"step": 385
},
{
"epoch": 0.13,
"grad_norm": 0.36580476743110946,
"learning_rate": 9.611019037597045e-06,
"loss": 2.6066,
"step": 386
},
{
"epoch": 0.13,
"grad_norm": 0.36676561183764467,
"learning_rate": 9.608974673636874e-06,
"loss": 2.4641,
"step": 387
},
{
"epoch": 0.13,
"grad_norm": 0.3799683344660138,
"learning_rate": 9.606925170067637e-06,
"loss": 2.624,
"step": 388
},
{
"epoch": 0.13,
"grad_norm": 0.38874616823423863,
"learning_rate": 9.604870529174797e-06,
"loss": 2.5052,
"step": 389
},
{
"epoch": 0.13,
"grad_norm": 0.35818572839323115,
"learning_rate": 9.602810753249549e-06,
"loss": 2.5451,
"step": 390
},
{
"epoch": 0.13,
"grad_norm": 0.35520000569192134,
"learning_rate": 9.60074584458881e-06,
"loss": 2.5498,
"step": 391
},
{
"epoch": 0.13,
"grad_norm": 0.3429831599551629,
"learning_rate": 9.598675805495224e-06,
"loss": 2.5865,
"step": 392
},
{
"epoch": 0.13,
"grad_norm": 0.34755300210268897,
"learning_rate": 9.596600638277157e-06,
"loss": 2.548,
"step": 393
},
{
"epoch": 0.13,
"grad_norm": 0.3592352180638645,
"learning_rate": 9.594520345248687e-06,
"loss": 2.712,
"step": 394
},
{
"epoch": 0.13,
"grad_norm": 0.3427075162354133,
"learning_rate": 9.592434928729617e-06,
"loss": 2.5966,
"step": 395
},
{
"epoch": 0.13,
"grad_norm": 0.3588942754040752,
"learning_rate": 9.590344391045454e-06,
"loss": 2.5614,
"step": 396
},
{
"epoch": 0.13,
"grad_norm": 0.37073559794680655,
"learning_rate": 9.588248734527428e-06,
"loss": 2.6412,
"step": 397
},
{
"epoch": 0.13,
"grad_norm": 0.3832373499776409,
"learning_rate": 9.586147961512464e-06,
"loss": 2.7403,
"step": 398
},
{
"epoch": 0.13,
"grad_norm": 0.3788810996921007,
"learning_rate": 9.5840420743432e-06,
"loss": 2.6184,
"step": 399
},
{
"epoch": 0.13,
"grad_norm": 0.3786757839938464,
"learning_rate": 9.581931075367979e-06,
"loss": 2.5637,
"step": 400
},
{
"epoch": 0.13,
"grad_norm": 0.3707176712124623,
"learning_rate": 9.579814966940833e-06,
"loss": 2.5527,
"step": 401
},
{
"epoch": 0.13,
"grad_norm": 0.3872245794218741,
"learning_rate": 9.577693751421507e-06,
"loss": 2.7156,
"step": 402
},
{
"epoch": 0.13,
"grad_norm": 0.35357111289889626,
"learning_rate": 9.575567431175427e-06,
"loss": 2.6885,
"step": 403
},
{
"epoch": 0.14,
"grad_norm": 0.3475328290649796,
"learning_rate": 9.573436008573724e-06,
"loss": 2.55,
"step": 404
},
{
"epoch": 0.14,
"grad_norm": 0.3691908383403371,
"learning_rate": 9.57129948599321e-06,
"loss": 2.5979,
"step": 405
},
{
"epoch": 0.14,
"grad_norm": 0.3776062362438795,
"learning_rate": 9.569157865816386e-06,
"loss": 2.7273,
"step": 406
},
{
"epoch": 0.14,
"grad_norm": 0.36309179883045734,
"learning_rate": 9.56701115043144e-06,
"loss": 2.6366,
"step": 407
},
{
"epoch": 0.14,
"grad_norm": 0.3635509269358371,
"learning_rate": 9.56485934223224e-06,
"loss": 2.599,
"step": 408
},
{
"epoch": 0.14,
"grad_norm": 0.37084533005857817,
"learning_rate": 9.562702443618332e-06,
"loss": 2.6457,
"step": 409
},
{
"epoch": 0.14,
"grad_norm": 0.36848577310953384,
"learning_rate": 9.56054045699494e-06,
"loss": 2.5668,
"step": 410
},
{
"epoch": 0.14,
"grad_norm": 0.42518835171568375,
"learning_rate": 9.558373384772964e-06,
"loss": 2.5535,
"step": 411
},
{
"epoch": 0.14,
"grad_norm": 0.3452323035069904,
"learning_rate": 9.55620122936897e-06,
"loss": 2.4259,
"step": 412
},
{
"epoch": 0.14,
"grad_norm": 0.34117746085207956,
"learning_rate": 9.554023993205194e-06,
"loss": 2.6565,
"step": 413
},
{
"epoch": 0.14,
"grad_norm": 0.3511654412992474,
"learning_rate": 9.551841678709543e-06,
"loss": 2.6814,
"step": 414
},
{
"epoch": 0.14,
"grad_norm": 0.3246443838619872,
"learning_rate": 9.54965428831558e-06,
"loss": 2.6047,
"step": 415
},
{
"epoch": 0.14,
"grad_norm": 0.36258121435735235,
"learning_rate": 9.547461824462534e-06,
"loss": 2.6464,
"step": 416
},
{
"epoch": 0.14,
"grad_norm": 0.34507450774639337,
"learning_rate": 9.545264289595284e-06,
"loss": 2.563,
"step": 417
},
{
"epoch": 0.14,
"grad_norm": 0.386133078004382,
"learning_rate": 9.543061686164374e-06,
"loss": 2.653,
"step": 418
},
{
"epoch": 0.14,
"grad_norm": 0.33001999653572733,
"learning_rate": 9.54085401662599e-06,
"loss": 2.5639,
"step": 419
},
{
"epoch": 0.14,
"grad_norm": 0.35995662311984156,
"learning_rate": 9.538641283441974e-06,
"loss": 2.6475,
"step": 420
},
{
"epoch": 0.14,
"grad_norm": 0.38971795630724054,
"learning_rate": 9.536423489079812e-06,
"loss": 2.727,
"step": 421
},
{
"epoch": 0.14,
"grad_norm": 0.36355223854382024,
"learning_rate": 9.534200636012636e-06,
"loss": 2.5898,
"step": 422
},
{
"epoch": 0.14,
"grad_norm": 0.3711145094034144,
"learning_rate": 9.531972726719216e-06,
"loss": 2.6373,
"step": 423
},
{
"epoch": 0.14,
"grad_norm": 0.34920429225410604,
"learning_rate": 9.529739763683965e-06,
"loss": 2.5074,
"step": 424
},
{
"epoch": 0.14,
"grad_norm": 0.3551729879198632,
"learning_rate": 9.527501749396924e-06,
"loss": 2.6316,
"step": 425
},
{
"epoch": 0.14,
"grad_norm": 0.3490101137697429,
"learning_rate": 9.525258686353776e-06,
"loss": 2.6075,
"step": 426
},
{
"epoch": 0.14,
"grad_norm": 0.32833723742847454,
"learning_rate": 9.523010577055824e-06,
"loss": 2.4231,
"step": 427
},
{
"epoch": 0.14,
"grad_norm": 0.3572609544386589,
"learning_rate": 9.52075742401001e-06,
"loss": 2.611,
"step": 428
},
{
"epoch": 0.14,
"grad_norm": 0.3731441670515594,
"learning_rate": 9.518499229728889e-06,
"loss": 2.6161,
"step": 429
},
{
"epoch": 0.14,
"grad_norm": 0.36098957181567365,
"learning_rate": 9.516235996730645e-06,
"loss": 2.6358,
"step": 430
},
{
"epoch": 0.14,
"grad_norm": 0.36118588096333676,
"learning_rate": 9.513967727539079e-06,
"loss": 2.6043,
"step": 431
},
{
"epoch": 0.14,
"grad_norm": 0.36325302834767914,
"learning_rate": 9.511694424683606e-06,
"loss": 2.6365,
"step": 432
},
{
"epoch": 0.15,
"grad_norm": 0.4035556097613667,
"learning_rate": 9.509416090699258e-06,
"loss": 2.6116,
"step": 433
},
{
"epoch": 0.15,
"grad_norm": 0.3588793695436936,
"learning_rate": 9.50713272812667e-06,
"loss": 2.611,
"step": 434
},
{
"epoch": 0.15,
"grad_norm": 0.3593171736591926,
"learning_rate": 9.504844339512096e-06,
"loss": 2.6189,
"step": 435
},
{
"epoch": 0.15,
"grad_norm": 0.37871224067231624,
"learning_rate": 9.502550927407386e-06,
"loss": 2.7026,
"step": 436
},
{
"epoch": 0.15,
"grad_norm": 0.36633980073258754,
"learning_rate": 9.500252494369992e-06,
"loss": 2.5955,
"step": 437
},
{
"epoch": 0.15,
"grad_norm": 0.3713223205151261,
"learning_rate": 9.497949042962971e-06,
"loss": 2.5978,
"step": 438
},
{
"epoch": 0.15,
"grad_norm": 0.3616355001004457,
"learning_rate": 9.49564057575497e-06,
"loss": 2.5843,
"step": 439
},
{
"epoch": 0.15,
"grad_norm": 0.3517382394448638,
"learning_rate": 9.493327095320231e-06,
"loss": 2.5557,
"step": 440
},
{
"epoch": 0.15,
"grad_norm": 0.3634880871716239,
"learning_rate": 9.49100860423859e-06,
"loss": 2.6457,
"step": 441
},
{
"epoch": 0.15,
"grad_norm": 0.3706486090076816,
"learning_rate": 9.488685105095464e-06,
"loss": 2.6207,
"step": 442
},
{
"epoch": 0.15,
"grad_norm": 0.35264587897523175,
"learning_rate": 9.486356600481859e-06,
"loss": 2.5636,
"step": 443
},
{
"epoch": 0.15,
"grad_norm": 0.357439426345557,
"learning_rate": 9.484023092994366e-06,
"loss": 2.6085,
"step": 444
},
{
"epoch": 0.15,
"grad_norm": 0.3672140395942012,
"learning_rate": 9.481684585235145e-06,
"loss": 2.6436,
"step": 445
},
{
"epoch": 0.15,
"grad_norm": 0.35536074831654074,
"learning_rate": 9.479341079811938e-06,
"loss": 2.6217,
"step": 446
},
{
"epoch": 0.15,
"grad_norm": 0.37164083609535903,
"learning_rate": 9.476992579338066e-06,
"loss": 2.6707,
"step": 447
},
{
"epoch": 0.15,
"grad_norm": 0.33771388993976,
"learning_rate": 9.474639086432408e-06,
"loss": 2.4938,
"step": 448
},
{
"epoch": 0.15,
"grad_norm": 0.3773044076000881,
"learning_rate": 9.472280603719419e-06,
"loss": 2.588,
"step": 449
},
{
"epoch": 0.15,
"grad_norm": 0.36731472246303526,
"learning_rate": 9.469917133829114e-06,
"loss": 2.5765,
"step": 450
},
{
"epoch": 0.15,
"grad_norm": 0.36401889114168867,
"learning_rate": 9.467548679397072e-06,
"loss": 2.6097,
"step": 451
},
{
"epoch": 0.15,
"grad_norm": 0.36397924330762615,
"learning_rate": 9.465175243064428e-06,
"loss": 2.5596,
"step": 452
},
{
"epoch": 0.15,
"grad_norm": 0.3479437871268676,
"learning_rate": 9.462796827477873e-06,
"loss": 2.5487,
"step": 453
},
{
"epoch": 0.15,
"grad_norm": 0.45223388523599456,
"learning_rate": 9.460413435289655e-06,
"loss": 2.6161,
"step": 454
},
{
"epoch": 0.15,
"grad_norm": 0.39806018990511033,
"learning_rate": 9.458025069157563e-06,
"loss": 2.5991,
"step": 455
},
{
"epoch": 0.15,
"grad_norm": 0.37052890554662676,
"learning_rate": 9.45563173174494e-06,
"loss": 2.5443,
"step": 456
},
{
"epoch": 0.15,
"grad_norm": 0.40424250730130273,
"learning_rate": 9.45323342572067e-06,
"loss": 2.4869,
"step": 457
},
{
"epoch": 0.15,
"grad_norm": 0.3912357514806361,
"learning_rate": 9.450830153759177e-06,
"loss": 2.6105,
"step": 458
},
{
"epoch": 0.15,
"grad_norm": 0.3519370732685063,
"learning_rate": 9.448421918540424e-06,
"loss": 2.6127,
"step": 459
},
{
"epoch": 0.15,
"grad_norm": 0.40068454653993657,
"learning_rate": 9.446008722749906e-06,
"loss": 2.4963,
"step": 460
},
{
"epoch": 0.15,
"grad_norm": 0.39281092331599454,
"learning_rate": 9.443590569078655e-06,
"loss": 2.5455,
"step": 461
},
{
"epoch": 0.15,
"grad_norm": 0.3591307763904136,
"learning_rate": 9.441167460223224e-06,
"loss": 2.5692,
"step": 462
},
{
"epoch": 0.16,
"grad_norm": 0.3759099133359235,
"learning_rate": 9.4387393988857e-06,
"loss": 2.5914,
"step": 463
},
{
"epoch": 0.16,
"grad_norm": 0.4197464687478331,
"learning_rate": 9.436306387773687e-06,
"loss": 2.649,
"step": 464
},
{
"epoch": 0.16,
"grad_norm": 0.36478768026082575,
"learning_rate": 9.43386842960031e-06,
"loss": 2.6149,
"step": 465
},
{
"epoch": 0.16,
"grad_norm": 0.38633531770095064,
"learning_rate": 9.431425527084213e-06,
"loss": 2.636,
"step": 466
},
{
"epoch": 0.16,
"grad_norm": 0.38502307934138025,
"learning_rate": 9.428977682949548e-06,
"loss": 2.481,
"step": 467
},
{
"epoch": 0.16,
"grad_norm": 0.37578040616581126,
"learning_rate": 9.426524899925983e-06,
"loss": 2.6481,
"step": 468
},
{
"epoch": 0.16,
"grad_norm": 0.36412837615381155,
"learning_rate": 9.424067180748692e-06,
"loss": 2.6204,
"step": 469
},
{
"epoch": 0.16,
"grad_norm": 0.3688629963265158,
"learning_rate": 9.421604528158355e-06,
"loss": 2.5546,
"step": 470
},
{
"epoch": 0.16,
"grad_norm": 0.37999274617098006,
"learning_rate": 9.419136944901146e-06,
"loss": 2.502,
"step": 471
},
{
"epoch": 0.16,
"grad_norm": 0.3704608338474391,
"learning_rate": 9.416664433728749e-06,
"loss": 2.5751,
"step": 472
},
{
"epoch": 0.16,
"grad_norm": 0.37304381923522467,
"learning_rate": 9.414186997398331e-06,
"loss": 2.6507,
"step": 473
},
{
"epoch": 0.16,
"grad_norm": 0.4002003287303057,
"learning_rate": 9.411704638672562e-06,
"loss": 2.6855,
"step": 474
},
{
"epoch": 0.16,
"grad_norm": 0.35759693298816747,
"learning_rate": 9.409217360319594e-06,
"loss": 2.6198,
"step": 475
},
{
"epoch": 0.16,
"grad_norm": 0.39969817039198885,
"learning_rate": 9.406725165113069e-06,
"loss": 2.534,
"step": 476
},
{
"epoch": 0.16,
"grad_norm": 0.3807147215970043,
"learning_rate": 9.40422805583211e-06,
"loss": 2.6171,
"step": 477
},
{
"epoch": 0.16,
"grad_norm": 0.3790985877712856,
"learning_rate": 9.401726035261319e-06,
"loss": 2.6556,
"step": 478
},
{
"epoch": 0.16,
"grad_norm": 0.351516529892437,
"learning_rate": 9.399219106190776e-06,
"loss": 2.6053,
"step": 479
},
{
"epoch": 0.16,
"grad_norm": 0.38937111835779886,
"learning_rate": 9.396707271416035e-06,
"loss": 2.6586,
"step": 480
},
{
"epoch": 0.16,
"grad_norm": 0.3785571820542292,
"learning_rate": 9.394190533738118e-06,
"loss": 2.5479,
"step": 481
},
{
"epoch": 0.16,
"grad_norm": 0.361269992585338,
"learning_rate": 9.391668895963521e-06,
"loss": 2.677,
"step": 482
},
{
"epoch": 0.16,
"grad_norm": 0.3488395355853058,
"learning_rate": 9.389142360904197e-06,
"loss": 2.5981,
"step": 483
},
{
"epoch": 0.16,
"grad_norm": 0.366717687311896,
"learning_rate": 9.386610931377564e-06,
"loss": 2.5667,
"step": 484
},
{
"epoch": 0.16,
"grad_norm": 0.371686802854601,
"learning_rate": 9.384074610206495e-06,
"loss": 2.6046,
"step": 485
},
{
"epoch": 0.16,
"grad_norm": 0.34937925597176434,
"learning_rate": 9.381533400219319e-06,
"loss": 2.5307,
"step": 486
},
{
"epoch": 0.16,
"grad_norm": 0.38148537077296263,
"learning_rate": 9.378987304249819e-06,
"loss": 2.6458,
"step": 487
},
{
"epoch": 0.16,
"grad_norm": 0.3526977013061218,
"learning_rate": 9.376436325137224e-06,
"loss": 2.5112,
"step": 488
},
{
"epoch": 0.16,
"grad_norm": 0.3672226422314367,
"learning_rate": 9.373880465726208e-06,
"loss": 2.6376,
"step": 489
},
{
"epoch": 0.16,
"grad_norm": 0.378500691878116,
"learning_rate": 9.371319728866892e-06,
"loss": 2.499,
"step": 490
},
{
"epoch": 0.16,
"grad_norm": 0.3964495526135052,
"learning_rate": 9.368754117414828e-06,
"loss": 2.5133,
"step": 491
},
{
"epoch": 0.16,
"grad_norm": 0.38403991576946866,
"learning_rate": 9.36618363423101e-06,
"loss": 2.5839,
"step": 492
},
{
"epoch": 0.17,
"grad_norm": 0.39166616920006725,
"learning_rate": 9.363608282181862e-06,
"loss": 2.6802,
"step": 493
},
{
"epoch": 0.17,
"grad_norm": 0.367343341952105,
"learning_rate": 9.36102806413924e-06,
"loss": 2.5199,
"step": 494
},
{
"epoch": 0.17,
"grad_norm": 0.3686087879446113,
"learning_rate": 9.35844298298042e-06,
"loss": 2.4803,
"step": 495
},
{
"epoch": 0.17,
"grad_norm": 0.36947777907644486,
"learning_rate": 9.355853041588111e-06,
"loss": 2.6321,
"step": 496
},
{
"epoch": 0.17,
"grad_norm": 0.36917330077534516,
"learning_rate": 9.353258242850432e-06,
"loss": 2.5247,
"step": 497
},
{
"epoch": 0.17,
"grad_norm": 0.37585804049584626,
"learning_rate": 9.350658589660924e-06,
"loss": 2.5508,
"step": 498
},
{
"epoch": 0.17,
"grad_norm": 0.3950181920956615,
"learning_rate": 9.34805408491854e-06,
"loss": 2.563,
"step": 499
},
{
"epoch": 0.17,
"grad_norm": 0.3612877900576221,
"learning_rate": 9.345444731527642e-06,
"loss": 2.5589,
"step": 500
},
{
"epoch": 0.17,
"grad_norm": 0.3747026735022066,
"learning_rate": 9.342830532398002e-06,
"loss": 2.6633,
"step": 501
},
{
"epoch": 0.17,
"grad_norm": 0.3707103957658041,
"learning_rate": 9.340211490444793e-06,
"loss": 2.5683,
"step": 502
},
{
"epoch": 0.17,
"grad_norm": 0.3881547065398902,
"learning_rate": 9.337587608588588e-06,
"loss": 2.5465,
"step": 503
},
{
"epoch": 0.17,
"grad_norm": 0.38485592784501677,
"learning_rate": 9.33495888975536e-06,
"loss": 2.5729,
"step": 504
},
{
"epoch": 0.17,
"grad_norm": 0.3631651703156107,
"learning_rate": 9.332325336876472e-06,
"loss": 2.5225,
"step": 505
},
{
"epoch": 0.17,
"grad_norm": 0.395946419607132,
"learning_rate": 9.32968695288868e-06,
"loss": 2.708,
"step": 506
},
{
"epoch": 0.17,
"grad_norm": 0.3639799802195103,
"learning_rate": 9.327043740734129e-06,
"loss": 2.6279,
"step": 507
},
{
"epoch": 0.17,
"grad_norm": 0.39253570555205214,
"learning_rate": 9.324395703360345e-06,
"loss": 2.7964,
"step": 508
},
{
"epoch": 0.17,
"grad_norm": 0.4062943434356018,
"learning_rate": 9.321742843720234e-06,
"loss": 2.5721,
"step": 509
},
{
"epoch": 0.17,
"grad_norm": 0.3745062677217993,
"learning_rate": 9.319085164772082e-06,
"loss": 2.5667,
"step": 510
},
{
"epoch": 0.17,
"grad_norm": 0.3816082756208765,
"learning_rate": 9.31642266947955e-06,
"loss": 2.6327,
"step": 511
},
{
"epoch": 0.17,
"grad_norm": 0.34810809443669893,
"learning_rate": 9.313755360811665e-06,
"loss": 2.5067,
"step": 512
},
{
"epoch": 0.17,
"grad_norm": 0.669410354578167,
"learning_rate": 9.311083241742829e-06,
"loss": 2.6153,
"step": 513
},
{
"epoch": 0.17,
"grad_norm": 0.3828088604501034,
"learning_rate": 9.308406315252799e-06,
"loss": 2.6264,
"step": 514
},
{
"epoch": 0.17,
"grad_norm": 0.3640939993595143,
"learning_rate": 9.305724584326702e-06,
"loss": 2.5744,
"step": 515
},
{
"epoch": 0.17,
"grad_norm": 0.36645221137260386,
"learning_rate": 9.303038051955017e-06,
"loss": 2.6558,
"step": 516
},
{
"epoch": 0.17,
"grad_norm": 0.36173421361179037,
"learning_rate": 9.300346721133577e-06,
"loss": 2.637,
"step": 517
},
{
"epoch": 0.17,
"grad_norm": 0.3479028106069891,
"learning_rate": 9.297650594863572e-06,
"loss": 2.627,
"step": 518
},
{
"epoch": 0.17,
"grad_norm": 0.3779033286984007,
"learning_rate": 9.29494967615153e-06,
"loss": 2.7463,
"step": 519
},
{
"epoch": 0.17,
"grad_norm": 0.393068131152805,
"learning_rate": 9.292243968009332e-06,
"loss": 2.6275,
"step": 520
},
{
"epoch": 0.17,
"grad_norm": 0.36336754214963424,
"learning_rate": 9.289533473454194e-06,
"loss": 2.5236,
"step": 521
},
{
"epoch": 0.17,
"grad_norm": 0.37280173510784137,
"learning_rate": 9.286818195508672e-06,
"loss": 2.6627,
"step": 522
},
{
"epoch": 0.18,
"grad_norm": 0.3814394128580523,
"learning_rate": 9.284098137200656e-06,
"loss": 2.6625,
"step": 523
},
{
"epoch": 0.18,
"grad_norm": 0.35570304610873393,
"learning_rate": 9.281373301563369e-06,
"loss": 2.6041,
"step": 524
},
{
"epoch": 0.18,
"grad_norm": 0.3796685602621214,
"learning_rate": 9.278643691635352e-06,
"loss": 2.5634,
"step": 525
},
{
"epoch": 0.18,
"grad_norm": 0.37182496960471256,
"learning_rate": 9.275909310460484e-06,
"loss": 2.6144,
"step": 526
},
{
"epoch": 0.18,
"grad_norm": 0.36597489951785306,
"learning_rate": 9.273170161087954e-06,
"loss": 2.4505,
"step": 527
},
{
"epoch": 0.18,
"grad_norm": 0.37863369606990943,
"learning_rate": 9.270426246572273e-06,
"loss": 2.5277,
"step": 528
},
{
"epoch": 0.18,
"grad_norm": 0.44730683515841135,
"learning_rate": 9.26767756997326e-06,
"loss": 2.5425,
"step": 529
},
{
"epoch": 0.18,
"grad_norm": 0.39472069299440277,
"learning_rate": 9.264924134356057e-06,
"loss": 2.7211,
"step": 530
},
{
"epoch": 0.18,
"grad_norm": 0.3501969413485413,
"learning_rate": 9.262165942791098e-06,
"loss": 2.5592,
"step": 531
},
{
"epoch": 0.18,
"grad_norm": 0.38360651542726115,
"learning_rate": 9.259402998354125e-06,
"loss": 2.6333,
"step": 532
},
{
"epoch": 0.18,
"grad_norm": 0.3863578615228763,
"learning_rate": 9.25663530412619e-06,
"loss": 2.4994,
"step": 533
},
{
"epoch": 0.18,
"grad_norm": 0.37848956999650335,
"learning_rate": 9.253862863193625e-06,
"loss": 2.5538,
"step": 534
},
{
"epoch": 0.18,
"grad_norm": 0.3783992836553777,
"learning_rate": 9.251085678648072e-06,
"loss": 2.6139,
"step": 535
},
{
"epoch": 0.18,
"grad_norm": 0.3972475946136305,
"learning_rate": 9.248303753586449e-06,
"loss": 2.6361,
"step": 536
},
{
"epoch": 0.18,
"grad_norm": 0.39414074900599666,
"learning_rate": 9.24551709111097e-06,
"loss": 2.6609,
"step": 537
},
{
"epoch": 0.18,
"grad_norm": 0.4031966345371276,
"learning_rate": 9.242725694329123e-06,
"loss": 2.5674,
"step": 538
},
{
"epoch": 0.18,
"grad_norm": 0.37415076496357547,
"learning_rate": 9.239929566353685e-06,
"loss": 2.7205,
"step": 539
},
{
"epoch": 0.18,
"grad_norm": 0.39152375777763443,
"learning_rate": 9.2371287103027e-06,
"loss": 2.6226,
"step": 540
},
{
"epoch": 0.18,
"grad_norm": 0.38096918332633267,
"learning_rate": 9.234323129299493e-06,
"loss": 2.5059,
"step": 541
},
{
"epoch": 0.18,
"grad_norm": 0.3624078649965304,
"learning_rate": 9.231512826472651e-06,
"loss": 2.6158,
"step": 542
},
{
"epoch": 0.18,
"grad_norm": 0.38864706909152263,
"learning_rate": 9.228697804956027e-06,
"loss": 2.7084,
"step": 543
},
{
"epoch": 0.18,
"grad_norm": 0.3641660027457906,
"learning_rate": 9.225878067888741e-06,
"loss": 2.5334,
"step": 544
},
{
"epoch": 0.18,
"grad_norm": 0.38568072251846647,
"learning_rate": 9.223053618415168e-06,
"loss": 2.5861,
"step": 545
},
{
"epoch": 0.18,
"grad_norm": 0.40430584957463456,
"learning_rate": 9.220224459684936e-06,
"loss": 2.6363,
"step": 546
},
{
"epoch": 0.18,
"grad_norm": 0.40214767122496736,
"learning_rate": 9.217390594852928e-06,
"loss": 2.491,
"step": 547
},
{
"epoch": 0.18,
"grad_norm": 0.37009409729286524,
"learning_rate": 9.214552027079274e-06,
"loss": 2.5962,
"step": 548
},
{
"epoch": 0.18,
"grad_norm": 0.3701496764243851,
"learning_rate": 9.211708759529347e-06,
"loss": 2.6619,
"step": 549
},
{
"epoch": 0.18,
"grad_norm": 0.38085250848302366,
"learning_rate": 9.208860795373765e-06,
"loss": 2.5934,
"step": 550
},
{
"epoch": 0.18,
"grad_norm": 0.3703456767225791,
"learning_rate": 9.206008137788376e-06,
"loss": 2.5907,
"step": 551
},
{
"epoch": 0.18,
"grad_norm": 0.37521457493596533,
"learning_rate": 9.203150789954269e-06,
"loss": 2.6769,
"step": 552
},
{
"epoch": 0.19,
"grad_norm": 0.3926093625218847,
"learning_rate": 9.200288755057757e-06,
"loss": 2.6641,
"step": 553
},
{
"epoch": 0.19,
"grad_norm": 0.3659477610660178,
"learning_rate": 9.197422036290386e-06,
"loss": 2.4708,
"step": 554
},
{
"epoch": 0.19,
"grad_norm": 0.36528777685770725,
"learning_rate": 9.194550636848923e-06,
"loss": 2.495,
"step": 555
},
{
"epoch": 0.19,
"grad_norm": 0.36595973236411306,
"learning_rate": 9.191674559935349e-06,
"loss": 2.6522,
"step": 556
},
{
"epoch": 0.19,
"grad_norm": 0.40533292402750815,
"learning_rate": 9.18879380875687e-06,
"loss": 2.6301,
"step": 557
},
{
"epoch": 0.19,
"grad_norm": 0.3814157774351385,
"learning_rate": 9.185908386525897e-06,
"loss": 2.6509,
"step": 558
},
{
"epoch": 0.19,
"grad_norm": 0.39954200691359565,
"learning_rate": 9.183018296460055e-06,
"loss": 2.58,
"step": 559
},
{
"epoch": 0.19,
"grad_norm": 0.3980474994268938,
"learning_rate": 9.180123541782172e-06,
"loss": 2.5013,
"step": 560
},
{
"epoch": 0.19,
"grad_norm": 0.362430108666147,
"learning_rate": 9.177224125720274e-06,
"loss": 2.6252,
"step": 561
},
{
"epoch": 0.19,
"grad_norm": 0.36535128593944177,
"learning_rate": 9.174320051507595e-06,
"loss": 2.5497,
"step": 562
},
{
"epoch": 0.19,
"grad_norm": 0.3838873584463476,
"learning_rate": 9.171411322382552e-06,
"loss": 2.6119,
"step": 563
},
{
"epoch": 0.19,
"grad_norm": 0.3698863414225833,
"learning_rate": 9.16849794158876e-06,
"loss": 2.5775,
"step": 564
},
{
"epoch": 0.19,
"grad_norm": 0.3914562726368379,
"learning_rate": 9.16557991237502e-06,
"loss": 2.528,
"step": 565
},
{
"epoch": 0.19,
"grad_norm": 0.3748930540173487,
"learning_rate": 9.162657237995316e-06,
"loss": 2.5201,
"step": 566
},
{
"epoch": 0.19,
"grad_norm": 0.36816944553477204,
"learning_rate": 9.159729921708811e-06,
"loss": 2.5629,
"step": 567
},
{
"epoch": 0.19,
"grad_norm": 0.37321218852120774,
"learning_rate": 9.156797966779847e-06,
"loss": 2.5419,
"step": 568
},
{
"epoch": 0.19,
"grad_norm": 0.37915436005095765,
"learning_rate": 9.153861376477933e-06,
"loss": 2.7052,
"step": 569
},
{
"epoch": 0.19,
"grad_norm": 0.3704407417589313,
"learning_rate": 9.150920154077753e-06,
"loss": 2.6223,
"step": 570
},
{
"epoch": 0.19,
"grad_norm": 0.38029524275808224,
"learning_rate": 9.147974302859158e-06,
"loss": 2.5667,
"step": 571
},
{
"epoch": 0.19,
"grad_norm": 0.36244234004689,
"learning_rate": 9.14502382610715e-06,
"loss": 2.6414,
"step": 572
},
{
"epoch": 0.19,
"grad_norm": 0.3957962679658596,
"learning_rate": 9.1420687271119e-06,
"loss": 2.63,
"step": 573
},
{
"epoch": 0.19,
"grad_norm": 0.37690782027680103,
"learning_rate": 9.139109009168732e-06,
"loss": 2.6165,
"step": 574
},
{
"epoch": 0.19,
"grad_norm": 0.38991806911964444,
"learning_rate": 9.136144675578114e-06,
"loss": 2.5532,
"step": 575
},
{
"epoch": 0.19,
"grad_norm": 0.37241165643256596,
"learning_rate": 9.133175729645667e-06,
"loss": 2.5109,
"step": 576
},
{
"epoch": 0.19,
"grad_norm": 0.3881669718704454,
"learning_rate": 9.130202174682154e-06,
"loss": 2.6121,
"step": 577
},
{
"epoch": 0.19,
"grad_norm": 0.3746737411938074,
"learning_rate": 9.127224014003478e-06,
"loss": 2.5589,
"step": 578
},
{
"epoch": 0.19,
"grad_norm": 0.35909899188362016,
"learning_rate": 9.124241250930675e-06,
"loss": 2.4821,
"step": 579
},
{
"epoch": 0.19,
"grad_norm": 0.37780525071199483,
"learning_rate": 9.121253888789916e-06,
"loss": 2.5867,
"step": 580
},
{
"epoch": 0.19,
"grad_norm": 0.3783001722529536,
"learning_rate": 9.118261930912505e-06,
"loss": 2.6057,
"step": 581
},
{
"epoch": 0.19,
"grad_norm": 0.37910446409582155,
"learning_rate": 9.11526538063486e-06,
"loss": 2.5794,
"step": 582
},
{
"epoch": 0.2,
"grad_norm": 0.3910669306460941,
"learning_rate": 9.112264241298527e-06,
"loss": 2.5727,
"step": 583
},
{
"epoch": 0.2,
"grad_norm": 0.3979995789286049,
"learning_rate": 9.109258516250172e-06,
"loss": 2.5771,
"step": 584
},
{
"epoch": 0.2,
"grad_norm": 0.37724573509126186,
"learning_rate": 9.106248208841568e-06,
"loss": 2.6118,
"step": 585
},
{
"epoch": 0.2,
"grad_norm": 0.37725914506211394,
"learning_rate": 9.103233322429603e-06,
"loss": 2.6005,
"step": 586
},
{
"epoch": 0.2,
"grad_norm": 0.380958974481754,
"learning_rate": 9.10021386037627e-06,
"loss": 2.5719,
"step": 587
},
{
"epoch": 0.2,
"grad_norm": 0.41455883292386886,
"learning_rate": 9.09718982604866e-06,
"loss": 2.5273,
"step": 588
},
{
"epoch": 0.2,
"grad_norm": 0.4056643886867896,
"learning_rate": 9.094161222818971e-06,
"loss": 2.5989,
"step": 589
},
{
"epoch": 0.2,
"grad_norm": 0.4087813630452031,
"learning_rate": 9.091128054064487e-06,
"loss": 2.529,
"step": 590
},
{
"epoch": 0.2,
"grad_norm": 0.40494733812427797,
"learning_rate": 9.08809032316759e-06,
"loss": 2.6702,
"step": 591
},
{
"epoch": 0.2,
"grad_norm": 0.38784529621841723,
"learning_rate": 9.085048033515749e-06,
"loss": 2.5724,
"step": 592
},
{
"epoch": 0.2,
"grad_norm": 0.36458722067077504,
"learning_rate": 9.08200118850151e-06,
"loss": 2.5695,
"step": 593
},
{
"epoch": 0.2,
"grad_norm": 0.3669722311573834,
"learning_rate": 9.078949791522509e-06,
"loss": 2.4487,
"step": 594
},
{
"epoch": 0.2,
"grad_norm": 0.4492875917179086,
"learning_rate": 9.075893845981445e-06,
"loss": 2.7234,
"step": 595
},
{
"epoch": 0.2,
"grad_norm": 0.40343553130410426,
"learning_rate": 9.072833355286104e-06,
"loss": 2.6101,
"step": 596
},
{
"epoch": 0.2,
"grad_norm": 0.38583067832648477,
"learning_rate": 9.069768322849327e-06,
"loss": 2.5303,
"step": 597
},
{
"epoch": 0.2,
"grad_norm": 0.3769044202262478,
"learning_rate": 9.06669875208903e-06,
"loss": 2.5304,
"step": 598
},
{
"epoch": 0.2,
"grad_norm": 0.39753216379422124,
"learning_rate": 9.063624646428184e-06,
"loss": 2.6055,
"step": 599
},
{
"epoch": 0.2,
"grad_norm": 0.39269184723845535,
"learning_rate": 9.060546009294818e-06,
"loss": 2.5723,
"step": 600
},
{
"epoch": 0.2,
"grad_norm": 0.4054397241864531,
"learning_rate": 9.057462844122016e-06,
"loss": 2.6027,
"step": 601
},
{
"epoch": 0.2,
"grad_norm": 0.38168745739432036,
"learning_rate": 9.054375154347908e-06,
"loss": 2.6156,
"step": 602
},
{
"epoch": 0.2,
"grad_norm": 0.39640886311164103,
"learning_rate": 9.051282943415673e-06,
"loss": 2.5587,
"step": 603
},
{
"epoch": 0.2,
"grad_norm": 0.3905155791570832,
"learning_rate": 9.048186214773531e-06,
"loss": 2.6544,
"step": 604
},
{
"epoch": 0.2,
"grad_norm": 0.4218796447215651,
"learning_rate": 9.045084971874738e-06,
"loss": 2.4422,
"step": 605
},
{
"epoch": 0.2,
"grad_norm": 0.41284861860724453,
"learning_rate": 9.041979218177586e-06,
"loss": 2.6839,
"step": 606
},
{
"epoch": 0.2,
"grad_norm": 0.37382785075625063,
"learning_rate": 9.038868957145394e-06,
"loss": 2.6169,
"step": 607
},
{
"epoch": 0.2,
"grad_norm": 0.37855383618111166,
"learning_rate": 9.035754192246513e-06,
"loss": 2.6089,
"step": 608
},
{
"epoch": 0.2,
"grad_norm": 0.40886094964428643,
"learning_rate": 9.032634926954311e-06,
"loss": 2.5738,
"step": 609
},
{
"epoch": 0.2,
"grad_norm": 0.38056500135812904,
"learning_rate": 9.029511164747175e-06,
"loss": 2.5429,
"step": 610
},
{
"epoch": 0.2,
"grad_norm": 0.3479316991805906,
"learning_rate": 9.02638290910851e-06,
"loss": 2.484,
"step": 611
},
{
"epoch": 0.21,
"grad_norm": 0.3866011315472774,
"learning_rate": 9.023250163526731e-06,
"loss": 2.5471,
"step": 612
},
{
"epoch": 0.21,
"grad_norm": 0.3823249982531166,
"learning_rate": 9.020112931495256e-06,
"loss": 2.528,
"step": 613
},
{
"epoch": 0.21,
"grad_norm": 0.3746182564964411,
"learning_rate": 9.016971216512508e-06,
"loss": 2.4831,
"step": 614
},
{
"epoch": 0.21,
"grad_norm": 0.3687923342439741,
"learning_rate": 9.013825022081915e-06,
"loss": 2.586,
"step": 615
},
{
"epoch": 0.21,
"grad_norm": 0.38139049958522336,
"learning_rate": 9.01067435171189e-06,
"loss": 2.6182,
"step": 616
},
{
"epoch": 0.21,
"grad_norm": 0.3886422764228466,
"learning_rate": 9.007519208915843e-06,
"loss": 2.6021,
"step": 617
},
{
"epoch": 0.21,
"grad_norm": 0.38891624551252624,
"learning_rate": 9.004359597212173e-06,
"loss": 2.581,
"step": 618
},
{
"epoch": 0.21,
"grad_norm": 0.4032441961032781,
"learning_rate": 9.001195520124257e-06,
"loss": 2.5595,
"step": 619
},
{
"epoch": 0.21,
"grad_norm": 0.39172542068772426,
"learning_rate": 8.998026981180454e-06,
"loss": 2.5584,
"step": 620
},
{
"epoch": 0.21,
"grad_norm": 0.35467448945907143,
"learning_rate": 8.994853983914104e-06,
"loss": 2.5566,
"step": 621
},
{
"epoch": 0.21,
"grad_norm": 0.39820696201163513,
"learning_rate": 8.991676531863507e-06,
"loss": 2.5934,
"step": 622
},
{
"epoch": 0.21,
"grad_norm": 0.37666804152353134,
"learning_rate": 8.988494628571943e-06,
"loss": 2.5704,
"step": 623
},
{
"epoch": 0.21,
"grad_norm": 0.37676360747299015,
"learning_rate": 8.985308277587647e-06,
"loss": 2.6006,
"step": 624
},
{
"epoch": 0.21,
"grad_norm": 0.39448469661600266,
"learning_rate": 8.982117482463817e-06,
"loss": 2.5857,
"step": 625
},
{
"epoch": 0.21,
"grad_norm": 0.3815405598299067,
"learning_rate": 8.978922246758607e-06,
"loss": 2.5791,
"step": 626
},
{
"epoch": 0.21,
"grad_norm": 0.4020737047310836,
"learning_rate": 8.975722574035122e-06,
"loss": 2.6392,
"step": 627
},
{
"epoch": 0.21,
"grad_norm": 0.4058345357510237,
"learning_rate": 8.972518467861413e-06,
"loss": 2.5718,
"step": 628
},
{
"epoch": 0.21,
"grad_norm": 0.3968737339390062,
"learning_rate": 8.969309931810483e-06,
"loss": 2.6727,
"step": 629
},
{
"epoch": 0.21,
"grad_norm": 0.4044949511659703,
"learning_rate": 8.966096969460263e-06,
"loss": 2.6641,
"step": 630
},
{
"epoch": 0.21,
"grad_norm": 0.4050966911512252,
"learning_rate": 8.96287958439363e-06,
"loss": 2.5858,
"step": 631
},
{
"epoch": 0.21,
"grad_norm": 0.37963019358499156,
"learning_rate": 8.959657780198388e-06,
"loss": 2.5699,
"step": 632
},
{
"epoch": 0.21,
"grad_norm": 0.3752560536088806,
"learning_rate": 8.956431560467267e-06,
"loss": 2.5395,
"step": 633
},
{
"epoch": 0.21,
"grad_norm": 0.41909915260278424,
"learning_rate": 8.953200928797926e-06,
"loss": 2.5608,
"step": 634
},
{
"epoch": 0.21,
"grad_norm": 0.3757858981917666,
"learning_rate": 8.94996588879294e-06,
"loss": 2.534,
"step": 635
},
{
"epoch": 0.21,
"grad_norm": 0.41734799073355916,
"learning_rate": 8.946726444059804e-06,
"loss": 2.5867,
"step": 636
},
{
"epoch": 0.21,
"grad_norm": 0.40750575386267923,
"learning_rate": 8.94348259821092e-06,
"loss": 2.606,
"step": 637
},
{
"epoch": 0.21,
"grad_norm": 0.37301705525833967,
"learning_rate": 8.940234354863599e-06,
"loss": 2.4973,
"step": 638
},
{
"epoch": 0.21,
"grad_norm": 0.39286495756761686,
"learning_rate": 8.936981717640061e-06,
"loss": 2.5394,
"step": 639
},
{
"epoch": 0.21,
"grad_norm": 0.4361679370503528,
"learning_rate": 8.933724690167417e-06,
"loss": 2.6022,
"step": 640
},
{
"epoch": 0.21,
"grad_norm": 0.40947083014043045,
"learning_rate": 8.930463276077681e-06,
"loss": 2.5744,
"step": 641
},
{
"epoch": 0.22,
"grad_norm": 0.3617481574016219,
"learning_rate": 8.927197479007753e-06,
"loss": 2.4723,
"step": 642
},
{
"epoch": 0.22,
"grad_norm": 0.395860622954373,
"learning_rate": 8.923927302599429e-06,
"loss": 2.5622,
"step": 643
},
{
"epoch": 0.22,
"grad_norm": 0.4065535584975831,
"learning_rate": 8.920652750499378e-06,
"loss": 2.6154,
"step": 644
},
{
"epoch": 0.22,
"grad_norm": 0.367418500595609,
"learning_rate": 8.917373826359156e-06,
"loss": 2.6548,
"step": 645
},
{
"epoch": 0.22,
"grad_norm": 0.39840840252112203,
"learning_rate": 8.91409053383519e-06,
"loss": 2.687,
"step": 646
},
{
"epoch": 0.22,
"grad_norm": 0.37079672722035706,
"learning_rate": 8.910802876588783e-06,
"loss": 2.6192,
"step": 647
},
{
"epoch": 0.22,
"grad_norm": 0.37845913570811357,
"learning_rate": 8.9075108582861e-06,
"loss": 2.5964,
"step": 648
},
{
"epoch": 0.22,
"grad_norm": 0.3668806538966641,
"learning_rate": 8.904214482598172e-06,
"loss": 2.5888,
"step": 649
},
{
"epoch": 0.22,
"grad_norm": 0.38671034986832453,
"learning_rate": 8.900913753200887e-06,
"loss": 2.5977,
"step": 650
},
{
"epoch": 0.22,
"grad_norm": 0.3836801777296474,
"learning_rate": 8.897608673774995e-06,
"loss": 2.5607,
"step": 651
},
{
"epoch": 0.22,
"grad_norm": 0.3916600818621598,
"learning_rate": 8.894299248006088e-06,
"loss": 2.5144,
"step": 652
},
{
"epoch": 0.22,
"grad_norm": 0.38926938389539,
"learning_rate": 8.890985479584606e-06,
"loss": 2.5266,
"step": 653
},
{
"epoch": 0.22,
"grad_norm": 0.4106809801760616,
"learning_rate": 8.88766737220584e-06,
"loss": 2.5809,
"step": 654
},
{
"epoch": 0.22,
"grad_norm": 0.39253585739717706,
"learning_rate": 8.884344929569905e-06,
"loss": 2.6197,
"step": 655
},
{
"epoch": 0.22,
"grad_norm": 0.3907669381712913,
"learning_rate": 8.881018155381766e-06,
"loss": 2.6676,
"step": 656
},
{
"epoch": 0.22,
"grad_norm": 0.38831433667568693,
"learning_rate": 8.877687053351208e-06,
"loss": 2.6853,
"step": 657
},
{
"epoch": 0.22,
"grad_norm": 0.3859399023251534,
"learning_rate": 8.874351627192844e-06,
"loss": 2.6383,
"step": 658
},
{
"epoch": 0.22,
"grad_norm": 0.42425009820652615,
"learning_rate": 8.871011880626112e-06,
"loss": 2.5976,
"step": 659
},
{
"epoch": 0.22,
"grad_norm": 0.37616130699136113,
"learning_rate": 8.867667817375266e-06,
"loss": 2.5867,
"step": 660
},
{
"epoch": 0.22,
"grad_norm": 0.4063149730370637,
"learning_rate": 8.864319441169373e-06,
"loss": 2.6468,
"step": 661
},
{
"epoch": 0.22,
"grad_norm": 0.41735807710575523,
"learning_rate": 8.860966755742308e-06,
"loss": 2.6377,
"step": 662
},
{
"epoch": 0.22,
"grad_norm": 0.4229265106947187,
"learning_rate": 8.857609764832758e-06,
"loss": 2.6582,
"step": 663
},
{
"epoch": 0.22,
"grad_norm": 0.39930886000035337,
"learning_rate": 8.854248472184202e-06,
"loss": 2.3838,
"step": 664
},
{
"epoch": 0.22,
"grad_norm": 0.4024205180881961,
"learning_rate": 8.850882881544923e-06,
"loss": 2.5637,
"step": 665
},
{
"epoch": 0.22,
"grad_norm": 0.41146195888933934,
"learning_rate": 8.847512996667991e-06,
"loss": 2.6788,
"step": 666
},
{
"epoch": 0.22,
"grad_norm": 0.49138344073463536,
"learning_rate": 8.844138821311273e-06,
"loss": 2.6933,
"step": 667
},
{
"epoch": 0.22,
"grad_norm": 0.38353973521143947,
"learning_rate": 8.840760359237412e-06,
"loss": 2.5917,
"step": 668
},
{
"epoch": 0.22,
"grad_norm": 0.40273612065496545,
"learning_rate": 8.837377614213837e-06,
"loss": 2.6275,
"step": 669
},
{
"epoch": 0.22,
"grad_norm": 0.3908994854710692,
"learning_rate": 8.833990590012749e-06,
"loss": 2.5214,
"step": 670
},
{
"epoch": 0.22,
"grad_norm": 0.3935355662204077,
"learning_rate": 8.830599290411123e-06,
"loss": 2.4964,
"step": 671
},
{
"epoch": 0.23,
"grad_norm": 0.39568823949262405,
"learning_rate": 8.827203719190704e-06,
"loss": 2.6924,
"step": 672
},
{
"epoch": 0.23,
"grad_norm": 0.3944176236437975,
"learning_rate": 8.823803880137993e-06,
"loss": 2.617,
"step": 673
},
{
"epoch": 0.23,
"grad_norm": 0.3897676596222373,
"learning_rate": 8.820399777044258e-06,
"loss": 2.6128,
"step": 674
},
{
"epoch": 0.23,
"grad_norm": 0.39558984353182575,
"learning_rate": 8.816991413705515e-06,
"loss": 2.4529,
"step": 675
},
{
"epoch": 0.23,
"grad_norm": 0.3800721664980996,
"learning_rate": 8.81357879392254e-06,
"loss": 2.603,
"step": 676
},
{
"epoch": 0.23,
"grad_norm": 0.45424267616939173,
"learning_rate": 8.810161921500846e-06,
"loss": 2.6688,
"step": 677
},
{
"epoch": 0.23,
"grad_norm": 0.41749462081390515,
"learning_rate": 8.806740800250695e-06,
"loss": 2.5672,
"step": 678
},
{
"epoch": 0.23,
"grad_norm": 0.3809152415456229,
"learning_rate": 8.80331543398708e-06,
"loss": 2.5946,
"step": 679
},
{
"epoch": 0.23,
"grad_norm": 0.37890440163702277,
"learning_rate": 8.799885826529736e-06,
"loss": 2.5157,
"step": 680
},
{
"epoch": 0.23,
"grad_norm": 0.4239167503127884,
"learning_rate": 8.796451981703124e-06,
"loss": 2.5184,
"step": 681
},
{
"epoch": 0.23,
"grad_norm": 0.3758665245833528,
"learning_rate": 8.793013903336428e-06,
"loss": 2.5525,
"step": 682
},
{
"epoch": 0.23,
"grad_norm": 0.3967420933674155,
"learning_rate": 8.789571595263554e-06,
"loss": 2.4598,
"step": 683
},
{
"epoch": 0.23,
"grad_norm": 0.3978377671769104,
"learning_rate": 8.786125061323126e-06,
"loss": 2.5469,
"step": 684
},
{
"epoch": 0.23,
"grad_norm": 0.4260046486600914,
"learning_rate": 8.782674305358481e-06,
"loss": 2.5178,
"step": 685
},
{
"epoch": 0.23,
"grad_norm": 0.3813517395660569,
"learning_rate": 8.77921933121766e-06,
"loss": 2.532,
"step": 686
},
{
"epoch": 0.23,
"grad_norm": 0.4077111889338434,
"learning_rate": 8.775760142753414e-06,
"loss": 2.5217,
"step": 687
},
{
"epoch": 0.23,
"grad_norm": 0.38579419908077955,
"learning_rate": 8.772296743823187e-06,
"loss": 2.4944,
"step": 688
},
{
"epoch": 0.23,
"grad_norm": 0.37757286950559593,
"learning_rate": 8.768829138289124e-06,
"loss": 2.5247,
"step": 689
},
{
"epoch": 0.23,
"grad_norm": 0.3938089311489784,
"learning_rate": 8.765357330018056e-06,
"loss": 2.4969,
"step": 690
},
{
"epoch": 0.23,
"grad_norm": 0.3911060994568757,
"learning_rate": 8.761881322881505e-06,
"loss": 2.5603,
"step": 691
},
{
"epoch": 0.23,
"grad_norm": 0.4093559631988249,
"learning_rate": 8.758401120755672e-06,
"loss": 2.5598,
"step": 692
},
{
"epoch": 0.23,
"grad_norm": 0.4091429591481071,
"learning_rate": 8.754916727521437e-06,
"loss": 2.618,
"step": 693
},
{
"epoch": 0.23,
"grad_norm": 0.41610906563454025,
"learning_rate": 8.751428147064356e-06,
"loss": 2.6246,
"step": 694
},
{
"epoch": 0.23,
"grad_norm": 0.40454770107419946,
"learning_rate": 8.74793538327465e-06,
"loss": 2.6546,
"step": 695
},
{
"epoch": 0.23,
"grad_norm": 0.39444404998089466,
"learning_rate": 8.744438440047207e-06,
"loss": 2.5484,
"step": 696
},
{
"epoch": 0.23,
"grad_norm": 0.3931267733450527,
"learning_rate": 8.740937321281576e-06,
"loss": 2.5972,
"step": 697
},
{
"epoch": 0.23,
"grad_norm": 0.3900499116002855,
"learning_rate": 8.737432030881962e-06,
"loss": 2.6069,
"step": 698
},
{
"epoch": 0.23,
"grad_norm": 0.3931387129910314,
"learning_rate": 8.733922572757223e-06,
"loss": 2.4599,
"step": 699
},
{
"epoch": 0.23,
"grad_norm": 0.40456271070229993,
"learning_rate": 8.730408950820864e-06,
"loss": 2.489,
"step": 700
},
{
"epoch": 0.23,
"grad_norm": 0.42428694119007687,
"learning_rate": 8.726891168991028e-06,
"loss": 2.63,
"step": 701
},
{
"epoch": 0.24,
"grad_norm": 0.39795530349666575,
"learning_rate": 8.723369231190506e-06,
"loss": 2.6393,
"step": 702
},
{
"epoch": 0.24,
"grad_norm": 0.41695698716832086,
"learning_rate": 8.719843141346717e-06,
"loss": 2.6571,
"step": 703
},
{
"epoch": 0.24,
"grad_norm": 0.40011888912769766,
"learning_rate": 8.716312903391712e-06,
"loss": 2.5803,
"step": 704
},
{
"epoch": 0.24,
"grad_norm": 0.43064551627086994,
"learning_rate": 8.71277852126217e-06,
"loss": 2.5659,
"step": 705
},
{
"epoch": 0.24,
"grad_norm": 0.4163134945736012,
"learning_rate": 8.709239998899386e-06,
"loss": 2.5896,
"step": 706
},
{
"epoch": 0.24,
"grad_norm": 0.3985848434870783,
"learning_rate": 8.705697340249275e-06,
"loss": 2.6886,
"step": 707
},
{
"epoch": 0.24,
"grad_norm": 0.42880397473573023,
"learning_rate": 8.702150549262365e-06,
"loss": 2.5263,
"step": 708
},
{
"epoch": 0.24,
"grad_norm": 0.422368185202293,
"learning_rate": 8.698599629893794e-06,
"loss": 2.5641,
"step": 709
},
{
"epoch": 0.24,
"grad_norm": 0.40746276738031384,
"learning_rate": 8.695044586103297e-06,
"loss": 2.5599,
"step": 710
},
{
"epoch": 0.24,
"grad_norm": 0.42202722487495065,
"learning_rate": 8.691485421855214e-06,
"loss": 2.623,
"step": 711
},
{
"epoch": 0.24,
"grad_norm": 0.39927169015807223,
"learning_rate": 8.68792214111848e-06,
"loss": 2.7374,
"step": 712
},
{
"epoch": 0.24,
"grad_norm": 0.41192966538348047,
"learning_rate": 8.684354747866614e-06,
"loss": 2.5767,
"step": 713
},
{
"epoch": 0.24,
"grad_norm": 0.40163184580235517,
"learning_rate": 8.680783246077732e-06,
"loss": 2.5966,
"step": 714
},
{
"epoch": 0.24,
"grad_norm": 0.3846715821878233,
"learning_rate": 8.67720763973452e-06,
"loss": 2.6508,
"step": 715
},
{
"epoch": 0.24,
"grad_norm": 0.4441529117116035,
"learning_rate": 8.67362793282425e-06,
"loss": 2.6694,
"step": 716
},
{
"epoch": 0.24,
"grad_norm": 0.3865231876844743,
"learning_rate": 8.67004412933876e-06,
"loss": 2.588,
"step": 717
},
{
"epoch": 0.24,
"grad_norm": 0.4247407185414399,
"learning_rate": 8.666456233274461e-06,
"loss": 2.5641,
"step": 718
},
{
"epoch": 0.24,
"grad_norm": 0.42994671718800553,
"learning_rate": 8.662864248632325e-06,
"loss": 2.6443,
"step": 719
},
{
"epoch": 0.24,
"grad_norm": 0.3973979478265679,
"learning_rate": 8.659268179417886e-06,
"loss": 2.5991,
"step": 720
},
{
"epoch": 0.24,
"grad_norm": 0.387493032731209,
"learning_rate": 8.65566802964123e-06,
"loss": 2.6397,
"step": 721
},
{
"epoch": 0.24,
"grad_norm": 0.42717867074031024,
"learning_rate": 8.652063803316994e-06,
"loss": 2.5512,
"step": 722
},
{
"epoch": 0.24,
"grad_norm": 0.47574539174721103,
"learning_rate": 8.648455504464362e-06,
"loss": 2.5937,
"step": 723
},
{
"epoch": 0.24,
"grad_norm": 0.4204495330694543,
"learning_rate": 8.644843137107058e-06,
"loss": 2.6502,
"step": 724
},
{
"epoch": 0.24,
"grad_norm": 0.38454967577088195,
"learning_rate": 8.641226705273344e-06,
"loss": 2.5139,
"step": 725
},
{
"epoch": 0.24,
"grad_norm": 0.4056767306969294,
"learning_rate": 8.637606212996016e-06,
"loss": 2.5353,
"step": 726
},
{
"epoch": 0.24,
"grad_norm": 0.39146533223192914,
"learning_rate": 8.633981664312393e-06,
"loss": 2.57,
"step": 727
},
{
"epoch": 0.24,
"grad_norm": 0.44199713258762546,
"learning_rate": 8.630353063264321e-06,
"loss": 2.629,
"step": 728
},
{
"epoch": 0.24,
"grad_norm": 0.4568822242802304,
"learning_rate": 8.626720413898167e-06,
"loss": 2.6111,
"step": 729
},
{
"epoch": 0.24,
"grad_norm": 0.39149382291137347,
"learning_rate": 8.623083720264806e-06,
"loss": 2.5724,
"step": 730
},
{
"epoch": 0.24,
"grad_norm": 0.4047779409319991,
"learning_rate": 8.619442986419631e-06,
"loss": 2.4815,
"step": 731
},
{
"epoch": 0.25,
"grad_norm": 0.39009103519491567,
"learning_rate": 8.615798216422531e-06,
"loss": 2.4815,
"step": 732
},
{
"epoch": 0.25,
"grad_norm": 0.4010843175857095,
"learning_rate": 8.612149414337905e-06,
"loss": 2.6492,
"step": 733
},
{
"epoch": 0.25,
"grad_norm": 0.40851694118151305,
"learning_rate": 8.608496584234641e-06,
"loss": 2.5597,
"step": 734
},
{
"epoch": 0.25,
"grad_norm": 0.3714028727338163,
"learning_rate": 8.604839730186125e-06,
"loss": 2.4603,
"step": 735
},
{
"epoch": 0.25,
"grad_norm": 0.37566221893800655,
"learning_rate": 8.601178856270224e-06,
"loss": 2.6894,
"step": 736
},
{
"epoch": 0.25,
"grad_norm": 0.4014233552191909,
"learning_rate": 8.597513966569295e-06,
"loss": 2.5056,
"step": 737
},
{
"epoch": 0.25,
"grad_norm": 0.3837979730634555,
"learning_rate": 8.593845065170165e-06,
"loss": 2.556,
"step": 738
},
{
"epoch": 0.25,
"grad_norm": 0.45198136738439715,
"learning_rate": 8.590172156164139e-06,
"loss": 2.6372,
"step": 739
},
{
"epoch": 0.25,
"grad_norm": 0.3875310439318507,
"learning_rate": 8.586495243646992e-06,
"loss": 2.5632,
"step": 740
},
{
"epoch": 0.25,
"grad_norm": 0.4249496037391712,
"learning_rate": 8.582814331718961e-06,
"loss": 2.6393,
"step": 741
},
{
"epoch": 0.25,
"grad_norm": 0.44396635342172164,
"learning_rate": 8.579129424484746e-06,
"loss": 2.6965,
"step": 742
},
{
"epoch": 0.25,
"grad_norm": 0.44773976211089295,
"learning_rate": 8.575440526053497e-06,
"loss": 2.6293,
"step": 743
},
{
"epoch": 0.25,
"grad_norm": 0.42289162813804465,
"learning_rate": 8.571747640538818e-06,
"loss": 2.5892,
"step": 744
},
{
"epoch": 0.25,
"grad_norm": 0.38147902332421474,
"learning_rate": 8.568050772058763e-06,
"loss": 2.4829,
"step": 745
},
{
"epoch": 0.25,
"grad_norm": 0.4032218757560865,
"learning_rate": 8.564349924735817e-06,
"loss": 2.555,
"step": 746
},
{
"epoch": 0.25,
"grad_norm": 0.4141529092512198,
"learning_rate": 8.560645102696914e-06,
"loss": 2.6312,
"step": 747
},
{
"epoch": 0.25,
"grad_norm": 0.431306666645557,
"learning_rate": 8.556936310073413e-06,
"loss": 2.4882,
"step": 748
},
{
"epoch": 0.25,
"grad_norm": 0.429178526013904,
"learning_rate": 8.553223551001101e-06,
"loss": 2.5607,
"step": 749
},
{
"epoch": 0.25,
"grad_norm": 0.4243555435023986,
"learning_rate": 8.549506829620193e-06,
"loss": 2.6239,
"step": 750
},
{
"epoch": 0.25,
"grad_norm": 0.414717893313139,
"learning_rate": 8.545786150075314e-06,
"loss": 2.5657,
"step": 751
},
{
"epoch": 0.25,
"grad_norm": 0.38781113900215547,
"learning_rate": 8.542061516515512e-06,
"loss": 2.5971,
"step": 752
},
{
"epoch": 0.25,
"grad_norm": 0.3928555699106409,
"learning_rate": 8.538332933094237e-06,
"loss": 2.6138,
"step": 753
},
{
"epoch": 0.25,
"grad_norm": 0.39710846064235467,
"learning_rate": 8.534600403969352e-06,
"loss": 2.5739,
"step": 754
},
{
"epoch": 0.25,
"grad_norm": 0.41230975156535604,
"learning_rate": 8.530863933303108e-06,
"loss": 2.6418,
"step": 755
},
{
"epoch": 0.25,
"grad_norm": 0.4041817911339788,
"learning_rate": 8.527123525262162e-06,
"loss": 2.4362,
"step": 756
},
{
"epoch": 0.25,
"grad_norm": 0.3975027751225941,
"learning_rate": 8.52337918401756e-06,
"loss": 2.5741,
"step": 757
},
{
"epoch": 0.25,
"grad_norm": 0.40773708260012337,
"learning_rate": 8.519630913744726e-06,
"loss": 2.67,
"step": 758
},
{
"epoch": 0.25,
"grad_norm": 0.4205382680251358,
"learning_rate": 8.515878718623473e-06,
"loss": 2.6794,
"step": 759
},
{
"epoch": 0.25,
"grad_norm": 0.4328560266745644,
"learning_rate": 8.512122602837993e-06,
"loss": 2.6785,
"step": 760
},
{
"epoch": 0.25,
"grad_norm": 0.3886682903181997,
"learning_rate": 8.508362570576842e-06,
"loss": 2.5902,
"step": 761
},
{
"epoch": 0.26,
"grad_norm": 0.39657155849490777,
"learning_rate": 8.504598626032947e-06,
"loss": 2.6572,
"step": 762
},
{
"epoch": 0.26,
"grad_norm": 0.40633343793420695,
"learning_rate": 8.5008307734036e-06,
"loss": 2.5907,
"step": 763
},
{
"epoch": 0.26,
"grad_norm": 0.42596452600683693,
"learning_rate": 8.49705901689045e-06,
"loss": 2.598,
"step": 764
},
{
"epoch": 0.26,
"grad_norm": 0.415658573716301,
"learning_rate": 8.493283360699496e-06,
"loss": 2.6445,
"step": 765
},
{
"epoch": 0.26,
"grad_norm": 0.3817716773710336,
"learning_rate": 8.489503809041089e-06,
"loss": 2.5449,
"step": 766
},
{
"epoch": 0.26,
"grad_norm": 0.4174933180588395,
"learning_rate": 8.485720366129922e-06,
"loss": 2.5976,
"step": 767
},
{
"epoch": 0.26,
"grad_norm": 0.41151998959306785,
"learning_rate": 8.48193303618503e-06,
"loss": 2.5543,
"step": 768
},
{
"epoch": 0.26,
"grad_norm": 0.39560745302793604,
"learning_rate": 8.47814182342978e-06,
"loss": 2.5391,
"step": 769
},
{
"epoch": 0.26,
"grad_norm": 0.39459379097045677,
"learning_rate": 8.47434673209187e-06,
"loss": 2.5435,
"step": 770
},
{
"epoch": 0.26,
"grad_norm": 0.4146931721601953,
"learning_rate": 8.47054776640332e-06,
"loss": 2.661,
"step": 771
},
{
"epoch": 0.26,
"grad_norm": 0.4311836896585072,
"learning_rate": 8.46674493060048e-06,
"loss": 2.5523,
"step": 772
},
{
"epoch": 0.26,
"grad_norm": 0.40199742643044956,
"learning_rate": 8.462938228924e-06,
"loss": 2.5918,
"step": 773
},
{
"epoch": 0.26,
"grad_norm": 0.37538274464984434,
"learning_rate": 8.459127665618858e-06,
"loss": 2.5758,
"step": 774
},
{
"epoch": 0.26,
"grad_norm": 0.43428204866077696,
"learning_rate": 8.455313244934324e-06,
"loss": 2.5581,
"step": 775
},
{
"epoch": 0.26,
"grad_norm": 0.4037835921789478,
"learning_rate": 8.45149497112398e-06,
"loss": 2.6926,
"step": 776
},
{
"epoch": 0.26,
"grad_norm": 0.4091287731459371,
"learning_rate": 8.447672848445695e-06,
"loss": 2.5811,
"step": 777
},
{
"epoch": 0.26,
"grad_norm": 0.4217107594408988,
"learning_rate": 8.44384688116164e-06,
"loss": 2.5743,
"step": 778
},
{
"epoch": 0.26,
"grad_norm": 0.39746033764543187,
"learning_rate": 8.44001707353827e-06,
"loss": 2.5957,
"step": 779
},
{
"epoch": 0.26,
"grad_norm": 0.4441830214745251,
"learning_rate": 8.436183429846314e-06,
"loss": 2.6142,
"step": 780
},
{
"epoch": 0.26,
"grad_norm": 0.40324050076689993,
"learning_rate": 8.43234595436079e-06,
"loss": 2.5527,
"step": 781
},
{
"epoch": 0.26,
"grad_norm": 0.4211610121078822,
"learning_rate": 8.428504651360988e-06,
"loss": 2.6577,
"step": 782
},
{
"epoch": 0.26,
"grad_norm": 0.40083303433021816,
"learning_rate": 8.424659525130456e-06,
"loss": 2.6756,
"step": 783
},
{
"epoch": 0.26,
"grad_norm": 0.40395875512045837,
"learning_rate": 8.420810579957016e-06,
"loss": 2.5976,
"step": 784
},
{
"epoch": 0.26,
"grad_norm": 0.3813669588134605,
"learning_rate": 8.416957820132743e-06,
"loss": 2.5569,
"step": 785
},
{
"epoch": 0.26,
"grad_norm": 0.4134031840620254,
"learning_rate": 8.413101249953971e-06,
"loss": 2.4946,
"step": 786
},
{
"epoch": 0.26,
"grad_norm": 0.40916067730209765,
"learning_rate": 8.409240873721277e-06,
"loss": 2.5588,
"step": 787
},
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.420463496600291, |
|
"learning_rate": 8.405376695739485e-06, |
|
"loss": 2.6316, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.38726844637275737, |
|
"learning_rate": 8.40150872031766e-06, |
|
"loss": 2.7317, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.46619616046782564, |
|
"learning_rate": 8.397636951769099e-06, |
|
"loss": 2.6267, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.41938914728535004, |
|
"learning_rate": 8.39376139441133e-06, |
|
"loss": 2.5999, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4128360578700934, |
|
"learning_rate": 8.389882052566106e-06, |
|
"loss": 2.6008, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4236692842899877, |
|
"learning_rate": 8.3859989305594e-06, |
|
"loss": 2.5726, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4078628362817052, |
|
"learning_rate": 8.3821120327214e-06, |
|
"loss": 2.5205, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.39618573898162357, |
|
"learning_rate": 8.378221363386506e-06, |
|
"loss": 2.5956, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4034964151601362, |
|
"learning_rate": 8.37432692689332e-06, |
|
"loss": 2.4988, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.43923027322011543, |
|
"learning_rate": 8.370428727584654e-06, |
|
"loss": 2.5139, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4248305389606218, |
|
"learning_rate": 8.366526769807503e-06, |
|
"loss": 2.5911, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4110205308990833, |
|
"learning_rate": 8.362621057913063e-06, |
|
"loss": 2.5665, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4144144901313341, |
|
"learning_rate": 8.358711596256712e-06, |
|
"loss": 2.5092, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4197156000281137, |
|
"learning_rate": 8.354798389198013e-06, |
|
"loss": 2.6169, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.40972816288766706, |
|
"learning_rate": 8.3508814411007e-06, |
|
"loss": 2.6123, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.41927212859085966, |
|
"learning_rate": 8.346960756332683e-06, |
|
"loss": 2.4813, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4272037158112424, |
|
"learning_rate": 8.34303633926604e-06, |
|
"loss": 2.5397, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.41655980368998585, |
|
"learning_rate": 8.339108194277006e-06, |
|
"loss": 2.5427, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.41470497597592165, |
|
"learning_rate": 8.33517632574598e-06, |
|
"loss": 2.6004, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.43266402910209373, |
|
"learning_rate": 8.331240738057502e-06, |
|
"loss": 2.5065, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4241724505370535, |
|
"learning_rate": 8.327301435600273e-06, |
|
"loss": 2.6018, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4055328825578019, |
|
"learning_rate": 8.32335842276713e-06, |
|
"loss": 2.5533, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4080727473950192, |
|
"learning_rate": 8.319411703955042e-06, |
|
"loss": 2.474, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.3937313923147714, |
|
"learning_rate": 8.315461283565123e-06, |
|
"loss": 2.5147, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4165975592841607, |
|
"learning_rate": 8.311507166002604e-06, |
|
"loss": 2.5524, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.40304916540901103, |
|
"learning_rate": 8.307549355676843e-06, |
|
"loss": 2.6405, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4089370226477569, |
|
"learning_rate": 8.303587857001316e-06, |
|
"loss": 2.6325, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4013332900237908, |
|
"learning_rate": 8.299622674393615e-06, |
|
"loss": 2.5728, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.3970217140730988, |
|
"learning_rate": 8.29565381227543e-06, |
|
"loss": 2.578, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4450471154194791, |
|
"learning_rate": 8.291681275072568e-06, |
|
"loss": 2.6582, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4022231800937019, |
|
"learning_rate": 8.287705067214921e-06, |
|
"loss": 2.5899, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.39869647865692215, |
|
"learning_rate": 8.283725193136487e-06, |
|
"loss": 2.4949, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.38416378762137965, |
|
"learning_rate": 8.27974165727534e-06, |
|
"loss": 2.5464, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4233181605786404, |
|
"learning_rate": 8.275754464073645e-06, |
|
"loss": 2.6254, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4102451954629436, |
|
"learning_rate": 8.271763617977642e-06, |
|
"loss": 2.5013, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4252367825626563, |
|
"learning_rate": 8.26776912343765e-06, |
|
"loss": 2.6307, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4420150339625009, |
|
"learning_rate": 8.263770984908049e-06, |
|
"loss": 2.5286, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.43126175076604883, |
|
"learning_rate": 8.259769206847286e-06, |
|
"loss": 2.6439, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4014097316497402, |
|
"learning_rate": 8.255763793717868e-06, |
|
"loss": 2.5087, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.40908127650566717, |
|
"learning_rate": 8.251754749986353e-06, |
|
"loss": 2.5259, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.44382902301414784, |
|
"learning_rate": 8.247742080123351e-06, |
|
"loss": 2.5059, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.5690583326008019, |
|
"learning_rate": 8.243725788603509e-06, |
|
"loss": 2.5809, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4005616171832035, |
|
"learning_rate": 8.239705879905519e-06, |
|
"loss": 2.5534, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4180142030333501, |
|
"learning_rate": 8.235682358512106e-06, |
|
"loss": 2.6144, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4335665651339525, |
|
"learning_rate": 8.231655228910022e-06, |
|
"loss": 2.5939, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.44795603542160145, |
|
"learning_rate": 8.227624495590041e-06, |
|
"loss": 2.6328, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.39990249279797285, |
|
"learning_rate": 8.223590163046956e-06, |
|
"loss": 2.4888, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.422567364553395, |
|
"learning_rate": 8.219552235779578e-06, |
|
"loss": 2.5965, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4314727111378184, |
|
"learning_rate": 8.215510718290724e-06, |
|
"loss": 2.6093, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.40041522438961896, |
|
"learning_rate": 8.211465615087213e-06, |
|
"loss": 2.5153, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.41522560863984265, |
|
"learning_rate": 8.207416930679862e-06, |
|
"loss": 2.6018, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.42312993496111684, |
|
"learning_rate": 8.203364669583485e-06, |
|
"loss": 2.5895, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.42288979584341124, |
|
"learning_rate": 8.199308836316883e-06, |
|
"loss": 2.5965, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.42051194007786136, |
|
"learning_rate": 8.19524943540284e-06, |
|
"loss": 2.4559, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.42267226014337067, |
|
"learning_rate": 8.191186471368115e-06, |
|
"loss": 2.5894, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4204184855877659, |
|
"learning_rate": 8.18711994874345e-06, |
|
"loss": 2.5612, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.41720484508218253, |
|
"learning_rate": 8.183049872063543e-06, |
|
"loss": 2.5136, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4464853468623226, |
|
"learning_rate": 8.178976245867068e-06, |
|
"loss": 2.635, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4286598401042708, |
|
"learning_rate": 8.174899074696645e-06, |
|
"loss": 2.6825, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.43245122063916924, |
|
"learning_rate": 8.170818363098854e-06, |
|
"loss": 2.5948, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.41369230888120184, |
|
"learning_rate": 8.166734115624223e-06, |
|
"loss": 2.6412, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4293671193850312, |
|
"learning_rate": 8.162646336827222e-06, |
|
"loss": 2.5823, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.4369219810371937, |
|
"learning_rate": 8.158555031266255e-06, |
|
"loss": 2.5921, |
|
"step": 850 |
|
}, |
{
"epoch": 0.29,
"grad_norm": 0.41473690029057125,
"learning_rate": 8.154460203503667e-06,
"loss": 2.547,
"step": 851
},
{
"epoch": 0.29,
"grad_norm": 0.4312208386941552,
"learning_rate": 8.150361858105724e-06,
"loss": 2.5732,
"step": 852
},
{
"epoch": 0.29,
"grad_norm": 0.4390124675983267,
"learning_rate": 8.146259999642614e-06,
"loss": 2.5189,
"step": 853
},
{
"epoch": 0.29,
"grad_norm": 0.4137874265379334,
"learning_rate": 8.14215463268845e-06,
"loss": 2.5845,
"step": 854
},
{
"epoch": 0.29,
"grad_norm": 0.39993164923934327,
"learning_rate": 8.138045761821252e-06,
"loss": 2.6329,
"step": 855
},
{
"epoch": 0.29,
"grad_norm": 0.4437132444069032,
"learning_rate": 8.133933391622944e-06,
"loss": 2.6292,
"step": 856
},
{
"epoch": 0.29,
"grad_norm": 0.4235821198963829,
"learning_rate": 8.129817526679358e-06,
"loss": 2.6303,
"step": 857
},
{
"epoch": 0.29,
"grad_norm": 0.39602619625874363,
"learning_rate": 8.125698171580224e-06,
"loss": 2.6121,
"step": 858
},
{
"epoch": 0.29,
"grad_norm": 0.4132650361368975,
"learning_rate": 8.121575330919159e-06,
"loss": 2.6254,
"step": 859
},
{
"epoch": 0.29,
"grad_norm": 0.4030628075050604,
"learning_rate": 8.117449009293668e-06,
"loss": 2.7212,
"step": 860
},
{
"epoch": 0.29,
"grad_norm": 0.40489013194172396,
"learning_rate": 8.113319211305141e-06,
"loss": 2.6366,
"step": 861
},
{
"epoch": 0.29,
"grad_norm": 0.44092957744334427,
"learning_rate": 8.109185941558842e-06,
"loss": 2.6076,
"step": 862
},
{
"epoch": 0.29,
"grad_norm": 0.42045429985990546,
"learning_rate": 8.105049204663906e-06,
"loss": 2.5336,
"step": 863
},
{
"epoch": 0.29,
"grad_norm": 0.42044467762139054,
"learning_rate": 8.100909005233336e-06,
"loss": 2.6656,
"step": 864
},
{
"epoch": 0.29,
"grad_norm": 0.4176046443329716,
"learning_rate": 8.096765347883995e-06,
"loss": 2.5584,
"step": 865
},
{
"epoch": 0.29,
"grad_norm": 0.40906033106637607,
"learning_rate": 8.092618237236602e-06,
"loss": 2.4977,
"step": 866
},
{
"epoch": 0.29,
"grad_norm": 0.41217420167561647,
"learning_rate": 8.088467677915728e-06,
"loss": 2.5494,
"step": 867
},
{
"epoch": 0.29,
"grad_norm": 0.4164262634391922,
"learning_rate": 8.084313674549789e-06,
"loss": 2.7228,
"step": 868
},
{
"epoch": 0.29,
"grad_norm": 0.4237722192976071,
"learning_rate": 8.080156231771043e-06,
"loss": 2.5715,
"step": 869
},
{
"epoch": 0.29,
"grad_norm": 0.414586509360156,
"learning_rate": 8.075995354215578e-06,
"loss": 2.6481,
"step": 870
},
{
"epoch": 0.29,
"grad_norm": 0.3930741587756925,
"learning_rate": 8.07183104652332e-06,
"loss": 2.5999,
"step": 871
},
{
"epoch": 0.29,
"grad_norm": 0.3800161956171746,
"learning_rate": 8.067663313338014e-06,
"loss": 2.565,
"step": 872
},
{
"epoch": 0.29,
"grad_norm": 0.42395921077528126,
"learning_rate": 8.06349215930723e-06,
"loss": 2.5922,
"step": 873
},
{
"epoch": 0.29,
"grad_norm": 0.41039686865262626,
"learning_rate": 8.059317589082349e-06,
"loss": 2.5915,
"step": 874
},
{
"epoch": 0.29,
"grad_norm": 0.4214562704564119,
"learning_rate": 8.055139607318558e-06,
"loss": 2.5322,
"step": 875
},
{
"epoch": 0.29,
"grad_norm": 0.42154062229902023,
"learning_rate": 8.050958218674859e-06,
"loss": 2.5986,
"step": 876
},
{
"epoch": 0.29,
"grad_norm": 0.4310299450125749,
"learning_rate": 8.046773427814043e-06,
"loss": 2.6266,
"step": 877
},
{
"epoch": 0.29,
"grad_norm": 0.4395628220019876,
"learning_rate": 8.042585239402697e-06,
"loss": 2.5421,
"step": 878
},
{
"epoch": 0.29,
"grad_norm": 0.43732728273627564,
"learning_rate": 8.038393658111203e-06,
"loss": 2.4944,
"step": 879
},
{
"epoch": 0.29,
"grad_norm": 0.4099976369161659,
"learning_rate": 8.03419868861372e-06,
"loss": 2.5705,
"step": 880
},
{
"epoch": 0.3,
"grad_norm": 0.40913216034905114,
"learning_rate": 8.030000335588187e-06,
"loss": 2.5889,
"step": 881
},
{
"epoch": 0.3,
"grad_norm": 0.3980903983537651,
"learning_rate": 8.025798603716314e-06,
"loss": 2.5876,
"step": 882
},
{
"epoch": 0.3,
"grad_norm": 0.4298468554380081,
"learning_rate": 8.021593497683587e-06,
"loss": 2.5664,
"step": 883
},
{
"epoch": 0.3,
"grad_norm": 0.46062635167931587,
"learning_rate": 8.01738502217924e-06,
"loss": 2.644,
"step": 884
},
{
"epoch": 0.3,
"grad_norm": 0.4229239386448534,
"learning_rate": 8.013173181896283e-06,
"loss": 2.5189,
"step": 885
},
{
"epoch": 0.3,
"grad_norm": 0.4081552650831732,
"learning_rate": 8.008957981531462e-06,
"loss": 2.4752,
"step": 886
},
{
"epoch": 0.3,
"grad_norm": 0.4331028195271111,
"learning_rate": 8.004739425785277e-06,
"loss": 2.5987,
"step": 887
},
{
"epoch": 0.3,
"grad_norm": 0.40920188315560446,
"learning_rate": 8.00051751936197e-06,
"loss": 2.5385,
"step": 888
},
{
"epoch": 0.3,
"grad_norm": 0.4316135933123064,
"learning_rate": 7.996292266969518e-06,
"loss": 2.5597,
"step": 889
},
{
"epoch": 0.3,
"grad_norm": 0.4462340506740775,
"learning_rate": 7.992063673319632e-06,
"loss": 2.4633,
"step": 890
},
{
"epoch": 0.3,
"grad_norm": 0.42580447002405547,
"learning_rate": 7.987831743127742e-06,
"loss": 2.4647,
"step": 891
},
{
"epoch": 0.3,
"grad_norm": 0.41925259860489633,
"learning_rate": 7.983596481113005e-06,
"loss": 2.5792,
"step": 892
},
{
"epoch": 0.3,
"grad_norm": 0.42194763194144164,
"learning_rate": 7.979357891998294e-06,
"loss": 2.5388,
"step": 893
},
{
"epoch": 0.3,
"grad_norm": 0.4321751284259449,
"learning_rate": 7.975115980510187e-06,
"loss": 2.6989,
"step": 894
},
{
"epoch": 0.3,
"grad_norm": 0.412295945588522,
"learning_rate": 7.97087075137897e-06,
"loss": 2.5899,
"step": 895
},
{
"epoch": 0.3,
"grad_norm": 0.39834515118459113,
"learning_rate": 7.966622209338629e-06,
"loss": 2.589,
"step": 896
},
{
"epoch": 0.3,
"grad_norm": 0.413233484922437,
"learning_rate": 7.962370359126843e-06,
"loss": 2.6658,
"step": 897
},
{
"epoch": 0.3,
"grad_norm": 0.4605783421036789,
"learning_rate": 7.95811520548498e-06,
"loss": 2.6009,
"step": 898
},
{
"epoch": 0.3,
"grad_norm": 0.44280924675391853,
"learning_rate": 7.953856753158096e-06,
"loss": 2.6904,
"step": 899
},
{
"epoch": 0.3,
"grad_norm": 0.399962256558219,
"learning_rate": 7.949595006894917e-06,
"loss": 2.5138,
"step": 900
},
{
"epoch": 0.3,
"grad_norm": 0.4065872956264197,
"learning_rate": 7.945329971447851e-06,
"loss": 2.5331,
"step": 901
},
{
"epoch": 0.3,
"grad_norm": 0.43139524087257114,
"learning_rate": 7.941061651572965e-06,
"loss": 2.654,
"step": 902
},
{
"epoch": 0.3,
"grad_norm": 0.43610262802638144,
"learning_rate": 7.93679005203e-06,
"loss": 2.4393,
"step": 903
},
{
"epoch": 0.3,
"grad_norm": 0.4305492602763158,
"learning_rate": 7.932515177582341e-06,
"loss": 2.6138,
"step": 904
},
{
"epoch": 0.3,
"grad_norm": 0.4221443270245523,
"learning_rate": 7.928237032997037e-06,
"loss": 2.618,
"step": 905
},
{
"epoch": 0.3,
"grad_norm": 0.4422278732901728,
"learning_rate": 7.923955623044775e-06,
"loss": 2.6176,
"step": 906
},
{
"epoch": 0.3,
"grad_norm": 0.4266337688002018,
"learning_rate": 7.919670952499889e-06,
"loss": 2.4747,
"step": 907
},
{
"epoch": 0.3,
"grad_norm": 0.4477483651287364,
"learning_rate": 7.915383026140344e-06,
"loss": 2.5751,
"step": 908
},
{
"epoch": 0.3,
"grad_norm": 0.4177444069840092,
"learning_rate": 7.91109184874774e-06,
"loss": 2.6049,
"step": 909
},
{
"epoch": 0.3,
"grad_norm": 0.42079897809566313,
"learning_rate": 7.9067974251073e-06,
"loss": 2.5587,
"step": 910
},
{
"epoch": 0.31,
"grad_norm": 0.42684865722248433,
"learning_rate": 7.902499760007867e-06,
"loss": 2.5912,
"step": 911
},
{
"epoch": 0.31,
"grad_norm": 0.4071847359013808,
"learning_rate": 7.898198858241902e-06,
"loss": 2.5533,
"step": 912
},
{
"epoch": 0.31,
"grad_norm": 0.4272959827178456,
"learning_rate": 7.89389472460547e-06,
"loss": 2.6196,
"step": 913
},
{
"epoch": 0.31,
"grad_norm": 0.4567896212371643,
"learning_rate": 7.889587363898242e-06,
"loss": 2.6187,
"step": 914
},
{
"epoch": 0.31,
"grad_norm": 0.4213243352124343,
"learning_rate": 7.885276780923488e-06,
"loss": 2.6833,
"step": 915
},
{
"epoch": 0.31,
"grad_norm": 0.41023311802953555,
"learning_rate": 7.880962980488073e-06,
"loss": 2.6689,
"step": 916
},
{
"epoch": 0.31,
"grad_norm": 0.421825264718791,
"learning_rate": 7.876645967402448e-06,
"loss": 2.608,
"step": 917
},
{
"epoch": 0.31,
"grad_norm": 0.43157690967476947,
"learning_rate": 7.872325746480645e-06,
"loss": 2.534,
"step": 918
},
{
"epoch": 0.31,
"grad_norm": 0.4513483239147945,
"learning_rate": 7.868002322540279e-06,
"loss": 2.6496,
"step": 919
},
{
"epoch": 0.31,
"grad_norm": 0.44068006685442734,
"learning_rate": 7.863675700402527e-06,
"loss": 2.6127,
"step": 920
},
{
"epoch": 0.31,
"grad_norm": 0.4340762297818696,
"learning_rate": 7.859345884892143e-06,
"loss": 2.583,
"step": 921
},
{
"epoch": 0.31,
"grad_norm": 0.5535594322513243,
"learning_rate": 7.855012880837432e-06,
"loss": 2.4857,
"step": 922
},
{
"epoch": 0.31,
"grad_norm": 0.4725111746601944,
"learning_rate": 7.850676693070266e-06,
"loss": 2.5878,
"step": 923
},
{
"epoch": 0.31,
"grad_norm": 0.41724059592129575,
"learning_rate": 7.846337326426057e-06,
"loss": 2.5041,
"step": 924
},
{
"epoch": 0.31,
"grad_norm": 0.42325495272452096,
"learning_rate": 7.841994785743765e-06,
"loss": 2.4939,
"step": 925
},
{
"epoch": 0.31,
"grad_norm": 0.4448404822113957,
"learning_rate": 7.837649075865893e-06,
"loss": 2.7193,
"step": 926
},
{
"epoch": 0.31,
"grad_norm": 0.411834046291556,
"learning_rate": 7.833300201638475e-06,
"loss": 2.5689,
"step": 927
},
{
"epoch": 0.31,
"grad_norm": 0.43112137576355314,
"learning_rate": 7.828948167911073e-06,
"loss": 2.4763,
"step": 928
},
{
"epoch": 0.31,
"grad_norm": 0.41680385794684577,
"learning_rate": 7.824592979536774e-06,
"loss": 2.6338,
"step": 929
},
{
"epoch": 0.31,
"grad_norm": 0.4122113287041295,
"learning_rate": 7.820234641372182e-06,
"loss": 2.4718,
"step": 930
},
{
"epoch": 0.31,
"grad_norm": 0.4246695467962534,
"learning_rate": 7.815873158277414e-06,
"loss": 2.5562,
"step": 931
},
{
"epoch": 0.31,
"grad_norm": 0.42277124360103996,
"learning_rate": 7.811508535116093e-06,
"loss": 2.4938,
"step": 932
},
{
"epoch": 0.31,
"grad_norm": 0.4513815520578765,
"learning_rate": 7.807140776755345e-06,
"loss": 2.4926,
"step": 933
},
{
"epoch": 0.31,
"grad_norm": 0.45996391797568054,
"learning_rate": 7.80276988806579e-06,
"loss": 2.7031,
"step": 934
},
{
"epoch": 0.31,
"grad_norm": 0.4831176558870303,
"learning_rate": 7.798395873921542e-06,
"loss": 2.5952,
"step": 935
},
{
"epoch": 0.31,
"grad_norm": 0.45636962210820387,
"learning_rate": 7.794018739200196e-06,
"loss": 2.4938,
"step": 936
},
{
"epoch": 0.31,
"grad_norm": 0.4582229304281336,
"learning_rate": 7.789638488782832e-06,
"loss": 2.748,
"step": 937
},
{
"epoch": 0.31,
"grad_norm": 0.4515473425938092,
"learning_rate": 7.785255127553999e-06,
"loss": 2.6467,
"step": 938
},
{
"epoch": 0.31,
"grad_norm": 0.4435663622348958,
"learning_rate": 7.780868660401719e-06,
"loss": 2.5819,
"step": 939
},
{
"epoch": 0.31,
"grad_norm": 0.41412840624582015,
"learning_rate": 7.776479092217475e-06,
"loss": 2.5298,
"step": 940
},
{
"epoch": 0.32,
"grad_norm": 0.43024657897653096,
"learning_rate": 7.772086427896211e-06,
"loss": 2.571,
"step": 941
},
{
"epoch": 0.32,
"grad_norm": 0.4374848771887548,
"learning_rate": 7.767690672336321e-06,
"loss": 2.5744,
"step": 942
},
{
"epoch": 0.32,
"grad_norm": 0.42377155119745097,
"learning_rate": 7.763291830439646e-06,
"loss": 2.4888,
"step": 943
},
{
"epoch": 0.32,
"grad_norm": 0.4385016759124883,
"learning_rate": 7.75888990711147e-06,
"loss": 2.6311,
"step": 944
},
{
"epoch": 0.32,
"grad_norm": 0.4447177294698723,
"learning_rate": 7.754484907260513e-06,
"loss": 2.6521,
"step": 945
},
{
"epoch": 0.32,
"grad_norm": 0.4077239806470596,
"learning_rate": 7.750076835798927e-06,
"loss": 2.5797,
"step": 946
},
{
"epoch": 0.32,
"grad_norm": 0.428657823891645,
"learning_rate": 7.745665697642284e-06,
"loss": 2.5617,
"step": 947
},
{
"epoch": 0.32,
"grad_norm": 0.4354502823126484,
"learning_rate": 7.741251497709583e-06,
"loss": 2.6247,
"step": 948
},
{
"epoch": 0.32,
"grad_norm": 0.4633385145231587,
"learning_rate": 7.736834240923233e-06,
"loss": 2.6145,
"step": 949
},
{
"epoch": 0.32,
"grad_norm": 0.398561346216612,
"learning_rate": 7.73241393220905e-06,
"loss": 2.4895,
"step": 950
},
{
"epoch": 0.32,
"grad_norm": 0.4329616819731975,
"learning_rate": 7.727990576496259e-06,
"loss": 2.6225,
"step": 951
},
{
"epoch": 0.32,
"grad_norm": 0.4285001823264661,
"learning_rate": 7.723564178717477e-06,
"loss": 2.4792,
"step": 952
},
{
"epoch": 0.32,
"grad_norm": 0.4799081586852101,
"learning_rate": 7.71913474380872e-06,
"loss": 2.645,
"step": 953
},
{
"epoch": 0.32,
"grad_norm": 0.440591651249703,
"learning_rate": 7.714702276709382e-06,
"loss": 2.6201,
"step": 954
},
{
"epoch": 0.32,
"grad_norm": 0.42045495551086975,
"learning_rate": 7.710266782362248e-06,
"loss": 2.4546,
"step": 955
},
{
"epoch": 0.32,
"grad_norm": 0.44996984963607556,
"learning_rate": 7.705828265713468e-06,
"loss": 2.5964,
"step": 956
},
{
"epoch": 0.32,
"grad_norm": 0.44860901294069305,
"learning_rate": 7.701386731712575e-06,
"loss": 2.579,
"step": 957
},
{
"epoch": 0.32,
"grad_norm": 0.4465100265169577,
"learning_rate": 7.696942185312453e-06,
"loss": 2.5449,
"step": 958
},
{
"epoch": 0.32,
"grad_norm": 0.44784574908323493,
"learning_rate": 7.692494631469356e-06,
"loss": 2.4899,
"step": 959
},
{
"epoch": 0.32,
"grad_norm": 0.43842323845570746,
"learning_rate": 7.688044075142888e-06,
"loss": 2.5747,
"step": 960
},
{
"epoch": 0.32,
"grad_norm": 0.43228122591332846,
"learning_rate": 7.683590521295998e-06,
"loss": 2.5739,
"step": 961
},
{
"epoch": 0.32,
"grad_norm": 0.434804720290425,
"learning_rate": 7.679133974894984e-06,
"loss": 2.6809,
"step": 962
},
{
"epoch": 0.32,
"grad_norm": 0.42894342327405344,
"learning_rate": 7.674674440909475e-06,
"loss": 2.5423,
"step": 963
},
{
"epoch": 0.32,
"grad_norm": 0.41596006640315525,
"learning_rate": 7.670211924312437e-06,
"loss": 2.6565,
"step": 964
},
{
"epoch": 0.32,
"grad_norm": 0.4399642006984142,
"learning_rate": 7.665746430080155e-06,
"loss": 2.5905,
"step": 965
},
{
"epoch": 0.32,
"grad_norm": 0.43095602575237124,
"learning_rate": 7.661277963192241e-06,
"loss": 2.5439,
"step": 966
},
{
"epoch": 0.32,
"grad_norm": 0.42881278390211974,
"learning_rate": 7.65680652863162e-06,
"loss": 2.5704,
"step": 967
},
{
"epoch": 0.32,
"grad_norm": 0.4403334552659803,
"learning_rate": 7.652332131384526e-06,
"loss": 2.5899,
"step": 968
},
{
"epoch": 0.32,
"grad_norm": 0.43952778499904116,
"learning_rate": 7.647854776440496e-06,
"loss": 2.6278,
"step": 969
},
{
"epoch": 0.32,
"grad_norm": 0.44350619132871466,
"learning_rate": 7.643374468792364e-06,
"loss": 2.5577,
"step": 970
},
{
"epoch": 0.33,
"grad_norm": 0.43480932970578706,
"learning_rate": 7.638891213436261e-06,
"loss": 2.5002,
"step": 971
},
{
"epoch": 0.33,
"grad_norm": 0.4326045635095116,
"learning_rate": 7.634405015371603e-06,
"loss": 2.5841,
"step": 972
},
{
"epoch": 0.33,
"grad_norm": 0.45248147057167837,
"learning_rate": 7.629915879601087e-06,
"loss": 2.6901,
"step": 973
},
{
"epoch": 0.33,
"grad_norm": 0.47546982179960307,
"learning_rate": 7.625423811130681e-06,
"loss": 2.533,
"step": 974
},
{
"epoch": 0.33,
"grad_norm": 0.42614586992931125,
"learning_rate": 7.620928814969636e-06,
"loss": 2.6475,
"step": 975
},
{
"epoch": 0.33,
"grad_norm": 0.4395102230701426,
"learning_rate": 7.616430896130456e-06,
"loss": 2.5916,
"step": 976
},
{
"epoch": 0.33,
"grad_norm": 0.43456558053515715,
"learning_rate": 7.611930059628909e-06,
"loss": 2.5627,
"step": 977
},
{
"epoch": 0.33,
"grad_norm": 0.459423844990916,
"learning_rate": 7.607426310484014e-06,
"loss": 2.522,
"step": 978
},
{
"epoch": 0.33,
"grad_norm": 0.43233604854428664,
"learning_rate": 7.602919653718044e-06,
"loss": 2.5904,
"step": 979
},
{
"epoch": 0.33,
"grad_norm": 0.4697293541204728,
"learning_rate": 7.5984100943565055e-06,
"loss": 2.5513,
"step": 980
},
{
"epoch": 0.33,
"grad_norm": 0.4225011347016215,
"learning_rate": 7.5938976374281515e-06,
"loss": 2.4909,
"step": 981
},
{
"epoch": 0.33,
"grad_norm": 0.43926955733620127,
"learning_rate": 7.589382287964957e-06,
"loss": 2.6104,
"step": 982
},
{
"epoch": 0.33,
"grad_norm": 0.45591129115222123,
"learning_rate": 7.584864051002128e-06,
"loss": 2.7106,
"step": 983
},
{
"epoch": 0.33,
"grad_norm": 0.4349453283077686,
"learning_rate": 7.58034293157809e-06,
"loss": 2.5365,
"step": 984
},
{
"epoch": 0.33,
"grad_norm": 0.42835368432163357,
"learning_rate": 7.57581893473448e-06,
"loss": 2.5712,
"step": 985
},
{
"epoch": 0.33,
"grad_norm": 0.425073700713382,
"learning_rate": 7.571292065516149e-06,
"loss": 2.5438,
"step": 986
},
{
"epoch": 0.33,
"grad_norm": 0.43117639770458216,
"learning_rate": 7.566762328971146e-06,
"loss": 2.5966,
"step": 987
},
{
"epoch": 0.33,
"grad_norm": 0.4350355422638024,
"learning_rate": 7.562229730150716e-06,
"loss": 2.5597,
"step": 988
},
{
"epoch": 0.33,
"grad_norm": 0.45651613860416895,
"learning_rate": 7.557694274109305e-06,
"loss": 2.6819,
"step": 989
},
{
"epoch": 0.33,
"grad_norm": 0.4178926630375163,
"learning_rate": 7.553155965904535e-06,
"loss": 2.6029,
"step": 990
},
{
"epoch": 0.33,
"grad_norm": 0.42667679705117045,
"learning_rate": 7.548614810597214e-06,
"loss": 2.513,
"step": 991
},
{
"epoch": 0.33,
"grad_norm": 0.42468254701197683,
"learning_rate": 7.544070813251324e-06,
"loss": 2.5881,
"step": 992
},
{
"epoch": 0.33,
"grad_norm": 0.43467675094971464,
"learning_rate": 7.5395239789340155e-06,
"loss": 2.5797,
"step": 993
},
{
"epoch": 0.33,
"grad_norm": 0.4389732792842581,
"learning_rate": 7.534974312715603e-06,
"loss": 2.5709,
"step": 994
},
{
"epoch": 0.33,
"grad_norm": 0.4411521788061547,
"learning_rate": 7.530421819669558e-06,
"loss": 2.6326,
"step": 995
},
{
"epoch": 0.33,
"grad_norm": 0.4557196879536489,
"learning_rate": 7.5258665048725065e-06,
"loss": 2.596,
"step": 996
},
{
"epoch": 0.33,
"grad_norm": 0.43871902185286676,
"learning_rate": 7.521308373404218e-06,
"loss": 2.5556,
"step": 997
},
{
"epoch": 0.33,
"grad_norm": 0.42755630130788425,
"learning_rate": 7.516747430347607e-06,
"loss": 2.7271,
"step": 998
},
{
"epoch": 0.33,
"grad_norm": 0.4318108374082864,
"learning_rate": 7.51218368078872e-06,
"loss": 2.5897,
"step": 999
},
{
"epoch": 0.33,
"grad_norm": 0.43372744999074364,
"learning_rate": 7.507617129816733e-06,
"loss": 2.6207,
"step": 1000
},
{
"epoch": 0.34,
"grad_norm": 0.4349061846690901,
"learning_rate": 7.503047782523949e-06,
"loss": 2.5824,
"step": 1001
},
{
"epoch": 0.34,
"grad_norm": 0.44955498095402496,
"learning_rate": 7.498475644005787e-06,
"loss": 2.5644,
"step": 1002
},
{
"epoch": 0.34,
"grad_norm": 0.4421726426503066,
"learning_rate": 7.4939007193607786e-06,
"loss": 2.6945,
"step": 1003
},
{
"epoch": 0.34,
"grad_norm": 0.41894787340781975,
"learning_rate": 7.489323013690561e-06,
"loss": 2.5864,
"step": 1004
},
{
"epoch": 0.34,
"grad_norm": 0.43641982246269445,
"learning_rate": 7.484742532099878e-06,
"loss": 2.5838,
"step": 1005
},
{
"epoch": 0.34,
"grad_norm": 0.433542970312903,
"learning_rate": 7.480159279696562e-06,
"loss": 2.5402,
"step": 1006
},
{
"epoch": 0.34,
"grad_norm": 0.47002219178179716,
"learning_rate": 7.4755732615915385e-06,
"loss": 2.6275,
"step": 1007
},
{
"epoch": 0.34,
"grad_norm": 0.42655592623385635,
"learning_rate": 7.470984482898818e-06,
"loss": 2.5379,
"step": 1008
},
{
"epoch": 0.34,
"grad_norm": 0.4490796246910226,
"learning_rate": 7.46639294873549e-06,
"loss": 2.5878,
"step": 1009
},
{
"epoch": 0.34,
"grad_norm": 0.4353762578998085,
"learning_rate": 7.461798664221711e-06,
"loss": 2.5528,
"step": 1010
},
{
"epoch": 0.34,
"grad_norm": 0.4359763726298193,
"learning_rate": 7.457201634480713e-06,
"loss": 2.4963,
"step": 1011
},
{
"epoch": 0.34,
"grad_norm": 0.4293488989036144,
"learning_rate": 7.452601864638782e-06,
"loss": 2.507,
"step": 1012
},
{
"epoch": 0.34,
"grad_norm": 0.4632695896167269,
"learning_rate": 7.447999359825263e-06,
"loss": 2.5688,
"step": 1013
},
{
"epoch": 0.34,
"grad_norm": 0.4717146582108519,
"learning_rate": 7.443394125172552e-06,
"loss": 2.562,
"step": 1014
},
{
"epoch": 0.34,
"grad_norm": 0.45926730586833125,
"learning_rate": 7.438786165816084e-06,
"loss": 2.5078,
"step": 1015
},
{
"epoch": 0.34,
"grad_norm": 0.42035212985829007,
"learning_rate": 7.434175486894339e-06,
"loss": 2.6623,
"step": 1016
},
{
"epoch": 0.34,
"grad_norm": 0.442070486539419,
"learning_rate": 7.429562093548825e-06,
"loss": 2.5572,
"step": 1017
},
{
"epoch": 0.34,
"grad_norm": 0.4346339147079722,
"learning_rate": 7.42494599092408e-06,
"loss": 2.5031,
"step": 1018
},
{
"epoch": 0.34,
"grad_norm": 0.4486843745453151,
"learning_rate": 7.42032718416766e-06,
"loss": 2.6138,
"step": 1019
},
{
"epoch": 0.34,
"grad_norm": 0.454243627496171,
"learning_rate": 7.415705678430138e-06,
"loss": 2.569,
"step": 1020
},
{
"epoch": 0.34,
"grad_norm": 0.4378035377930223,
"learning_rate": 7.4110814788651e-06,
"loss": 2.5737,
"step": 1021
},
{
"epoch": 0.34,
"grad_norm": 0.4408324571659681,
"learning_rate": 7.406454590629129e-06,
"loss": 2.5312,
"step": 1022
},
{
"epoch": 0.34,
"grad_norm": 0.43626638605774093,
"learning_rate": 7.401825018881812e-06,
"loss": 2.6114,
"step": 1023
},
{
"epoch": 0.34,
"grad_norm": 0.45581278624186283,
"learning_rate": 7.397192768785724e-06,
"loss": 2.6438,
"step": 1024
},
{
"epoch": 0.34,
"grad_norm": 0.43570018858293746,
"learning_rate": 7.392557845506433e-06,
"loss": 2.5748,
"step": 1025
},
{
"epoch": 0.34,
"grad_norm": 0.4209876795639996,
"learning_rate": 7.387920254212482e-06,
"loss": 2.5272,
"step": 1026
},
{
"epoch": 0.34,
"grad_norm": 0.44083438389417995,
"learning_rate": 7.383280000075392e-06,
"loss": 2.5001,
"step": 1027
},
{
"epoch": 0.34,
"grad_norm": 0.453200235369117,
"learning_rate": 7.378637088269652e-06,
"loss": 2.6287,
"step": 1028
},
{
"epoch": 0.34,
"grad_norm": 0.43011889909731615,
"learning_rate": 7.373991523972715e-06,
"loss": 2.5724,
"step": 1029
},
{
"epoch": 0.35,
"grad_norm": 0.4442709068247595,
"learning_rate": 7.369343312364994e-06,
"loss": 2.681,
"step": 1030
},
{
"epoch": 0.35,
"grad_norm": 0.43930051060921954,
"learning_rate": 7.3646924586298515e-06,
"loss": 2.4956,
"step": 1031
},
{
"epoch": 0.35,
"grad_norm": 0.44486461669517774,
"learning_rate": 7.360038967953598e-06,
"loss": 2.617,
"step": 1032
},
{
"epoch": 0.35,
"grad_norm": 0.4337851986633554,
"learning_rate": 7.355382845525481e-06,
"loss": 2.5871,
"step": 1033
},
{
"epoch": 0.35,
"grad_norm": 0.4768810458318751,
"learning_rate": 7.350724096537687e-06,
"loss": 2.6812,
"step": 1034
},
{
"epoch": 0.35,
"grad_norm": 0.4294759402946892,
"learning_rate": 7.346062726185332e-06,
"loss": 2.4668,
"step": 1035
},
{
"epoch": 0.35,
"grad_norm": 0.4340582069750426,
"learning_rate": 7.341398739666451e-06,
"loss": 2.4101,
"step": 1036
},
{
"epoch": 0.35,
"grad_norm": 0.40832331589977605,
"learning_rate": 7.336732142181998e-06,
"loss": 2.512,
"step": 1037
},
{
"epoch": 0.35,
"grad_norm": 0.4447890099201175,
"learning_rate": 7.33206293893584e-06,
"loss": 2.6111,
"step": 1038
},
{
"epoch": 0.35,
"grad_norm": 0.4559691714502572,
"learning_rate": 7.3273911351347495e-06,
"loss": 2.6645,
"step": 1039
},
{
"epoch": 0.35,
"grad_norm": 0.45753809546198526,
"learning_rate": 7.3227167359883964e-06,
"loss": 2.6484,
"step": 1040
},
{
"epoch": 0.35,
"grad_norm": 0.4663086257055781,
"learning_rate": 7.318039746709349e-06,
"loss": 2.6783,
"step": 1041
},
{
"epoch": 0.35,
"grad_norm": 0.41836926175215,
"learning_rate": 7.313360172513059e-06,
"loss": 2.6437,
"step": 1042
},
{
"epoch": 0.35,
"grad_norm": 0.4192792544101712,
"learning_rate": 7.3086780186178645e-06,
"loss": 2.4873,
"step": 1043
},
{
"epoch": 0.35,
"grad_norm": 0.4517224408067883,
"learning_rate": 7.30399329024498e-06,
"loss": 2.6459,
"step": 1044
},
{
"epoch": 0.35,
"grad_norm": 0.42751398897427867,
"learning_rate": 7.299305992618488e-06,
"loss": 2.5837,
"step": 1045
},
{
"epoch": 0.35,
"grad_norm": 0.4265308955295411,
"learning_rate": 7.2946161309653375e-06,
"loss": 2.5892,
"step": 1046
},
{
"epoch": 0.35,
"grad_norm": 0.4238407467136127,
"learning_rate": 7.289923710515338e-06,
"loss": 2.5839,
"step": 1047
},
{
"epoch": 0.35,
"grad_norm": 0.4614155278757042,
"learning_rate": 7.285228736501153e-06,
"loss": 2.6102,
"step": 1048
},
{
"epoch": 0.35,
"grad_norm": 0.4422958659102835,
"learning_rate": 7.280531214158291e-06,
"loss": 2.5856,
"step": 1049
},
{
"epoch": 0.35,
"grad_norm": 0.42237143095235286,
"learning_rate": 7.275831148725101e-06,
"loss": 2.5692,
"step": 1050
},
{
"epoch": 0.35,
"grad_norm": 0.45361904637478645,
"learning_rate": 7.271128545442774e-06,
"loss": 2.5652,
"step": 1051
},
{
"epoch": 0.35,
"grad_norm": 0.4337472729775839,
"learning_rate": 7.266423409555324e-06,
"loss": 2.6164,
"step": 1052
},
{
"epoch": 0.35,
"grad_norm": 0.42362391039093894,
"learning_rate": 7.261715746309594e-06,
"loss": 2.5273,
"step": 1053
},
{
"epoch": 0.35,
"grad_norm": 0.431012140550024,
"learning_rate": 7.257005560955245e-06,
"loss": 2.5411,
"step": 1054
},
{
"epoch": 0.35,
"grad_norm": 0.452142900799659,
"learning_rate": 7.252292858744747e-06,
"loss": 2.5771,
"step": 1055
},
{
"epoch": 0.35,
"grad_norm": 0.43273717452006466,
"learning_rate": 7.247577644933379e-06,
"loss": 2.6122,
"step": 1056
},
{
"epoch": 0.35,
"grad_norm": 0.462962261724824,
"learning_rate": 7.242859924779221e-06,
"loss": 2.6279,
"step": 1057
},
{
"epoch": 0.35,
"grad_norm": 0.4258143210910348,
"learning_rate": 7.238139703543148e-06,
"loss": 2.5806,
"step": 1058
},
{
"epoch": 0.35,
"grad_norm": 0.4535192221696845,
"learning_rate": 7.233416986488822e-06,
"loss": 2.609,
"step": 1059
},
{
"epoch": 0.36,
"grad_norm": 0.4302664584348388,
"learning_rate": 7.2286917788826926e-06,
"loss": 2.5662,
"step": 1060
},
{
"epoch": 0.36,
"grad_norm": 0.46195490134362327,
"learning_rate": 7.2239640859939795e-06,
"loss": 2.6167,
"step": 1061
},
{
"epoch": 0.36,
"grad_norm": 0.44182897260584386,
"learning_rate": 7.219233913094683e-06,
"loss": 2.5813,
"step": 1062
},
{
"epoch": 0.36,
"grad_norm": 0.4434406021256981,
"learning_rate": 7.214501265459561e-06,
"loss": 2.6143,
"step": 1063
},
{
"epoch": 0.36,
"grad_norm": 0.4315144971686448,
"learning_rate": 7.2097661483661355e-06,
"loss": 2.6934,
"step": 1064
},
{
"epoch": 0.36,
"grad_norm": 0.441969335136358,
"learning_rate": 7.2050285670946776e-06,
"loss": 2.4584,
"step": 1065
},
{
"epoch": 0.36,
"grad_norm": 0.4295633483472062,
"learning_rate": 7.200288526928214e-06,
"loss": 2.5537,
"step": 1066
},
{
"epoch": 0.36,
"grad_norm": 0.4590776064719756,
"learning_rate": 7.195546033152506e-06,
"loss": 2.6182,
"step": 1067
},
{
"epoch": 0.36,
"grad_norm": 0.5628999506795336,
"learning_rate": 7.1908010910560555e-06,
"loss": 2.5655,
"step": 1068
},
{
"epoch": 0.36,
"grad_norm": 0.43083319078636145,
"learning_rate": 7.18605370593009e-06,
"loss": 2.5838,
"step": 1069
},
{
"epoch": 0.36,
"grad_norm": 0.43705338433970153,
"learning_rate": 7.181303883068569e-06,
"loss": 2.556,
"step": 1070
},
{
"epoch": 0.36,
"grad_norm": 0.41626277262476946,
"learning_rate": 7.176551627768163e-06,
"loss": 2.4507,
"step": 1071
},
{
"epoch": 0.36,
"grad_norm": 0.8154045786389643,
"learning_rate": 7.171796945328257e-06,
"loss": 2.5791,
"step": 1072
},
{
"epoch": 0.36,
"grad_norm": 0.6811278417412138,
"learning_rate": 7.167039841050947e-06,
"loss": 2.6046,
"step": 1073
},
{
"epoch": 0.36,
"grad_norm": 0.4469455756630236,
"learning_rate": 7.16228032024102e-06,
"loss": 2.5503,
"step": 1074
},
{
"epoch": 0.36,
"grad_norm": 0.49527095536212423,
"learning_rate": 7.157518388205969e-06,
"loss": 2.446,
"step": 1075
},
{
"epoch": 0.36,
"grad_norm": 0.5650785736288945,
"learning_rate": 7.152754050255967e-06,
"loss": 2.5739,
"step": 1076
},
{
"epoch": 0.36,
"grad_norm": 0.4688134150524742,
"learning_rate": 7.147987311703875e-06,
"loss": 2.53,
"step": 1077
},
{
"epoch": 0.36,
"grad_norm": 0.4343253484657035,
"learning_rate": 7.143218177865229e-06,
"loss": 2.6128,
"step": 1078
},
{
"epoch": 0.36,
"grad_norm": 0.4380533261518903,
"learning_rate": 7.138446654058238e-06,
"loss": 2.5129,
"step": 1079
},
{
"epoch": 0.36,
"grad_norm": 0.43821048947336705,
"learning_rate": 7.1336727456037716e-06,
"loss": 2.6101,
"step": 1080
},
{
"epoch": 0.36,
"grad_norm": 0.42895471337291513,
"learning_rate": 7.128896457825364e-06,
"loss": 2.5302,
"step": 1081
},
{
"epoch": 0.36,
"grad_norm": 0.46383576939242727,
"learning_rate": 7.124117796049199e-06,
"loss": 2.5467,
"step": 1082
},
{
"epoch": 0.36,
"grad_norm": 0.46391080952373015,
"learning_rate": 7.11933676560411e-06,
"loss": 2.5796,
"step": 1083
},
{
"epoch": 0.36,
"grad_norm": 0.43071291553448376,
"learning_rate": 7.114553371821571e-06,
"loss": 2.6143,
"step": 1084
},
{
"epoch": 0.36,
"grad_norm": 0.42639088514580703,
"learning_rate": 7.109767620035689e-06,
"loss": 2.5842,
"step": 1085
},
{
"epoch": 0.36,
"grad_norm": 0.4565996104308099,
"learning_rate": 7.104979515583206e-06,
"loss": 2.6329,
"step": 1086
},
{
"epoch": 0.36,
"grad_norm": 0.43550191741328553,
"learning_rate": 7.10018906380348e-06,
"loss": 2.5939,
"step": 1087
},
{
"epoch": 0.36,
"grad_norm": 0.43752917124361557,
"learning_rate": 7.0953962700384935e-06,
"loss": 2.4761,
"step": 1088
},
{
"epoch": 0.36,
"grad_norm": 0.45127428221951654,
"learning_rate": 7.090601139632838e-06,
"loss": 2.6407,
"step": 1089
},
{
"epoch": 0.37,
"grad_norm": 0.44095063114958083,
"learning_rate": 7.0858036779337095e-06,
"loss": 2.4693,
"step": 1090
},
{
"epoch": 0.37,
"grad_norm": 0.4569369650853151,
"learning_rate": 7.081003890290905e-06,
"loss": 2.6409,
"step": 1091
},
{
"epoch": 0.37,
"grad_norm": 0.44391721342003115,
"learning_rate": 7.076201782056816e-06,
"loss": 2.5961,
"step": 1092
},
{
"epoch": 0.37,
"grad_norm": 0.4809099048433727,
"learning_rate": 7.071397358586418e-06,
"loss": 2.7127,
"step": 1093
},
{
"epoch": 0.37,
"grad_norm": 0.44804630695230513,
"learning_rate": 7.066590625237274e-06,
"loss": 2.5516,
"step": 1094
},
{
"epoch": 0.37,
"grad_norm": 0.4659208433166112,
"learning_rate": 7.061781587369518e-06,
"loss": 2.6022,
"step": 1095
},
{
"epoch": 0.37,
"grad_norm": 0.44289040654813444,
"learning_rate": 7.056970250345857e-06,
"loss": 2.5322,
"step": 1096
},
{
"epoch": 0.37,
"grad_norm": 0.43869869323441935,
"learning_rate": 7.052156619531559e-06,
"loss": 2.5477,
"step": 1097
},
{
"epoch": 0.37,
"grad_norm": 0.46781991237967846,
"learning_rate": 7.047340700294454e-06,
"loss": 2.5365,
"step": 1098
},
{
"epoch": 0.37,
"grad_norm": 0.5023220205872594,
"learning_rate": 7.042522498004918e-06,
"loss": 2.6717,
"step": 1099
},
{
"epoch": 0.37,
"grad_norm": 0.46706191323763,
"learning_rate": 7.037702018035878e-06,
"loss": 2.5515,
"step": 1100
},
{
"epoch": 0.37,
"grad_norm": 0.4628447819796725,
"learning_rate": 7.032879265762799e-06,
"loss": 2.655,
"step": 1101
},
{
"epoch": 0.37,
"grad_norm": 0.4363362126296687,
"learning_rate": 7.028054246563679e-06,
"loss": 2.4911,
"step": 1102
},
{
"epoch": 0.37,
"grad_norm": 0.45657932728300715,
"learning_rate": 7.023226965819046e-06,
"loss": 2.5302,
"step": 1103
},
{
"epoch": 0.37,
"grad_norm": 0.4607815559518134,
"learning_rate": 7.01839742891195e-06,
"loss": 2.526,
"step": 1104
},
{
"epoch": 0.37,
"grad_norm": 0.4160020494708598,
"learning_rate": 7.013565641227954e-06,
"loss": 2.4879,
"step": 1105
},
{
"epoch": 0.37,
"grad_norm": 0.4302714974199038,
"learning_rate": 7.008731608155133e-06,
"loss": 2.6262,
"step": 1106
},
{
"epoch": 0.37,
"grad_norm": 0.41895900606126363,
"learning_rate": 7.003895335084065e-06,
"loss": 2.4959,
"step": 1107
},
{
"epoch": 0.37,
"grad_norm": 0.47937350542713325,
"learning_rate": 6.999056827407828e-06,
"loss": 2.5052,
"step": 1108
},
{
"epoch": 0.37,
"grad_norm": 0.4615463825839077,
"learning_rate": 6.994216090521991e-06,
"loss": 2.6507,
"step": 1109
},
{
"epoch": 0.37,
"grad_norm": 0.43451698919033627,
"learning_rate": 6.989373129824605e-06,
"loss": 2.4712,
"step": 1110
},
{
"epoch": 0.37,
"grad_norm": 0.46028446554987273,
"learning_rate": 6.984527950716207e-06,
"loss": 2.7052,
"step": 1111
},
{
"epoch": 0.37,
"grad_norm": 0.4737731411095733,
"learning_rate": 6.979680558599803e-06,
"loss": 2.5551,
"step": 1112
},
{
"epoch": 0.37,
"grad_norm": 0.41582506140366343,
"learning_rate": 6.97483095888087e-06,
"loss": 2.5221,
"step": 1113
},
{
"epoch": 0.37,
"grad_norm": 0.44955640625002374,
"learning_rate": 6.969979156967343e-06,
"loss": 2.5921,
"step": 1114
},
{
"epoch": 0.37,
"grad_norm": 0.41587551426031605,
"learning_rate": 6.965125158269619e-06,
"loss": 2.4685,
"step": 1115
},
{
"epoch": 0.37,
"grad_norm": 0.506174379752642,
"learning_rate": 6.960268968200539e-06,
"loss": 2.642,
"step": 1116
},
{
"epoch": 0.37,
"grad_norm": 0.5083226209303883,
"learning_rate": 6.955410592175389e-06,
"loss": 2.4504,
"step": 1117
},
{
"epoch": 0.37,
"grad_norm": 0.5037711206272876,
"learning_rate": 6.950550035611894e-06,
"loss": 2.6059,
"step": 1118
},
{
"epoch": 0.37,
"grad_norm": 0.489354807362807,
"learning_rate": 6.945687303930208e-06,
"loss": 2.5988,
"step": 1119
},
{
"epoch": 0.38,
"grad_norm": 0.43832044647163076,
"learning_rate": 6.940822402552914e-06,
"loss": 2.632,
"step": 1120
},
{
"epoch": 0.38,
"grad_norm": 0.41684219914751336,
"learning_rate": 6.935955336905012e-06,
"loss": 2.4968,
"step": 1121
},
{
"epoch": 0.38,
"grad_norm": 0.4551566387768076,
"learning_rate": 6.931086112413919e-06,
"loss": 2.5133,
"step": 1122
},
{
"epoch": 0.38,
"grad_norm": 0.44312495281396785,
"learning_rate": 6.926214734509451e-06,
"loss": 2.6446,
"step": 1123
},
{
"epoch": 0.38,
"grad_norm": 0.4666828989997829,
"learning_rate": 6.921341208623838e-06,
"loss": 2.7543,
"step": 1124
},
{
"epoch": 0.38,
"grad_norm": 0.46668743303052485,
"learning_rate": 6.916465540191692e-06,
"loss": 2.6665,
"step": 1125
},
{
"epoch": 0.38,
"grad_norm": 0.4498249441961465,
"learning_rate": 6.911587734650024e-06,
"loss": 2.6079,
"step": 1126
},
{
"epoch": 0.38,
"grad_norm": 0.4510315846233333,
"learning_rate": 6.906707797438223e-06,
"loss": 2.611,
"step": 1127
},
{
"epoch": 0.38,
"grad_norm": 0.4573631385730997,
"learning_rate": 6.901825733998056e-06,
"loss": 2.4184,
"step": 1128
},
{
"epoch": 0.38,
"grad_norm": 0.4715636369250365,
"learning_rate": 6.896941549773666e-06,
"loss": 2.6183,
"step": 1129
},
{
"epoch": 0.38,
"grad_norm": 0.4484713099690168,
"learning_rate": 6.892055250211552e-06,
"loss": 2.5424,
"step": 1130
},
{
"epoch": 0.38,
"grad_norm": 0.4316676204749411,
"learning_rate": 6.8871668407605796e-06,
"loss": 2.535,
"step": 1131
},
{
"epoch": 0.38,
"grad_norm": 0.43208885222760374,
"learning_rate": 6.88227632687196e-06,
"loss": 2.4664,
"step": 1132
},
{
"epoch": 0.38,
"grad_norm": 0.4406342295768975,
"learning_rate": 6.87738371399926e-06,
"loss": 2.6311,
"step": 1133
},
{
"epoch": 0.38,
"grad_norm": 0.4577854161502282,
"learning_rate": 6.872489007598381e-06,
"loss": 2.5439,
"step": 1134
},
{
"epoch": 0.38,
"grad_norm": 0.4600202194206776,
"learning_rate": 6.867592213127559e-06,
"loss": 2.6258,
"step": 1135
},
{
"epoch": 0.38,
"grad_norm": 0.44800496524801076,
"learning_rate": 6.862693336047361e-06,
"loss": 2.521,
"step": 1136
},
{
"epoch": 0.38,
"grad_norm": 0.46499483874748343,
"learning_rate": 6.8577923818206724e-06,
"loss": 2.5714,
"step": 1137
},
{
"epoch": 0.38,
"grad_norm": 0.43889677646580477,
"learning_rate": 6.852889355912702e-06,
"loss": 2.5829,
"step": 1138
},
{
"epoch": 0.38,
"grad_norm": 0.4844385997487572,
"learning_rate": 6.8479842637909624e-06,
"loss": 2.5358,
"step": 1139
},
{
"epoch": 0.38,
"grad_norm": 0.4568251772849157,
"learning_rate": 6.84307711092527e-06,
"loss": 2.459,
"step": 1140
},
{
"epoch": 0.38,
"grad_norm": 0.43900774957164473,
"learning_rate": 6.8381679027877456e-06,
"loss": 2.5391,
"step": 1141
},
{
"epoch": 0.38,
"grad_norm": 0.4354751938478969,
"learning_rate": 6.833256644852795e-06,
"loss": 2.5212,
"step": 1142
},
{
"epoch": 0.38,
"grad_norm": 0.45457266704424526,
"learning_rate": 6.828343342597114e-06,
"loss": 2.6235,
"step": 1143
},
{
"epoch": 0.38,
"grad_norm": 0.4757255960119849,
"learning_rate": 6.823428001499677e-06,
"loss": 2.5648,
"step": 1144
},
{
"epoch": 0.38,
"grad_norm": 0.47773191150395056,
"learning_rate": 6.81851062704173e-06,
"loss": 2.7091,
"step": 1145
},
{
"epoch": 0.38,
"grad_norm": 0.4584731156153877,
"learning_rate": 6.813591224706788e-06,
"loss": 2.6848,
"step": 1146
},
{
"epoch": 0.38,
"grad_norm": 0.4394450886309457,
"learning_rate": 6.8086697999806295e-06,
"loss": 2.5986,
"step": 1147
},
{
"epoch": 0.38,
"grad_norm": 0.4544126859932802,
"learning_rate": 6.803746358351285e-06,
"loss": 2.6201,
"step": 1148
},
{
"epoch": 0.38,
"grad_norm": 0.45893198891226583,
"learning_rate": 6.798820905309036e-06,
"loss": 2.6446,
"step": 1149
},
{
"epoch": 0.39,
"grad_norm": 0.46888579944630593,
"learning_rate": 6.793893446346405e-06,
"loss": 2.601,
"step": 1150
},
{
"epoch": 0.39,
"grad_norm": 0.5002077386363459,
"learning_rate": 6.788963986958153e-06,
"loss": 2.5332,
"step": 1151
},
{
"epoch": 0.39,
"grad_norm": 0.46513972756972205,
"learning_rate": 6.784032532641273e-06,
"loss": 2.6863,
"step": 1152
},
{
"epoch": 0.39,
"grad_norm": 0.4698499489753665,
"learning_rate": 6.779099088894979e-06,
"loss": 2.6624,
"step": 1153
},
{
"epoch": 0.39,
"grad_norm": 0.5120231363761293,
"learning_rate": 6.774163661220708e-06,
"loss": 2.5264,
"step": 1154
},
{
"epoch": 0.39,
"grad_norm": 0.43200849159270543,
"learning_rate": 6.769226255122104e-06,
"loss": 2.4693,
"step": 1155
},
{
"epoch": 0.39,
"grad_norm": 0.43094610204264894,
"learning_rate": 6.764286876105019e-06,
"loss": 2.5116,
"step": 1156
},
{
"epoch": 0.39,
"grad_norm": 0.4605835347553326,
"learning_rate": 6.759345529677512e-06,
"loss": 2.5829,
"step": 1157
},
{
"epoch": 0.39,
"grad_norm": 0.4710670425663677,
"learning_rate": 6.754402221349825e-06,
"loss": 2.4705,
"step": 1158
},
{
"epoch": 0.39,
"grad_norm": 0.439500763748165,
"learning_rate": 6.749456956634393e-06,
"loss": 2.6056,
"step": 1159
},
{
"epoch": 0.39,
"grad_norm": 0.4895156211562015,
"learning_rate": 6.744509741045835e-06,
"loss": 2.5483,
"step": 1160
},
{
"epoch": 0.39,
"grad_norm": 0.42577663034155283,
"learning_rate": 6.739560580100941e-06,
"loss": 2.6488,
"step": 1161
},
{
"epoch": 0.39,
"grad_norm": 0.4707206627835328,
"learning_rate": 6.734609479318671e-06,
"loss": 2.5462,
"step": 1162
},
{
"epoch": 0.39,
"grad_norm": 0.4601301137240529,
"learning_rate": 6.7296564442201524e-06,
"loss": 2.5521,
"step": 1163
},
{
"epoch": 0.39,
"grad_norm": 0.4450602235118456,
"learning_rate": 6.7247014803286645e-06,
"loss": 2.5476,
"step": 1164
},
{
"epoch": 0.39,
"grad_norm": 0.4349271530782206,
"learning_rate": 6.719744593169642e-06,
"loss": 2.5598,
"step": 1165
},
{
"epoch": 0.39,
"grad_norm": 0.46199999407153103,
"learning_rate": 6.714785788270658e-06,
"loss": 2.5668,
"step": 1166
},
{
"epoch": 0.39,
"grad_norm": 0.4779666476345309,
"learning_rate": 6.709825071161431e-06,
"loss": 2.5069,
"step": 1167
},
{
"epoch": 0.39,
"grad_norm": 0.46497301500221716,
"learning_rate": 6.704862447373808e-06,
"loss": 2.5101,
"step": 1168
},
{
"epoch": 0.39,
|
"grad_norm": 0.47375077813721217, |
|
"learning_rate": 6.699897922441762e-06, |
|
"loss": 2.638, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.4643578754928264, |
|
"learning_rate": 6.6949315019013895e-06, |
|
"loss": 2.6091, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.44590990249792184, |
|
"learning_rate": 6.689963191290897e-06, |
|
"loss": 2.5877, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.46897866711565833, |
|
"learning_rate": 6.684992996150599e-06, |
|
"loss": 2.6591, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.45022271790738155, |
|
"learning_rate": 6.680020922022914e-06, |
|
"loss": 2.5427, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.4587583269304219, |
|
"learning_rate": 6.675046974452354e-06, |
|
"loss": 2.5613, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.49630174256587056, |
|
"learning_rate": 6.670071158985521e-06, |
|
"loss": 2.7151, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.44245111809691745, |
|
"learning_rate": 6.6650934811710984e-06, |
|
"loss": 2.5714, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.4787174499890503, |
|
"learning_rate": 6.6601139465598485e-06, |
|
"loss": 2.5309, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.4512958769051424, |
|
"learning_rate": 6.655132560704602e-06, |
|
"loss": 2.54, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 0.443250600772457, |
|
"learning_rate": 6.650149329160258e-06, |
|
"loss": 2.6532, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4509528813478273, |
|
"learning_rate": 6.645164257483767e-06, |
|
"loss": 2.634, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4363624395334654, |
|
"learning_rate": 6.640177351234137e-06, |
|
"loss": 2.5797, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.472388215163046, |
|
"learning_rate": 6.635188615972421e-06, |
|
"loss": 2.493, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.43214345788970226, |
|
"learning_rate": 6.63019805726171e-06, |
|
"loss": 2.5832, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.47246380505868424, |
|
"learning_rate": 6.625205680667128e-06, |
|
"loss": 2.6045, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.45820506063769545, |
|
"learning_rate": 6.62021149175583e-06, |
|
"loss": 2.6442, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4635029537308452, |
|
"learning_rate": 6.615215496096987e-06, |
|
"loss": 2.6954, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.46448871934946745, |
|
"learning_rate": 6.610217699261788e-06, |
|
"loss": 2.4956, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4633216299950541, |
|
"learning_rate": 6.60521810682343e-06, |
|
"loss": 2.4558, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.47388358801825714, |
|
"learning_rate": 6.600216724357112e-06, |
|
"loss": 2.6062, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.47421097222726205, |
|
"learning_rate": 6.595213557440026e-06, |
|
"loss": 2.7121, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.44792592977955425, |
|
"learning_rate": 6.59020861165136e-06, |
|
"loss": 2.6372, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4331420401321665, |
|
"learning_rate": 6.5852018925722815e-06, |
|
"loss": 2.5859, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.44548810875988937, |
|
"learning_rate": 6.580193405785939e-06, |
|
"loss": 2.5591, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4460985206843281, |
|
"learning_rate": 6.575183156877446e-06, |
|
"loss": 2.5174, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4597105895933579, |
|
"learning_rate": 6.570171151433887e-06, |
|
"loss": 2.5861, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4564653136768479, |
|
"learning_rate": 6.5651573950443015e-06, |
|
"loss": 2.6458, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4934156625854828, |
|
"learning_rate": 6.5601418932996865e-06, |
|
"loss": 2.634, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4940027941844852, |
|
"learning_rate": 6.555124651792978e-06, |
|
"loss": 2.5543, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.47859111891025474, |
|
"learning_rate": 6.550105676119057e-06, |
|
"loss": 2.593, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.45188261584964545, |
|
"learning_rate": 6.545084971874738e-06, |
|
"loss": 2.5211, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.47426024103041675, |
|
"learning_rate": 6.540062544658763e-06, |
|
"loss": 2.572, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.48733240633546737, |
|
"learning_rate": 6.535038400071792e-06, |
|
"loss": 2.575, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.522723835907445, |
|
"learning_rate": 6.530012543716406e-06, |
|
"loss": 2.4252, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4467541631302905, |
|
"learning_rate": 6.524984981197089e-06, |
|
"loss": 2.4924, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4843494500835704, |
|
"learning_rate": 6.519955718120231e-06, |
|
"loss": 2.6027, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.45544520295980023, |
|
"learning_rate": 6.514924760094119e-06, |
|
"loss": 2.5955, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4352749030971699, |
|
"learning_rate": 6.509892112728928e-06, |
|
"loss": 2.4625, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.4650983994465635, |
|
"learning_rate": 6.504857781636717e-06, |
|
"loss": 2.5992, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.44426386322003814, |
|
"learning_rate": 6.499821772431421e-06, |
|
"loss": 2.6038, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.44912655755103936, |
|
"learning_rate": 6.494784090728852e-06, |
|
"loss": 2.5598, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.44476956179303945, |
|
"learning_rate": 6.489744742146682e-06, |
|
"loss": 2.5912, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4724154528911073, |
|
"learning_rate": 6.4847037323044435e-06, |
|
"loss": 2.6931, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.46437449772269807, |
|
"learning_rate": 6.47966106682352e-06, |
|
"loss": 2.5635, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4749528246592278, |
|
"learning_rate": 6.474616751327143e-06, |
|
"loss": 2.6371, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4471321073249305, |
|
"learning_rate": 6.469570791440385e-06, |
|
"loss": 2.5681, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.46911193861592976, |
|
"learning_rate": 6.464523192790149e-06, |
|
"loss": 2.5962, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4611190987744763, |
|
"learning_rate": 6.459473961005168e-06, |
|
"loss": 2.5888, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4508996282471076, |
|
"learning_rate": 6.454423101715994e-06, |
|
"loss": 2.6209, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4605768700329021, |
|
"learning_rate": 6.4493706205549965e-06, |
|
"loss": 2.5022, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.45844750355088554, |
|
"learning_rate": 6.444316523156352e-06, |
|
"loss": 2.5334, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4604012554345468, |
|
"learning_rate": 6.439260815156039e-06, |
|
"loss": 2.5601, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4762823813736404, |
|
"learning_rate": 6.434203502191832e-06, |
|
"loss": 2.6728, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.45712664008113646, |
|
"learning_rate": 6.429144589903299e-06, |
|
"loss": 2.5942, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.47406668699524035, |
|
"learning_rate": 6.4240840839317845e-06, |
|
"loss": 2.6325, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.44825053155317557, |
|
"learning_rate": 6.419021989920416e-06, |
|
"loss": 2.5028, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4775190914791269, |
|
"learning_rate": 6.41395831351409e-06, |
|
"loss": 2.6418, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4921926921994469, |
|
"learning_rate": 6.408893060359466e-06, |
|
"loss": 2.6952, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4780549615162635, |
|
"learning_rate": 6.4038262361049655e-06, |
|
"loss": 2.5216, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4768563912136851, |
|
"learning_rate": 6.398757846400757e-06, |
|
"loss": 2.4883, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.46607741771988215, |
|
"learning_rate": 6.393687896898759e-06, |
|
"loss": 2.5663, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4746852547226443, |
|
"learning_rate": 6.388616393252628e-06, |
|
"loss": 2.675, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.48522883193253036, |
|
"learning_rate": 6.3835433411177515e-06, |
|
"loss": 2.5872, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4667006517138037, |
|
"learning_rate": 6.378468746151247e-06, |
|
"loss": 2.5696, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4873283090007363, |
|
"learning_rate": 6.373392614011952e-06, |
|
"loss": 2.5399, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.47787788088501654, |
|
"learning_rate": 6.368314950360416e-06, |
|
"loss": 2.6245, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4765652515769694, |
|
"learning_rate": 6.363235760858896e-06, |
|
"loss": 2.5061, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4800782826782663, |
|
"learning_rate": 6.358155051171354e-06, |
|
"loss": 2.5323, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.4552714710692263, |
|
"learning_rate": 6.3530728269634454e-06, |
|
"loss": 2.5443, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.46310046626499785, |
|
"learning_rate": 6.347989093902514e-06, |
|
"loss": 2.6454, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.44389307777978326, |
|
"learning_rate": 6.342903857657585e-06, |
|
"loss": 2.6258, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4930022625784188, |
|
"learning_rate": 6.3378171238993636e-06, |
|
"loss": 2.555, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4613177137629376, |
|
"learning_rate": 6.3327288983002215e-06, |
|
"loss": 2.6009, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4990186896350244, |
|
"learning_rate": 6.327639186534195e-06, |
|
"loss": 2.5925, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4721129423818182, |
|
"learning_rate": 6.322547994276978e-06, |
|
"loss": 2.6107, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4378291833055506, |
|
"learning_rate": 6.317455327205916e-06, |
|
"loss": 2.5039, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.5307764860202739, |
|
"learning_rate": 6.312361190999997e-06, |
|
"loss": 2.5824, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4465787185489355, |
|
"learning_rate": 6.307265591339851e-06, |
|
"loss": 2.6157, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.44275859846600296, |
|
"learning_rate": 6.302168533907735e-06, |
|
"loss": 2.6128, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.5096919565573148, |
|
"learning_rate": 6.297070024387535e-06, |
|
"loss": 2.5575, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.46041151521427404, |
|
"learning_rate": 6.291970068464755e-06, |
|
"loss": 2.4884, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.45423030873995046, |
|
"learning_rate": 6.286868671826513e-06, |
|
"loss": 2.5099, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.45244274077454344, |
|
"learning_rate": 6.281765840161532e-06, |
|
"loss": 2.5465, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4463492254861338, |
|
"learning_rate": 6.276661579160137e-06, |
|
"loss": 2.6482, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.45096646211265967, |
|
"learning_rate": 6.271555894514245e-06, |
|
"loss": 2.5977, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.44575906414202116, |
|
"learning_rate": 6.266448791917364e-06, |
|
"loss": 2.6061, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.606917389509902, |
|
"learning_rate": 6.261340277064578e-06, |
|
"loss": 2.6464, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.47613827005523224, |
|
"learning_rate": 6.25623035565255e-06, |
|
"loss": 2.5408, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4532061701453479, |
|
"learning_rate": 6.2511190333795115e-06, |
|
"loss": 2.5762, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.46327531498778945, |
|
"learning_rate": 6.246006315945251e-06, |
|
"loss": 2.6817, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4753389755188805, |
|
"learning_rate": 6.24089220905112e-06, |
|
"loss": 2.5167, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.45448269180456724, |
|
"learning_rate": 6.235776718400015e-06, |
|
"loss": 2.6865, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.5156535805381762, |
|
"learning_rate": 6.230659849696375e-06, |
|
"loss": 2.6058, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4707312990992745, |
|
"learning_rate": 6.2255416086461795e-06, |
|
"loss": 2.5054, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4975422116204126, |
|
"learning_rate": 6.220422000956936e-06, |
|
"loss": 2.6425, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4860390106348795, |
|
"learning_rate": 6.215301032337674e-06, |
|
"loss": 2.6584, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.47906318010056725, |
|
"learning_rate": 6.210178708498945e-06, |
|
"loss": 2.5822, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4582887613413841, |
|
"learning_rate": 6.2050550351528096e-06, |
|
"loss": 2.5963, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 0.4872350241609007, |
|
"learning_rate": 6.19993001801283e-06, |
|
"loss": 2.5957, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.45391871555039404, |
|
"learning_rate": 6.194803662794073e-06, |
|
"loss": 2.5665, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.47199458318568166, |
|
"learning_rate": 6.189675975213094e-06, |
|
"loss": 2.4332, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4751959421676559, |
|
"learning_rate": 6.184546960987936e-06, |
|
"loss": 2.6663, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.5003458853262622, |
|
"learning_rate": 6.179416625838116e-06, |
|
"loss": 2.5985, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.509987735226826, |
|
"learning_rate": 6.174284975484632e-06, |
|
"loss": 2.6042, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.5006315593789818, |
|
"learning_rate": 6.169152015649944e-06, |
|
"loss": 2.6218, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.51435661366489, |
|
"learning_rate": 6.164017752057972e-06, |
|
"loss": 2.5504, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4925983470181212, |
|
"learning_rate": 6.158882190434092e-06, |
|
"loss": 2.6199, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4758692999734635, |
|
"learning_rate": 6.153745336505125e-06, |
|
"loss": 2.642, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.45794451180555695, |
|
"learning_rate": 6.148607195999335e-06, |
|
"loss": 2.5597, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4991582102475872, |
|
"learning_rate": 6.143467774646421e-06, |
|
"loss": 2.589, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4626964693067074, |
|
"learning_rate": 6.13832707817751e-06, |
|
"loss": 2.5227, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4451044500117788, |
|
"learning_rate": 6.1331851123251465e-06, |
|
"loss": 2.5149, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4473710914434683, |
|
"learning_rate": 6.128041882823298e-06, |
|
"loss": 2.5892, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4675862170597165, |
|
"learning_rate": 6.122897395407337e-06, |
|
"loss": 2.5835, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.5179120408166407, |
|
"learning_rate": 6.117751655814037e-06, |
|
"loss": 2.5477, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.48641058594229025, |
|
"learning_rate": 6.112604669781572e-06, |
|
"loss": 2.5264, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4798993093459797, |
|
"learning_rate": 6.107456443049504e-06, |
|
"loss": 2.5979, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.45983074961829723, |
|
"learning_rate": 6.10230698135878e-06, |
|
"loss": 2.7149, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4516532443780673, |
|
"learning_rate": 6.097156290451721e-06, |
|
"loss": 2.5407, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4529765971287995, |
|
"learning_rate": 6.092004376072021e-06, |
|
"loss": 2.5561, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4649889102249175, |
|
"learning_rate": 6.0868512439647345e-06, |
|
"loss": 2.5492, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4721703987887475, |
|
"learning_rate": 6.081696899876282e-06, |
|
"loss": 2.3582, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4714606272003474, |
|
"learning_rate": 6.076541349554429e-06, |
|
"loss": 2.5557, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.47315572719495963, |
|
"learning_rate": 6.071384598748288e-06, |
|
"loss": 2.5301, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4743054990906478, |
|
"learning_rate": 6.066226653208309e-06, |
|
"loss": 2.54, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.47572471896668495, |
|
"learning_rate": 6.061067518686277e-06, |
|
"loss": 2.5529, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.4942032544399013, |
|
"learning_rate": 6.0559072009352995e-06, |
|
"loss": 2.4468, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.49834056497619217, |
|
"learning_rate": 6.0507457057098064e-06, |
|
"loss": 2.4629, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.46575690583675217, |
|
"learning_rate": 6.045583038765538e-06, |
|
"loss": 2.6405, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.46617524002776023, |
|
"learning_rate": 6.0404192058595435e-06, |
|
"loss": 2.4729, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.44875109005638913, |
|
"learning_rate": 6.035254212750172e-06, |
|
"loss": 2.5976, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4865843928807325, |
|
"learning_rate": 6.030088065197065e-06, |
|
"loss": 2.4187, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4611861641188956, |
|
"learning_rate": 6.024920768961153e-06, |
|
"loss": 2.7054, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4868159318039705, |
|
"learning_rate": 6.019752329804645e-06, |
|
"loss": 2.5518, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.45295034528425004, |
|
"learning_rate": 6.014582753491027e-06, |
|
"loss": 2.5824, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4982243539571222, |
|
"learning_rate": 6.009412045785051e-06, |
|
"loss": 2.5801, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.47081479849103025, |
|
"learning_rate": 6.004240212452735e-06, |
|
"loss": 2.5029, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4442053634445994, |
|
"learning_rate": 5.999067259261347e-06, |
|
"loss": 2.4948, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4553893011822898, |
|
"learning_rate": 5.993893191979403e-06, |
|
"loss": 2.5039, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4885722704522011, |
|
"learning_rate": 5.98871801637667e-06, |
|
"loss": 2.5194, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.45898969931253597, |
|
"learning_rate": 5.983541738224141e-06, |
|
"loss": 2.5069, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.43885936042518603, |
|
"learning_rate": 5.978364363294044e-06, |
|
"loss": 2.5667, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4696773574657853, |
|
"learning_rate": 5.973185897359828e-06, |
|
"loss": 2.5707, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.46140705050441955, |
|
"learning_rate": 5.968006346196158e-06, |
|
"loss": 2.5789, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.46456435967376847, |
|
"learning_rate": 5.962825715578914e-06, |
|
"loss": 2.5773, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4994753230493636, |
|
"learning_rate": 5.957644011285173e-06, |
|
"loss": 2.531, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4488393093272709, |
|
"learning_rate": 5.952461239093215e-06, |
|
"loss": 2.5406, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.5037959369889324, |
|
"learning_rate": 5.947277404782505e-06, |
|
"loss": 2.5275, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.5038023849018948, |
|
"learning_rate": 5.942092514133697e-06, |
|
"loss": 2.5516, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4980262752087501, |
|
"learning_rate": 5.936906572928625e-06, |
|
"loss": 2.6257, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.47339554969076136, |
|
"learning_rate": 5.931719586950286e-06, |
|
"loss": 2.5523, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.5050212441593233, |
|
"learning_rate": 5.92653156198285e-06, |
|
"loss": 2.6781, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.46148450366771243, |
|
"learning_rate": 5.9213425038116415e-06, |
|
"loss": 2.5813, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4878858262155164, |
|
"learning_rate": 5.91615241822314e-06, |
|
"loss": 2.611, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.45313769355621, |
|
"learning_rate": 5.910961311004968e-06, |
|
"loss": 2.562, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4792309455327243, |
|
"learning_rate": 5.905769187945889e-06, |
|
"loss": 2.4339, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.5114020321139143, |
|
"learning_rate": 5.900576054835798e-06, |
|
"loss": 2.6092, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.4735465531601242, |
|
"learning_rate": 5.895381917465718e-06, |
|
"loss": 2.5793, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.5487065027763396, |
|
"learning_rate": 5.890186781627791e-06, |
|
"loss": 2.5962, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.5024300176369647, |
|
"learning_rate": 5.884990653115272e-06, |
|
"loss": 2.5927, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4864913176382235, |
|
"learning_rate": 5.879793537722525e-06, |
|
"loss": 2.6285, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4650853595013966, |
|
"learning_rate": 5.874595441245008e-06, |
|
"loss": 2.5393, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4410323863734167, |
|
"learning_rate": 5.8693963694792825e-06, |
|
"loss": 2.5834, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.47980881936799635, |
|
"learning_rate": 5.864196328222991e-06, |
|
"loss": 2.6105, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.47461687874847147, |
|
"learning_rate": 5.85899532327486e-06, |
|
"loss": 2.4965, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.48759088158174807, |
|
"learning_rate": 5.853793360434687e-06, |
|
"loss": 2.5663, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.46464502270002156, |
|
"learning_rate": 5.848590445503345e-06, |
|
"loss": 2.4455, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4564720384300828, |
|
"learning_rate": 5.843386584282762e-06, |
|
"loss": 2.5382, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4571946984830581, |
|
"learning_rate": 5.838181782575921e-06, |
|
"loss": 2.5204, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4640765229749111, |
|
"learning_rate": 5.832976046186859e-06, |
|
"loss": 2.5873, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 2.0440249803438744, |
|
"learning_rate": 5.82776938092065e-06, |
|
"loss": 2.5299, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.46834957185719356, |
|
"learning_rate": 5.822561792583409e-06, |
|
"loss": 2.5228, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4903612010783117, |
|
"learning_rate": 5.817353286982276e-06, |
|
"loss": 2.5942, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.512427094554068, |
|
"learning_rate": 5.812143869925415e-06, |
|
"loss": 2.6052, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.46534205422909963, |
|
"learning_rate": 5.8069335472220066e-06, |
|
"loss": 2.4895, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4722720547247072, |
|
"learning_rate": 5.801722324682243e-06, |
|
"loss": 2.6285, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.46411280503858243, |
|
"learning_rate": 5.796510208117317e-06, |
|
"loss": 2.6635, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.47830408831270155, |
|
"learning_rate": 5.79129720333942e-06, |
|
"loss": 2.4386, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.46511528354984577, |
|
"learning_rate": 5.786083316161735e-06, |
|
"loss": 2.5507, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.46147315372523634, |
|
"learning_rate": 5.780868552398424e-06, |
|
"loss": 2.6121, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.45998273988145016, |
|
"learning_rate": 5.775652917864633e-06, |
|
"loss": 2.5347, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4731335555024925, |
|
"learning_rate": 5.770436418376477e-06, |
|
"loss": 2.6014, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.479270433164431, |
|
"learning_rate": 5.765219059751033e-06, |
|
"loss": 2.5795, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4797835232182591, |
|
"learning_rate": 5.760000847806337e-06, |
|
"loss": 2.6403, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.5111352636958504, |
|
"learning_rate": 5.754781788361376e-06, |
|
"loss": 2.5856, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4926179638330841, |
|
"learning_rate": 5.749561887236088e-06, |
|
"loss": 2.5253, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.5019845321863531, |
|
"learning_rate": 5.744341150251342e-06, |
|
"loss": 2.6454, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4989025320361983, |
|
"learning_rate": 5.739119583228942e-06, |
|
"loss": 2.6333, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.4723994921199449, |
|
"learning_rate": 5.733897191991616e-06, |
|
"loss": 2.5501, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.47938048388193044, |
|
"learning_rate": 5.728673982363017e-06, |
|
"loss": 2.6093, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.48023812256085124, |
|
"learning_rate": 5.723449960167703e-06, |
|
"loss": 2.5757, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.4621477241758881, |
|
"learning_rate": 5.718225131231142e-06, |
|
"loss": 2.574, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.4799222862394652, |
|
"learning_rate": 5.712999501379701e-06, |
|
"loss": 2.7045, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.47353058331138714, |
|
"learning_rate": 5.70777307644064e-06, |
|
"loss": 2.6185, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.47427909253482975, |
|
"learning_rate": 5.702545862242107e-06, |
|
"loss": 2.6429, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.546562050785951, |
|
"learning_rate": 5.697317864613127e-06, |
|
"loss": 2.6603, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.460793440338349, |
|
"learning_rate": 5.692089089383603e-06, |
|
"loss": 2.5108, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.4673929279538334, |
|
"learning_rate": 5.686859542384301e-06, |
|
"loss": 2.5553, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.46737622177890714, |
|
"learning_rate": 5.68162922944685e-06, |
|
"loss": 2.6226, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.47916144561687396, |
|
"learning_rate": 5.676398156403733e-06, |
|
"loss": 2.5185, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.45965290486581706, |
|
"learning_rate": 5.671166329088278e-06, |
|
"loss": 2.6296, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5029248579960017, |
|
"learning_rate": 5.665933753334658e-06, |
|
"loss": 2.6806, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5146165001820266, |
|
"learning_rate": 5.660700434977877e-06, |
|
"loss": 2.5208, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5018285752673409, |
|
"learning_rate": 5.6554663798537715e-06, |
|
"loss": 2.4773, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.46568363511761657, |
|
"learning_rate": 5.650231593798994e-06, |
|
"loss": 2.5809, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.462076719005681, |
|
"learning_rate": 5.644996082651018e-06, |
|
"loss": 2.5424, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5042890233404459, |
|
"learning_rate": 5.639759852248119e-06, |
|
"loss": 2.6211, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5198241247262678, |
|
"learning_rate": 5.634522908429382e-06, |
|
"loss": 2.579, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.4421819437169893, |
|
"learning_rate": 5.629285257034682e-06, |
|
"loss": 2.6423, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.509712274909368, |
|
"learning_rate": 5.624046903904684e-06, |
|
"loss": 2.6235, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.4499040435103113, |
|
"learning_rate": 5.6188078548808366e-06, |
|
"loss": 2.5385, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.4845657588278896, |
|
"learning_rate": 5.613568115805364e-06, |
|
"loss": 2.59, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.4577609374918333, |
|
"learning_rate": 5.608327692521263e-06, |
|
"loss": 2.4767, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.4596689030135519, |
|
"learning_rate": 5.603086590872287e-06, |
|
"loss": 2.451, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5070629456756863, |
|
"learning_rate": 5.597844816702948e-06, |
|
"loss": 2.6694, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.5091104791838418, |
|
"learning_rate": 5.592602375858515e-06, |
|
"loss": 2.6015, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.48373983638643475, |
|
"learning_rate": 5.587359274184989e-06, |
|
"loss": 2.6154, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.4664897466965815, |
|
"learning_rate": 5.582115517529114e-06, |
|
"loss": 2.56, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.4886115063550699, |
|
"learning_rate": 5.576871111738368e-06, |
|
"loss": 2.5047, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.5054712168708521, |
|
"learning_rate": 5.571626062660947e-06, |
|
"loss": 2.5823, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4811021769752877, |
|
"learning_rate": 5.566380376145762e-06, |
|
"loss": 2.6173, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4886745089940987, |
|
"learning_rate": 5.5611340580424445e-06, |
|
"loss": 2.541, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.5146651147677359, |
|
"learning_rate": 5.555887114201324e-06, |
|
"loss": 2.6584, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.49482881172356813, |
|
"learning_rate": 5.550639550473426e-06, |
|
"loss": 2.6346, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4750767984545818, |
|
"learning_rate": 5.545391372710469e-06, |
|
"loss": 2.6069, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4901318689909373, |
|
"learning_rate": 5.540142586764862e-06, |
|
"loss": 2.517, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4862073878663385, |
|
"learning_rate": 5.5348931984896836e-06, |
|
"loss": 2.5365, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4954697897715803, |
|
"learning_rate": 5.529643213738689e-06, |
|
"loss": 2.5685, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.48910530743925806, |
|
"learning_rate": 5.524392638366296e-06, |
|
"loss": 2.5707, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.46529677298505107, |
|
"learning_rate": 5.519141478227582e-06, |
|
"loss": 2.6055, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4724731721275606, |
|
"learning_rate": 5.51388973917828e-06, |
|
"loss": 2.4231, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.48248346221332467, |
|
"learning_rate": 5.508637427074762e-06, |
|
"loss": 2.5628, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4940948766707954, |
|
"learning_rate": 5.503384547774043e-06, |
|
"loss": 2.6035, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4926925520221123, |
|
"learning_rate": 5.4981311071337704e-06, |
|
"loss": 2.5257, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.47903165435525097, |
|
"learning_rate": 5.4928771110122185e-06, |
|
"loss": 2.5344, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.45944567285937526, |
|
"learning_rate": 5.4876225652682776e-06, |
|
"loss": 2.5259, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4865373521405356, |
|
"learning_rate": 5.482367475761452e-06, |
|
"loss": 2.5353, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4873319935458423, |
|
"learning_rate": 5.477111848351856e-06, |
|
"loss": 2.4752, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.48027422308809087, |
|
"learning_rate": 5.471855688900198e-06, |
|
"loss": 2.4997, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4550806790390709, |
|
"learning_rate": 5.466599003267787e-06, |
|
"loss": 2.5089, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.47542219848601747, |
|
"learning_rate": 5.46134179731651e-06, |
|
"loss": 2.4786, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.48007773949504423, |
|
"learning_rate": 5.456084076908842e-06, |
|
"loss": 2.5718, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4803538301316188, |
|
"learning_rate": 5.450825847907824e-06, |
|
"loss": 2.6203, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.47772256956696457, |
|
"learning_rate": 5.445567116177073e-06, |
|
"loss": 2.4979, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.489455284119979, |
|
"learning_rate": 5.44030788758076e-06, |
|
"loss": 2.6273, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4568761578713499, |
|
"learning_rate": 5.435048167983613e-06, |
|
"loss": 2.6025, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.47313892530460566, |
|
"learning_rate": 5.429787963250905e-06, |
|
"loss": 2.6403, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.4793583514171144, |
|
"learning_rate": 5.424527279248452e-06, |
|
"loss": 2.5721, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.4890874996437913, |
|
"learning_rate": 5.419266121842608e-06, |
|
"loss": 2.6014, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.4872584502581997, |
|
"learning_rate": 5.414004496900246e-06, |
|
"loss": 2.5391, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.4875954656782858, |
|
"learning_rate": 5.408742410288769e-06, |
|
"loss": 2.5849, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.4768472039957424, |
|
"learning_rate": 5.403479867876087e-06, |
|
"loss": 2.6094, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5100692780040483, |
|
"learning_rate": 5.398216875530626e-06, |
|
"loss": 2.5434, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.481302514864662, |
|
"learning_rate": 5.392953439121311e-06, |
|
"loss": 2.5323, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.48384092481853075, |
|
"learning_rate": 5.387689564517558e-06, |
|
"loss": 2.4878, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.47298535010674064, |
|
"learning_rate": 5.382425257589277e-06, |
|
"loss": 2.5716, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5007152210333363, |
|
"learning_rate": 5.377160524206859e-06, |
|
"loss": 2.6258, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.485070559786248, |
|
"learning_rate": 5.371895370241168e-06, |
|
"loss": 2.4971, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.4997944111157027, |
|
"learning_rate": 5.366629801563541e-06, |
|
"loss": 2.5767, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.4777561622573537, |
|
"learning_rate": 5.361363824045772e-06, |
|
"loss": 2.5328, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.44479602845827954, |
|
"learning_rate": 5.356097443560116e-06, |
|
"loss": 2.4995, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.4797680405091772, |
|
"learning_rate": 5.350830665979276e-06, |
|
"loss": 2.5405, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5033448810968487, |
|
"learning_rate": 5.345563497176397e-06, |
|
"loss": 2.566, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5028626332441936, |
|
"learning_rate": 5.340295943025059e-06, |
|
"loss": 2.5561, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.49284364470070213, |
|
"learning_rate": 5.335028009399274e-06, |
|
"loss": 2.4635, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5017514250316445, |
|
"learning_rate": 5.329759702173477e-06, |
|
"loss": 2.6255, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.4739334253606109, |
|
"learning_rate": 5.324491027222519e-06, |
|
"loss": 2.595, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.4716542554992752, |
|
"learning_rate": 5.319221990421662e-06, |
|
"loss": 2.4898, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.47736644160252045, |
|
"learning_rate": 5.3139525976465675e-06, |
|
"loss": 2.5573, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5015361256067403, |
|
"learning_rate": 5.308682854773299e-06, |
|
"loss": 2.4593, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5172313752248289, |
|
"learning_rate": 5.30341276767831e-06, |
|
"loss": 2.4978, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.5025222461198174, |
|
"learning_rate": 5.298142342238434e-06, |
|
"loss": 2.5192, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.4934382818794458, |
|
"learning_rate": 5.292871584330885e-06, |
|
"loss": 2.5415, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.46466981214095376, |
|
"learning_rate": 5.287600499833246e-06, |
|
"loss": 2.6038, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.4514102883849857, |
|
"learning_rate": 5.282329094623466e-06, |
|
"loss": 2.5755, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.4651572668841609, |
|
"learning_rate": 5.27705737457985e-06, |
|
"loss": 2.6002, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.47035403781097285, |
|
"learning_rate": 5.271785345581057e-06, |
|
"loss": 2.6076, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.558092385820383, |
|
"learning_rate": 5.266513013506085e-06, |
|
"loss": 2.5004, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.4995290356092678, |
|
"learning_rate": 5.261240384234274e-06, |
|
"loss": 2.4649, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5200915076362117, |
|
"learning_rate": 5.255967463645296e-06, |
|
"loss": 2.6154, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.4691765751591343, |
|
"learning_rate": 5.2506942576191466e-06, |
|
"loss": 2.5766, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5212524698505389, |
|
"learning_rate": 5.24542077203614e-06, |
|
"loss": 2.6492, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.4885974928051037, |
|
"learning_rate": 5.240147012776899e-06, |
|
"loss": 2.6256, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.46886963906982426, |
|
"learning_rate": 5.234872985722356e-06, |
|
"loss": 2.5161, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5131916078183119, |
|
"learning_rate": 5.229598696753742e-06, |
|
"loss": 2.5721, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.48631933120069404, |
|
"learning_rate": 5.224324151752575e-06, |
|
"loss": 2.6087, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5230734398885549, |
|
"learning_rate": 5.219049356600665e-06, |
|
"loss": 2.6447, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.46952691861660395, |
|
"learning_rate": 5.213774317180096e-06, |
|
"loss": 2.6004, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5122081076553424, |
|
"learning_rate": 5.208499039373228e-06, |
|
"loss": 2.5246, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.48279313230710447, |
|
"learning_rate": 5.203223529062684e-06, |
|
"loss": 2.5698, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.4798270181057229, |
|
"learning_rate": 5.197947792131348e-06, |
|
"loss": 2.5335, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5080796932652959, |
|
"learning_rate": 5.192671834462356e-06, |
|
"loss": 2.4676, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.4755061589874045, |
|
"learning_rate": 5.187395661939088e-06, |
|
"loss": 2.6214, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.49052447971887797, |
|
"learning_rate": 5.18211928044517e-06, |
|
"loss": 2.6149, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.4854295080830313, |
|
"learning_rate": 5.176842695864454e-06, |
|
"loss": 2.5338, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.4953140529523987, |
|
"learning_rate": 5.1715659140810225e-06, |
|
"loss": 2.6496, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5113368757065954, |
|
"learning_rate": 5.166288940979175e-06, |
|
"loss": 2.5616, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.49523068242668017, |
|
"learning_rate": 5.161011782443429e-06, |
|
"loss": 2.6646, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5023566379347381, |
|
"learning_rate": 5.155734444358503e-06, |
|
"loss": 2.571, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.49808651635476875, |
|
"learning_rate": 5.150456932609321e-06, |
|
"loss": 2.6516, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.4959209621823317, |
|
"learning_rate": 5.145179253080997e-06, |
|
"loss": 2.6119, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.49478993331873267, |
|
"learning_rate": 5.139901411658834e-06, |
|
"loss": 2.5604, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.49521748807814564, |
|
"learning_rate": 5.134623414228315e-06, |
|
"loss": 2.6045, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.505958046734499, |
|
"learning_rate": 5.129345266675095e-06, |
|
"loss": 2.5482, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5680064255760643, |
|
"learning_rate": 5.124066974885002e-06, |
|
"loss": 2.449, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.46019852125115945, |
|
"learning_rate": 5.118788544744016e-06, |
|
"loss": 2.6183, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.5029274357124954, |
|
"learning_rate": 5.113509982138281e-06, |
|
"loss": 2.5581, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.4825843197766323, |
|
"learning_rate": 5.108231292954081e-06, |
|
"loss": 2.5896, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.4802353454848128, |
|
"learning_rate": 5.102952483077846e-06, |
|
"loss": 2.5429, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.4951458533300915, |
|
"learning_rate": 5.09767355839614e-06, |
|
"loss": 2.5251, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5068067420973925, |
|
"learning_rate": 5.09239452479565e-06, |
|
"loss": 2.7136, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5389926975778253, |
|
"learning_rate": 5.087115388163192e-06, |
|
"loss": 2.5274, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.4781809512293107, |
|
"learning_rate": 5.081836154385692e-06, |
|
"loss": 2.5492, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.49952186134161425, |
|
"learning_rate": 5.076556829350185e-06, |
|
"loss": 2.5255, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5260028037895784, |
|
"learning_rate": 5.071277418943807e-06, |
|
"loss": 2.5031, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.490261031562141, |
|
"learning_rate": 5.065997929053795e-06, |
|
"loss": 2.6002, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.48382156811125154, |
|
"learning_rate": 5.060718365567467e-06, |
|
"loss": 2.536, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5075473973733333, |
|
"learning_rate": 5.055438734372225e-06, |
|
"loss": 2.6029, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5325905771186163, |
|
"learning_rate": 5.050159041355551e-06, |
|
"loss": 2.6337, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5030898644635945, |
|
"learning_rate": 5.04487929240499e-06, |
|
"loss": 2.5812, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.49584254715744086, |
|
"learning_rate": 5.039599493408154e-06, |
|
"loss": 2.6302, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.508878791496978, |
|
"learning_rate": 5.034319650252707e-06, |
|
"loss": 2.4956, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.4751330183145929, |
|
"learning_rate": 5.029039768826366e-06, |
|
"loss": 2.4993, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5074354679439569, |
|
"learning_rate": 5.023759855016886e-06, |
|
"loss": 2.5129, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5178318193661343, |
|
"learning_rate": 5.018479914712065e-06, |
|
"loss": 2.4461, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.4931500423440274, |
|
"learning_rate": 5.0131999537997235e-06, |
|
"loss": 2.4936, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.5045471846685965, |
|
"learning_rate": 5.007919978167709e-06, |
|
"loss": 2.64, |
|
"step": 1496 |
|
} |
|
  ],
  "logging_steps": 1,
  "max_steps": 2985,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 374,
  "total_flos": 2.2320632728765071e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}