|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0997923404926186, |
|
"eval_steps": 500, |
|
"global_step": 872, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5686, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.6127, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5707, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.59, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5644, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5914, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5969, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.586, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5967, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.6217, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5745, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5426, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5519, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5843, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.6121, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.6276, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.6146, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5983, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.613, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5787, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5984, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 0.0, |
|
"learning_rate": 0.0, |
|
"loss": 3.5756, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 77.93226623535156, |
|
"learning_rate": 1.1363636363636364e-07, |
|
"loss": 3.5773, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 77.64237976074219, |
|
"learning_rate": 2.2727272727272729e-07, |
|
"loss": 3.5666, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 77.94294738769531, |
|
"learning_rate": 3.409090909090909e-07, |
|
"loss": 3.5691, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 77.58763885498047, |
|
"learning_rate": 4.5454545454545457e-07, |
|
"loss": 3.5667, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"grad_norm": 77.7476577758789, |
|
"learning_rate": 5.681818181818182e-07, |
|
"loss": 3.5572, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 75.63665008544922, |
|
"learning_rate": 6.818181818181818e-07, |
|
"loss": 3.4797, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 74.8890380859375, |
|
"learning_rate": 7.954545454545455e-07, |
|
"loss": 3.4472, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 69.56644439697266, |
|
"learning_rate": 9.090909090909091e-07, |
|
"loss": 3.2339, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 67.08919525146484, |
|
"learning_rate": 1.0227272727272729e-06, |
|
"loss": 3.1385, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 48.23773956298828, |
|
"learning_rate": 1.1363636363636364e-06, |
|
"loss": 2.4925, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 45.45056915283203, |
|
"learning_rate": 1.25e-06, |
|
"loss": 2.4319, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 38.1535758972168, |
|
"learning_rate": 1.3636363636363636e-06, |
|
"loss": 2.2507, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"grad_norm": 35.65153503417969, |
|
"learning_rate": 1.4772727272727275e-06, |
|
"loss": 2.1687, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 8.733210563659668, |
|
"learning_rate": 1.590909090909091e-06, |
|
"loss": 1.7378, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 8.957545280456543, |
|
"learning_rate": 1.7045454545454546e-06, |
|
"loss": 1.6735, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 5.594935417175293, |
|
"learning_rate": 1.8181818181818183e-06, |
|
"loss": 1.5907, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 4.634173393249512, |
|
"learning_rate": 1.931818181818182e-06, |
|
"loss": 1.5384, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.905609130859375, |
|
"learning_rate": 2.0454545454545457e-06, |
|
"loss": 1.5125, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 3.0466458797454834, |
|
"learning_rate": 2.1590909090909092e-06, |
|
"loss": 1.4422, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.975010633468628, |
|
"learning_rate": 2.2727272727272728e-06, |
|
"loss": 1.4561, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"grad_norm": 2.3392484188079834, |
|
"learning_rate": 2.3863636363636367e-06, |
|
"loss": 1.4086, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 2.107882261276245, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.4129, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.7513774633407593, |
|
"learning_rate": 2.6136363636363637e-06, |
|
"loss": 1.3709, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.5137622356414795, |
|
"learning_rate": 2.7272727272727272e-06, |
|
"loss": 1.3971, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.5218021869659424, |
|
"learning_rate": 2.8409090909090916e-06, |
|
"loss": 1.3869, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.3554155826568604, |
|
"learning_rate": 2.954545454545455e-06, |
|
"loss": 1.3618, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.2151788473129272, |
|
"learning_rate": 3.0681818181818186e-06, |
|
"loss": 1.3641, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.1324663162231445, |
|
"learning_rate": 3.181818181818182e-06, |
|
"loss": 1.3731, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"grad_norm": 1.0742617845535278, |
|
"learning_rate": 3.2954545454545456e-06, |
|
"loss": 1.3624, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.1047883033752441, |
|
"learning_rate": 3.409090909090909e-06, |
|
"loss": 1.3796, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.1034663915634155, |
|
"learning_rate": 3.522727272727273e-06, |
|
"loss": 1.3592, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.0702307224273682, |
|
"learning_rate": 3.6363636363636366e-06, |
|
"loss": 1.3698, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.0497913360595703, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 1.368, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.0903385877609253, |
|
"learning_rate": 3.863636363636364e-06, |
|
"loss": 1.3601, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.0922733545303345, |
|
"learning_rate": 3.9772727272727275e-06, |
|
"loss": 1.3738, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.0606937408447266, |
|
"learning_rate": 4.0909090909090915e-06, |
|
"loss": 1.3495, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"grad_norm": 1.0293612480163574, |
|
"learning_rate": 4.204545454545455e-06, |
|
"loss": 1.3531, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.081424355506897, |
|
"learning_rate": 4.3181818181818185e-06, |
|
"loss": 1.3758, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.160018801689148, |
|
"learning_rate": 4.4318181818181824e-06, |
|
"loss": 1.3745, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.1710997819900513, |
|
"learning_rate": 4.5454545454545455e-06, |
|
"loss": 1.391, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.1680262088775635, |
|
"learning_rate": 4.6590909090909095e-06, |
|
"loss": 1.3737, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.142930507659912, |
|
"learning_rate": 4.772727272727273e-06, |
|
"loss": 1.4017, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.087868332862854, |
|
"learning_rate": 4.8863636363636365e-06, |
|
"loss": 1.3784, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.1250782012939453, |
|
"learning_rate": 5e-06, |
|
"loss": 1.3761, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"grad_norm": 1.1870965957641602, |
|
"learning_rate": 5.113636363636364e-06, |
|
"loss": 1.3941, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.1462435722351074, |
|
"learning_rate": 5.2272727272727274e-06, |
|
"loss": 1.3671, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.1489235162734985, |
|
"learning_rate": 5.340909090909091e-06, |
|
"loss": 1.3945, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.0989032983779907, |
|
"learning_rate": 5.4545454545454545e-06, |
|
"loss": 1.3684, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.1220042705535889, |
|
"learning_rate": 5.568181818181818e-06, |
|
"loss": 1.3874, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.0458035469055176, |
|
"learning_rate": 5.681818181818183e-06, |
|
"loss": 1.4101, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.0890356302261353, |
|
"learning_rate": 5.795454545454546e-06, |
|
"loss": 1.4111, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.0306185483932495, |
|
"learning_rate": 5.90909090909091e-06, |
|
"loss": 1.375, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"grad_norm": 1.0304069519042969, |
|
"learning_rate": 6.022727272727273e-06, |
|
"loss": 1.397, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.0588593482971191, |
|
"learning_rate": 6.136363636363637e-06, |
|
"loss": 1.4001, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.1896718740463257, |
|
"learning_rate": 6.25e-06, |
|
"loss": 1.387, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.163329005241394, |
|
"learning_rate": 6.363636363636364e-06, |
|
"loss": 1.4085, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.0310306549072266, |
|
"learning_rate": 6.477272727272727e-06, |
|
"loss": 1.3856, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.047789216041565, |
|
"learning_rate": 6.590909090909091e-06, |
|
"loss": 1.4072, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.0043468475341797, |
|
"learning_rate": 6.704545454545454e-06, |
|
"loss": 1.3974, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.064086675643921, |
|
"learning_rate": 6.818181818181818e-06, |
|
"loss": 1.4015, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.1652333736419678, |
|
"learning_rate": 6.931818181818183e-06, |
|
"loss": 1.4042, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.0301768779754639, |
|
"learning_rate": 7.045454545454546e-06, |
|
"loss": 1.3901, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.0952802896499634, |
|
"learning_rate": 7.15909090909091e-06, |
|
"loss": 1.3807, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.165692925453186, |
|
"learning_rate": 7.272727272727273e-06, |
|
"loss": 1.3928, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.1395169496536255, |
|
"learning_rate": 7.386363636363637e-06, |
|
"loss": 1.4028, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.1742874383926392, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 1.4118, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.3489391803741455, |
|
"learning_rate": 7.613636363636364e-06, |
|
"loss": 1.3887, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.2398784160614014, |
|
"learning_rate": 7.727272727272727e-06, |
|
"loss": 1.383, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.3594183921813965, |
|
"learning_rate": 7.840909090909091e-06, |
|
"loss": 1.4146, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.2309668064117432, |
|
"learning_rate": 7.954545454545455e-06, |
|
"loss": 1.4056, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.1133555173873901, |
|
"learning_rate": 8.068181818181819e-06, |
|
"loss": 1.396, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.2161476612091064, |
|
"learning_rate": 8.181818181818183e-06, |
|
"loss": 1.4003, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.2214394807815552, |
|
"learning_rate": 8.295454545454547e-06, |
|
"loss": 1.3861, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.2064229249954224, |
|
"learning_rate": 8.40909090909091e-06, |
|
"loss": 1.4051, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.1662726402282715, |
|
"learning_rate": 8.522727272727273e-06, |
|
"loss": 1.3758, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.295553207397461, |
|
"learning_rate": 8.636363636363637e-06, |
|
"loss": 1.4256, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.2915432453155518, |
|
"learning_rate": 8.750000000000001e-06, |
|
"loss": 1.3965, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.1948374509811401, |
|
"learning_rate": 8.863636363636365e-06, |
|
"loss": 1.4115, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.1821595430374146, |
|
"learning_rate": 8.977272727272727e-06, |
|
"loss": 1.3967, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.151677131652832, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 1.3866, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.2359035015106201, |
|
"learning_rate": 9.204545454545455e-06, |
|
"loss": 1.4052, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.4146509170532227, |
|
"learning_rate": 9.318181818181819e-06, |
|
"loss": 1.3826, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.2139616012573242, |
|
"learning_rate": 9.431818181818183e-06, |
|
"loss": 1.3798, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.205654263496399, |
|
"learning_rate": 9.545454545454547e-06, |
|
"loss": 1.3977, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.243720293045044, |
|
"learning_rate": 9.65909090909091e-06, |
|
"loss": 1.3886, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.4605085849761963, |
|
"learning_rate": 9.772727272727273e-06, |
|
"loss": 1.372, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.4088302850723267, |
|
"learning_rate": 9.886363636363637e-06, |
|
"loss": 1.391, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.425614595413208, |
|
"learning_rate": 1e-05, |
|
"loss": 1.42, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.3304733037948608, |
|
"learning_rate": 9.987244897959184e-06, |
|
"loss": 1.3856, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.3015329837799072, |
|
"learning_rate": 9.974489795918369e-06, |
|
"loss": 1.3929, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.3881584405899048, |
|
"learning_rate": 9.961734693877552e-06, |
|
"loss": 1.3913, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.35237455368042, |
|
"learning_rate": 9.948979591836737e-06, |
|
"loss": 1.4069, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.2924104928970337, |
|
"learning_rate": 9.93622448979592e-06, |
|
"loss": 1.3928, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.3976210355758667, |
|
"learning_rate": 9.923469387755103e-06, |
|
"loss": 1.3904, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.1047642230987549, |
|
"learning_rate": 9.910714285714288e-06, |
|
"loss": 1.3675, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.0460392236709595, |
|
"learning_rate": 9.89795918367347e-06, |
|
"loss": 1.3787, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.1290128231048584, |
|
"learning_rate": 9.885204081632654e-06, |
|
"loss": 1.3997, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.1596468687057495, |
|
"learning_rate": 9.872448979591838e-06, |
|
"loss": 1.3838, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.18632972240448, |
|
"learning_rate": 9.859693877551022e-06, |
|
"loss": 1.3773, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.2686792612075806, |
|
"learning_rate": 9.846938775510205e-06, |
|
"loss": 1.3823, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.2351279258728027, |
|
"learning_rate": 9.834183673469388e-06, |
|
"loss": 1.3993, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.3058531284332275, |
|
"learning_rate": 9.821428571428573e-06, |
|
"loss": 1.362, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.3058429956436157, |
|
"learning_rate": 9.808673469387756e-06, |
|
"loss": 1.3728, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.3963571786880493, |
|
"learning_rate": 9.795918367346939e-06, |
|
"loss": 1.3628, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.2370892763137817, |
|
"learning_rate": 9.783163265306123e-06, |
|
"loss": 1.3771, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.1372427940368652, |
|
"learning_rate": 9.770408163265307e-06, |
|
"loss": 1.3715, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.206458330154419, |
|
"learning_rate": 9.75765306122449e-06, |
|
"loss": 1.3791, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.201851487159729, |
|
"learning_rate": 9.744897959183674e-06, |
|
"loss": 1.3547, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.182597041130066, |
|
"learning_rate": 9.732142857142858e-06, |
|
"loss": 1.3697, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.1894217729568481, |
|
"learning_rate": 9.719387755102042e-06, |
|
"loss": 1.3512, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.448066234588623, |
|
"learning_rate": 9.706632653061225e-06, |
|
"loss": 1.3765, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.1208083629608154, |
|
"learning_rate": 9.693877551020408e-06, |
|
"loss": 1.3749, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.0563664436340332, |
|
"learning_rate": 9.681122448979593e-06, |
|
"loss": 1.352, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.41444993019104, |
|
"learning_rate": 9.668367346938776e-06, |
|
"loss": 1.3603, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.186637043952942, |
|
"learning_rate": 9.65561224489796e-06, |
|
"loss": 1.3805, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.2297229766845703, |
|
"learning_rate": 9.642857142857144e-06, |
|
"loss": 1.3559, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.3162171840667725, |
|
"learning_rate": 9.630102040816327e-06, |
|
"loss": 1.3651, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.358998417854309, |
|
"learning_rate": 9.617346938775512e-06, |
|
"loss": 1.3841, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.2178298234939575, |
|
"learning_rate": 9.604591836734695e-06, |
|
"loss": 1.3206, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.3197548389434814, |
|
"learning_rate": 9.591836734693878e-06, |
|
"loss": 1.3747, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.2247390747070312, |
|
"learning_rate": 9.579081632653063e-06, |
|
"loss": 1.3524, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.2377766370773315, |
|
"learning_rate": 9.566326530612246e-06, |
|
"loss": 1.376, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.1804473400115967, |
|
"learning_rate": 9.55357142857143e-06, |
|
"loss": 1.3553, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.2282185554504395, |
|
"learning_rate": 9.540816326530612e-06, |
|
"loss": 1.3956, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.222644567489624, |
|
"learning_rate": 9.528061224489797e-06, |
|
"loss": 1.3485, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.0799063444137573, |
|
"learning_rate": 9.51530612244898e-06, |
|
"loss": 1.3462, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.0759501457214355, |
|
"learning_rate": 9.502551020408163e-06, |
|
"loss": 1.3571, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.391052007675171, |
|
"learning_rate": 9.489795918367348e-06, |
|
"loss": 1.3797, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.2906898260116577, |
|
"learning_rate": 9.477040816326531e-06, |
|
"loss": 1.3281, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.2738357782363892, |
|
"learning_rate": 9.464285714285714e-06, |
|
"loss": 1.3648, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.4676612615585327, |
|
"learning_rate": 9.451530612244899e-06, |
|
"loss": 1.3382, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.2316924333572388, |
|
"learning_rate": 9.438775510204082e-06, |
|
"loss": 1.3448, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.183659315109253, |
|
"learning_rate": 9.426020408163265e-06, |
|
"loss": 1.3405, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.2155417203903198, |
|
"learning_rate": 9.41326530612245e-06, |
|
"loss": 1.3744, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.2606340646743774, |
|
"learning_rate": 9.400510204081633e-06, |
|
"loss": 1.3449, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.1220340728759766, |
|
"learning_rate": 9.387755102040818e-06, |
|
"loss": 1.3704, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.1462124586105347, |
|
"learning_rate": 9.375000000000001e-06, |
|
"loss": 1.349, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.0098296403884888, |
|
"learning_rate": 9.362244897959184e-06, |
|
"loss": 1.3564, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.06204354763031, |
|
"learning_rate": 9.349489795918369e-06, |
|
"loss": 1.3233, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.0313581228256226, |
|
"learning_rate": 9.336734693877552e-06, |
|
"loss": 1.335, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.0908300876617432, |
|
"learning_rate": 9.323979591836737e-06, |
|
"loss": 1.3394, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.1179544925689697, |
|
"learning_rate": 9.31122448979592e-06, |
|
"loss": 1.3774, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.416581630706787, |
|
"learning_rate": 9.298469387755103e-06, |
|
"loss": 1.3598, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.228285312652588, |
|
"learning_rate": 9.285714285714288e-06, |
|
"loss": 1.3448, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.2312650680541992, |
|
"learning_rate": 9.27295918367347e-06, |
|
"loss": 1.3408, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.2846862077713013, |
|
"learning_rate": 9.260204081632654e-06, |
|
"loss": 1.3746, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.4658101797103882, |
|
"learning_rate": 9.247448979591837e-06, |
|
"loss": 1.3501, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.335235357284546, |
|
"learning_rate": 9.234693877551022e-06, |
|
"loss": 1.3394, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.367955207824707, |
|
"learning_rate": 9.221938775510205e-06, |
|
"loss": 1.3385, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.3143895864486694, |
|
"learning_rate": 9.209183673469388e-06, |
|
"loss": 1.3301, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.4805288314819336, |
|
"learning_rate": 9.196428571428571e-06, |
|
"loss": 1.3193, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.3719357252120972, |
|
"learning_rate": 9.183673469387756e-06, |
|
"loss": 1.345, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.4281933307647705, |
|
"learning_rate": 9.170918367346939e-06, |
|
"loss": 1.3412, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.3063050508499146, |
|
"learning_rate": 9.158163265306124e-06, |
|
"loss": 1.3251, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.3859777450561523, |
|
"learning_rate": 9.145408163265307e-06, |
|
"loss": 1.3515, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.2141677141189575, |
|
"learning_rate": 9.13265306122449e-06, |
|
"loss": 1.333, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.2974693775177002, |
|
"learning_rate": 9.119897959183674e-06, |
|
"loss": 1.3433, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.1979682445526123, |
|
"learning_rate": 9.107142857142858e-06, |
|
"loss": 1.35, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.1833395957946777, |
|
"learning_rate": 9.094387755102042e-06, |
|
"loss": 1.3202, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.1826756000518799, |
|
"learning_rate": 9.081632653061225e-06, |
|
"loss": 1.3266, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.306878685951233, |
|
"learning_rate": 9.068877551020409e-06, |
|
"loss": 1.3459, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.3741576671600342, |
|
"learning_rate": 9.056122448979593e-06, |
|
"loss": 1.3479, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.4168845415115356, |
|
"learning_rate": 9.043367346938776e-06, |
|
"loss": 1.3221, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.4802296161651611, |
|
"learning_rate": 9.03061224489796e-06, |
|
"loss": 1.3377, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.3033307790756226, |
|
"learning_rate": 9.017857142857144e-06, |
|
"loss": 1.3219, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.3650187253952026, |
|
"learning_rate": 9.005102040816327e-06, |
|
"loss": 1.3257, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.263314962387085, |
|
"learning_rate": 8.992346938775512e-06, |
|
"loss": 1.3142, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.2871077060699463, |
|
"learning_rate": 8.979591836734695e-06, |
|
"loss": 1.3418, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.2486342191696167, |
|
"learning_rate": 8.966836734693878e-06, |
|
"loss": 1.311, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.309653878211975, |
|
"learning_rate": 8.954081632653061e-06, |
|
"loss": 1.3147, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.2959657907485962, |
|
"learning_rate": 8.941326530612246e-06, |
|
"loss": 1.3277, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.2131139039993286, |
|
"learning_rate": 8.92857142857143e-06, |
|
"loss": 1.3251, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.2249537706375122, |
|
"learning_rate": 8.915816326530612e-06, |
|
"loss": 1.3304, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1446560621261597, |
|
"learning_rate": 8.903061224489795e-06, |
|
"loss": 1.3042, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1486533880233765, |
|
"learning_rate": 8.89030612244898e-06, |
|
"loss": 1.3105, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.113291621208191, |
|
"learning_rate": 8.877551020408163e-06, |
|
"loss": 1.3226, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.1082658767700195, |
|
"learning_rate": 8.864795918367348e-06, |
|
"loss": 1.3176, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.0742919445037842, |
|
"learning_rate": 8.852040816326531e-06, |
|
"loss": 1.3221, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.058107614517212, |
|
"learning_rate": 8.839285714285714e-06, |
|
"loss": 1.3176, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.05449640750885, |
|
"learning_rate": 8.826530612244899e-06, |
|
"loss": 1.3188, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.019459843635559, |
|
"learning_rate": 8.813775510204082e-06, |
|
"loss": 1.3058, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.0865976810455322, |
|
"learning_rate": 8.801020408163265e-06, |
|
"loss": 1.3143, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.0287652015686035, |
|
"learning_rate": 8.78826530612245e-06, |
|
"loss": 1.3002, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.0019289255142212, |
|
"learning_rate": 8.775510204081633e-06, |
|
"loss": 1.3229, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.9947201013565063, |
|
"learning_rate": 8.762755102040818e-06, |
|
"loss": 1.3127, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.0613619089126587, |
|
"learning_rate": 8.750000000000001e-06, |
|
"loss": 1.3231, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.1687304973602295, |
|
"learning_rate": 8.737244897959184e-06, |
|
"loss": 1.3026, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.2115646600723267, |
|
"learning_rate": 8.724489795918369e-06, |
|
"loss": 1.3363, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.2404974699020386, |
|
"learning_rate": 8.711734693877552e-06, |
|
"loss": 1.3328, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.2207895517349243, |
|
"learning_rate": 8.698979591836737e-06, |
|
"loss": 1.3003, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.3173328638076782, |
|
"learning_rate": 8.68622448979592e-06, |
|
"loss": 1.3428, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.3301403522491455, |
|
"learning_rate": 8.673469387755103e-06, |
|
"loss": 1.2952, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.389487385749817, |
|
"learning_rate": 8.660714285714286e-06, |
|
"loss": 1.311, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.4389171600341797, |
|
"learning_rate": 8.64795918367347e-06, |
|
"loss": 1.3105, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.4631147384643555, |
|
"learning_rate": 8.635204081632654e-06, |
|
"loss": 1.3253, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.3370922803878784, |
|
"learning_rate": 8.622448979591837e-06, |
|
"loss": 1.2933, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.4107997417449951, |
|
"learning_rate": 8.609693877551022e-06, |
|
"loss": 1.297, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.1963098049163818, |
|
"learning_rate": 8.596938775510205e-06, |
|
"loss": 1.3093, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.245422124862671, |
|
"learning_rate": 8.584183673469388e-06, |
|
"loss": 1.2944, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.1765896081924438, |
|
"learning_rate": 8.571428571428571e-06, |
|
"loss": 1.3108, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.3151187896728516, |
|
"learning_rate": 8.558673469387756e-06, |
|
"loss": 1.3015, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.1630090475082397, |
|
"learning_rate": 8.545918367346939e-06, |
|
"loss": 1.2917, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.2731425762176514, |
|
"learning_rate": 8.533163265306124e-06, |
|
"loss": 1.299, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.2473809719085693, |
|
"learning_rate": 8.520408163265307e-06, |
|
"loss": 1.2883, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.2504627704620361, |
|
"learning_rate": 8.50765306122449e-06, |
|
"loss": 1.3029, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.124976634979248, |
|
"learning_rate": 8.494897959183675e-06, |
|
"loss": 1.3063, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.2146427631378174, |
|
"learning_rate": 8.482142857142858e-06, |
|
"loss": 1.3139, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.1318598985671997, |
|
"learning_rate": 8.469387755102042e-06, |
|
"loss": 1.3245, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.175479769706726, |
|
"learning_rate": 8.456632653061225e-06, |
|
"loss": 1.2904, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.06119966506958, |
|
"learning_rate": 8.443877551020409e-06, |
|
"loss": 1.2826, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.0992507934570312, |
|
"learning_rate": 8.431122448979593e-06, |
|
"loss": 1.3068, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0282840728759766, |
|
"learning_rate": 8.418367346938776e-06, |
|
"loss": 1.2794, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0799790620803833, |
|
"learning_rate": 8.40561224489796e-06, |
|
"loss": 1.2963, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.9920130968093872, |
|
"learning_rate": 8.392857142857144e-06, |
|
"loss": 1.2985, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.9600382447242737, |
|
"learning_rate": 8.380102040816327e-06, |
|
"loss": 1.3017, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0030544996261597, |
|
"learning_rate": 8.36734693877551e-06, |
|
"loss": 1.2824, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0658328533172607, |
|
"learning_rate": 8.354591836734695e-06, |
|
"loss": 1.2738, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.0285568237304688, |
|
"learning_rate": 8.341836734693878e-06, |
|
"loss": 1.2932, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.1148922443389893, |
|
"learning_rate": 8.329081632653061e-06, |
|
"loss": 1.2727, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.0424553155899048, |
|
"learning_rate": 8.316326530612246e-06, |
|
"loss": 1.2917, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.09824538230896, |
|
"learning_rate": 8.30357142857143e-06, |
|
"loss": 1.2861, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.1441254615783691, |
|
"learning_rate": 8.290816326530612e-06, |
|
"loss": 1.2894, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.2667628526687622, |
|
"learning_rate": 8.278061224489795e-06, |
|
"loss": 1.2738, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.3604241609573364, |
|
"learning_rate": 8.26530612244898e-06, |
|
"loss": 1.2889, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.3580222129821777, |
|
"learning_rate": 8.252551020408163e-06, |
|
"loss": 1.3088, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.2741780281066895, |
|
"learning_rate": 8.239795918367348e-06, |
|
"loss": 1.2915, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.3847053050994873, |
|
"learning_rate": 8.227040816326531e-06, |
|
"loss": 1.3189, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.2596681118011475, |
|
"learning_rate": 8.214285714285714e-06, |
|
"loss": 1.2751, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.2427873611450195, |
|
"learning_rate": 8.201530612244899e-06, |
|
"loss": 1.2938, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.2609047889709473, |
|
"learning_rate": 8.188775510204082e-06, |
|
"loss": 1.2948, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.2723376750946045, |
|
"learning_rate": 8.176020408163265e-06, |
|
"loss": 1.2949, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.1737812757492065, |
|
"learning_rate": 8.16326530612245e-06, |
|
"loss": 1.2606, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.237030267715454, |
|
"learning_rate": 8.150510204081633e-06, |
|
"loss": 1.2747, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.162434697151184, |
|
"learning_rate": 8.137755102040818e-06, |
|
"loss": 1.2841, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.2295739650726318, |
|
"learning_rate": 8.125000000000001e-06, |
|
"loss": 1.2922, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.1049543619155884, |
|
"learning_rate": 8.112244897959184e-06, |
|
"loss": 1.2724, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.1978216171264648, |
|
"learning_rate": 8.099489795918369e-06, |
|
"loss": 1.294, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.0655226707458496, |
|
"learning_rate": 8.086734693877552e-06, |
|
"loss": 1.2465, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.1613059043884277, |
|
"learning_rate": 8.073979591836735e-06, |
|
"loss": 1.2683, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.0504621267318726, |
|
"learning_rate": 8.06122448979592e-06, |
|
"loss": 1.2386, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.087553858757019, |
|
"learning_rate": 8.048469387755103e-06, |
|
"loss": 1.2491, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.946223258972168, |
|
"learning_rate": 8.035714285714286e-06, |
|
"loss": 1.2514, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.9961144328117371, |
|
"learning_rate": 8.02295918367347e-06, |
|
"loss": 1.2781, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.9003639221191406, |
|
"learning_rate": 8.010204081632654e-06, |
|
"loss": 1.2567, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.0217918157577515, |
|
"learning_rate": 7.997448979591837e-06, |
|
"loss": 1.2775, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.002084732055664, |
|
"learning_rate": 7.98469387755102e-06, |
|
"loss": 1.2564, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.1564198732376099, |
|
"learning_rate": 7.971938775510205e-06, |
|
"loss": 1.2637, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.1586065292358398, |
|
"learning_rate": 7.959183673469388e-06, |
|
"loss": 1.2805, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.2958130836486816, |
|
"learning_rate": 7.946428571428571e-06, |
|
"loss": 1.2673, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.2151861190795898, |
|
"learning_rate": 7.933673469387756e-06, |
|
"loss": 1.2632, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.2508829832077026, |
|
"learning_rate": 7.920918367346939e-06, |
|
"loss": 1.265, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.1336225271224976, |
|
"learning_rate": 7.908163265306124e-06, |
|
"loss": 1.257, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.1592440605163574, |
|
"learning_rate": 7.895408163265307e-06, |
|
"loss": 1.2628, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.2461146116256714, |
|
"learning_rate": 7.88265306122449e-06, |
|
"loss": 1.2551, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.3664628267288208, |
|
"learning_rate": 7.869897959183675e-06, |
|
"loss": 1.2658, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.201025128364563, |
|
"learning_rate": 7.857142857142858e-06, |
|
"loss": 1.2746, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.1980972290039062, |
|
"learning_rate": 7.844387755102042e-06, |
|
"loss": 1.2641, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.2280415296554565, |
|
"learning_rate": 7.831632653061226e-06, |
|
"loss": 1.2813, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.1427682638168335, |
|
"learning_rate": 7.818877551020409e-06, |
|
"loss": 1.2531, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.162741780281067, |
|
"learning_rate": 7.806122448979593e-06, |
|
"loss": 1.2598, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.1492100954055786, |
|
"learning_rate": 7.793367346938776e-06, |
|
"loss": 1.257, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.2014518976211548, |
|
"learning_rate": 7.78061224489796e-06, |
|
"loss": 1.2553, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.1967196464538574, |
|
"learning_rate": 7.767857142857144e-06, |
|
"loss": 1.2496, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.2154698371887207, |
|
"learning_rate": 7.755102040816327e-06, |
|
"loss": 1.25, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.1646188497543335, |
|
"learning_rate": 7.74234693877551e-06, |
|
"loss": 1.2374, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.0902819633483887, |
|
"learning_rate": 7.729591836734695e-06, |
|
"loss": 1.2626, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"grad_norm": 1.1101752519607544, |
|
"learning_rate": 7.716836734693878e-06, |
|
"loss": 1.2402, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.0915471315383911, |
|
"learning_rate": 7.704081632653061e-06, |
|
"loss": 1.2652, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.099541425704956, |
|
"learning_rate": 7.691326530612245e-06, |
|
"loss": 1.2388, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.0305002927780151, |
|
"learning_rate": 7.67857142857143e-06, |
|
"loss": 1.2598, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.1155153512954712, |
|
"learning_rate": 7.665816326530612e-06, |
|
"loss": 1.2334, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.0462146997451782, |
|
"learning_rate": 7.653061224489796e-06, |
|
"loss": 1.2448, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.0846874713897705, |
|
"learning_rate": 7.64030612244898e-06, |
|
"loss": 1.248, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 0.9629496335983276, |
|
"learning_rate": 7.627551020408163e-06, |
|
"loss": 1.2395, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"grad_norm": 1.0308563709259033, |
|
"learning_rate": 7.614795918367348e-06, |
|
"loss": 1.2356, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.026279091835022, |
|
"learning_rate": 7.602040816326531e-06, |
|
"loss": 1.2351, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.0194913148880005, |
|
"learning_rate": 7.589285714285714e-06, |
|
"loss": 1.2461, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 0.9419485330581665, |
|
"learning_rate": 7.576530612244899e-06, |
|
"loss": 1.269, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.0491052865982056, |
|
"learning_rate": 7.563775510204082e-06, |
|
"loss": 1.2414, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.0324506759643555, |
|
"learning_rate": 7.551020408163265e-06, |
|
"loss": 1.2419, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.083568811416626, |
|
"learning_rate": 7.53826530612245e-06, |
|
"loss": 1.2263, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.0389920473098755, |
|
"learning_rate": 7.525510204081633e-06, |
|
"loss": 1.269, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"grad_norm": 1.1106739044189453, |
|
"learning_rate": 7.512755102040817e-06, |
|
"loss": 1.2248, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.0161393880844116, |
|
"learning_rate": 7.500000000000001e-06, |
|
"loss": 1.2392, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.0761648416519165, |
|
"learning_rate": 7.487244897959184e-06, |
|
"loss": 1.2351, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.024794340133667, |
|
"learning_rate": 7.474489795918368e-06, |
|
"loss": 1.2514, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.1029651165008545, |
|
"learning_rate": 7.461734693877551e-06, |
|
"loss": 1.2457, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.0931360721588135, |
|
"learning_rate": 7.448979591836736e-06, |
|
"loss": 1.236, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.081899642944336, |
|
"learning_rate": 7.436224489795919e-06, |
|
"loss": 1.2381, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.0580326318740845, |
|
"learning_rate": 7.423469387755102e-06, |
|
"loss": 1.2449, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"grad_norm": 1.1197216510772705, |
|
"learning_rate": 7.410714285714287e-06, |
|
"loss": 1.2424, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.0201445817947388, |
|
"learning_rate": 7.39795918367347e-06, |
|
"loss": 1.2405, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 1.017836093902588, |
|
"learning_rate": 7.385204081632653e-06, |
|
"loss": 1.2466, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9251910448074341, |
|
"learning_rate": 7.372448979591838e-06, |
|
"loss": 1.2306, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9502259492874146, |
|
"learning_rate": 7.359693877551021e-06, |
|
"loss": 1.2475, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9273219704627991, |
|
"learning_rate": 7.346938775510205e-06, |
|
"loss": 1.2385, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9324339628219604, |
|
"learning_rate": 7.334183673469388e-06, |
|
"loss": 1.238, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9171964526176453, |
|
"learning_rate": 7.321428571428572e-06, |
|
"loss": 1.2172, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"grad_norm": 0.9358417987823486, |
|
"learning_rate": 7.308673469387756e-06, |
|
"loss": 1.2216, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9297531843185425, |
|
"learning_rate": 7.295918367346939e-06, |
|
"loss": 1.2133, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9529639482498169, |
|
"learning_rate": 7.283163265306124e-06, |
|
"loss": 1.2386, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9522646069526672, |
|
"learning_rate": 7.270408163265307e-06, |
|
"loss": 1.2225, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9964836835861206, |
|
"learning_rate": 7.25765306122449e-06, |
|
"loss": 1.2449, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9964749813079834, |
|
"learning_rate": 7.244897959183675e-06, |
|
"loss": 1.2212, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9914069175720215, |
|
"learning_rate": 7.232142857142858e-06, |
|
"loss": 1.2384, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 0.9886379837989807, |
|
"learning_rate": 7.219387755102042e-06, |
|
"loss": 1.2302, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"grad_norm": 1.0108413696289062, |
|
"learning_rate": 7.206632653061226e-06, |
|
"loss": 1.2077, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0429081916809082, |
|
"learning_rate": 7.193877551020409e-06, |
|
"loss": 1.2305, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.06741464138031, |
|
"learning_rate": 7.181122448979593e-06, |
|
"loss": 1.2352, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0488401651382446, |
|
"learning_rate": 7.168367346938776e-06, |
|
"loss": 1.2199, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.1095186471939087, |
|
"learning_rate": 7.15561224489796e-06, |
|
"loss": 1.2296, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.033311367034912, |
|
"learning_rate": 7.1428571428571436e-06, |
|
"loss": 1.2062, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0777385234832764, |
|
"learning_rate": 7.130102040816327e-06, |
|
"loss": 1.2072, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"grad_norm": 1.0967751741409302, |
|
"learning_rate": 7.1173469387755114e-06, |
|
"loss": 1.2263, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.134886622428894, |
|
"learning_rate": 7.1045918367346945e-06, |
|
"loss": 1.2275, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0573385953903198, |
|
"learning_rate": 7.091836734693878e-06, |
|
"loss": 1.2284, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9954231977462769, |
|
"learning_rate": 7.079081632653062e-06, |
|
"loss": 1.218, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9254037737846375, |
|
"learning_rate": 7.0663265306122455e-06, |
|
"loss": 1.2101, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9205686450004578, |
|
"learning_rate": 7.053571428571429e-06, |
|
"loss": 1.2323, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9159219264984131, |
|
"learning_rate": 7.0408163265306125e-06, |
|
"loss": 1.2403, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 0.9511287212371826, |
|
"learning_rate": 7.0280612244897964e-06, |
|
"loss": 1.2316, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 1.0230053663253784, |
|
"learning_rate": 7.01530612244898e-06, |
|
"loss": 1.2331, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.996154248714447, |
|
"learning_rate": 7.0025510204081634e-06, |
|
"loss": 1.2115, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.9797936677932739, |
|
"learning_rate": 6.989795918367348e-06, |
|
"loss": 1.2189, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0028291940689087, |
|
"learning_rate": 6.977040816326531e-06, |
|
"loss": 1.2397, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0954959392547607, |
|
"learning_rate": 6.964285714285714e-06, |
|
"loss": 1.218, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0602993965148926, |
|
"learning_rate": 6.951530612244899e-06, |
|
"loss": 1.2181, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0718706846237183, |
|
"learning_rate": 6.938775510204082e-06, |
|
"loss": 1.2171, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 1.0047320127487183, |
|
"learning_rate": 6.926020408163265e-06, |
|
"loss": 1.2092, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 0.9684332609176636, |
|
"learning_rate": 6.91326530612245e-06, |
|
"loss": 1.2095, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.8635539412498474, |
|
"learning_rate": 6.900510204081633e-06, |
|
"loss": 1.2299, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9231078028678894, |
|
"learning_rate": 6.887755102040817e-06, |
|
"loss": 1.2219, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.8896716833114624, |
|
"learning_rate": 6.875e-06, |
|
"loss": 1.2223, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.922714114189148, |
|
"learning_rate": 6.862244897959184e-06, |
|
"loss": 1.1918, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9075490832328796, |
|
"learning_rate": 6.849489795918368e-06, |
|
"loss": 1.2137, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.9079575538635254, |
|
"learning_rate": 6.836734693877551e-06, |
|
"loss": 1.1861, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.8482446670532227, |
|
"learning_rate": 6.823979591836736e-06, |
|
"loss": 1.2168, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 0.856937050819397, |
|
"learning_rate": 6.811224489795919e-06, |
|
"loss": 1.1923, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.8377067446708679, |
|
"learning_rate": 6.798469387755102e-06, |
|
"loss": 1.2248, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.8343848586082458, |
|
"learning_rate": 6.785714285714287e-06, |
|
"loss": 1.1971, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.8501905798912048, |
|
"learning_rate": 6.77295918367347e-06, |
|
"loss": 1.2159, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9495564103126526, |
|
"learning_rate": 6.760204081632653e-06, |
|
"loss": 1.21, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9224797487258911, |
|
"learning_rate": 6.747448979591837e-06, |
|
"loss": 1.219, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9395394325256348, |
|
"learning_rate": 6.734693877551021e-06, |
|
"loss": 1.1939, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9465500712394714, |
|
"learning_rate": 6.721938775510205e-06, |
|
"loss": 1.2176, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 0.9452735185623169, |
|
"learning_rate": 6.709183673469388e-06, |
|
"loss": 1.2025, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9032958745956421, |
|
"learning_rate": 6.696428571428571e-06, |
|
"loss": 1.2151, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9356412291526794, |
|
"learning_rate": 6.683673469387756e-06, |
|
"loss": 1.1981, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9610426425933838, |
|
"learning_rate": 6.670918367346939e-06, |
|
"loss": 1.2212, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0291876792907715, |
|
"learning_rate": 6.658163265306124e-06, |
|
"loss": 1.2206, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 1.0189391374588013, |
|
"learning_rate": 6.645408163265307e-06, |
|
"loss": 1.1959, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9816426038742065, |
|
"learning_rate": 6.63265306122449e-06, |
|
"loss": 1.2019, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.9211484789848328, |
|
"learning_rate": 6.619897959183675e-06, |
|
"loss": 1.1775, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 0.944790244102478, |
|
"learning_rate": 6.607142857142858e-06, |
|
"loss": 1.1766, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.9401800036430359, |
|
"learning_rate": 6.594387755102042e-06, |
|
"loss": 1.2087, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.9268150925636292, |
|
"learning_rate": 6.581632653061225e-06, |
|
"loss": 1.196, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.9066458940505981, |
|
"learning_rate": 6.568877551020409e-06, |
|
"loss": 1.1832, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.9003369212150574, |
|
"learning_rate": 6.556122448979593e-06, |
|
"loss": 1.2205, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.842934250831604, |
|
"learning_rate": 6.543367346938776e-06, |
|
"loss": 1.1967, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.8342365622520447, |
|
"learning_rate": 6.530612244897959e-06, |
|
"loss": 1.1894, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.9125496745109558, |
|
"learning_rate": 6.517857142857144e-06, |
|
"loss": 1.2019, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 0.9107596278190613, |
|
"learning_rate": 6.505102040816327e-06, |
|
"loss": 1.2032, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.951339066028595, |
|
"learning_rate": 6.4923469387755115e-06, |
|
"loss": 1.1849, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9225314259529114, |
|
"learning_rate": 6.4795918367346946e-06, |
|
"loss": 1.2101, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9681543707847595, |
|
"learning_rate": 6.466836734693878e-06, |
|
"loss": 1.2233, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9564622640609741, |
|
"learning_rate": 6.454081632653062e-06, |
|
"loss": 1.2006, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9844996333122253, |
|
"learning_rate": 6.4413265306122455e-06, |
|
"loss": 1.2073, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9823477268218994, |
|
"learning_rate": 6.4285714285714295e-06, |
|
"loss": 1.2221, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.9903162121772766, |
|
"learning_rate": 6.4158163265306125e-06, |
|
"loss": 1.1994, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 0.8821483850479126, |
|
"learning_rate": 6.403061224489796e-06, |
|
"loss": 1.1885, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.9244970679283142, |
|
"learning_rate": 6.39030612244898e-06, |
|
"loss": 1.2173, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8394110798835754, |
|
"learning_rate": 6.3775510204081635e-06, |
|
"loss": 1.1996, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.9020444750785828, |
|
"learning_rate": 6.364795918367348e-06, |
|
"loss": 1.1937, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8169025778770447, |
|
"learning_rate": 6.352040816326531e-06, |
|
"loss": 1.1994, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8738958239555359, |
|
"learning_rate": 6.3392857142857145e-06, |
|
"loss": 1.1929, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.8744160532951355, |
|
"learning_rate": 6.326530612244899e-06, |
|
"loss": 1.183, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.819991946220398, |
|
"learning_rate": 6.313775510204082e-06, |
|
"loss": 1.1602, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 0.778607964515686, |
|
"learning_rate": 6.301020408163265e-06, |
|
"loss": 1.1865, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8436089158058167, |
|
"learning_rate": 6.288265306122449e-06, |
|
"loss": 1.194, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8169540166854858, |
|
"learning_rate": 6.275510204081633e-06, |
|
"loss": 1.193, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8512654304504395, |
|
"learning_rate": 6.262755102040817e-06, |
|
"loss": 1.2183, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8186187148094177, |
|
"learning_rate": 6.25e-06, |
|
"loss": 1.1844, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.801342248916626, |
|
"learning_rate": 6.237244897959183e-06, |
|
"loss": 1.1791, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8259813785552979, |
|
"learning_rate": 6.224489795918368e-06, |
|
"loss": 1.2031, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.8373045325279236, |
|
"learning_rate": 6.211734693877551e-06, |
|
"loss": 1.1881, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"grad_norm": 0.819210410118103, |
|
"learning_rate": 6.198979591836736e-06, |
|
"loss": 1.1898, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.824668824672699, |
|
"learning_rate": 6.186224489795919e-06, |
|
"loss": 1.1845, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.8842679858207703, |
|
"learning_rate": 6.173469387755102e-06, |
|
"loss": 1.2051, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.798000156879425, |
|
"learning_rate": 6.160714285714286e-06, |
|
"loss": 1.1926, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.7511589527130127, |
|
"learning_rate": 6.14795918367347e-06, |
|
"loss": 1.1949, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.7895238399505615, |
|
"learning_rate": 6.135204081632653e-06, |
|
"loss": 1.1733, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.8007684946060181, |
|
"learning_rate": 6.122448979591837e-06, |
|
"loss": 1.1655, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.7976680994033813, |
|
"learning_rate": 6.109693877551021e-06, |
|
"loss": 1.198, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"grad_norm": 0.7595086097717285, |
|
"learning_rate": 6.096938775510205e-06, |
|
"loss": 1.1914, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.8117203116416931, |
|
"learning_rate": 6.084183673469388e-06, |
|
"loss": 1.183, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.7925319075584412, |
|
"learning_rate": 6.071428571428571e-06, |
|
"loss": 1.1591, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.9027591347694397, |
|
"learning_rate": 6.058673469387756e-06, |
|
"loss": 1.1888, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.7940754294395447, |
|
"learning_rate": 6.045918367346939e-06, |
|
"loss": 1.1827, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.7836580872535706, |
|
"learning_rate": 6.033163265306124e-06, |
|
"loss": 1.1835, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.7472612261772156, |
|
"learning_rate": 6.020408163265307e-06, |
|
"loss": 1.1847, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.7687056660652161, |
|
"learning_rate": 6.00765306122449e-06, |
|
"loss": 1.1722, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"grad_norm": 0.7652135491371155, |
|
"learning_rate": 5.994897959183674e-06, |
|
"loss": 1.1831, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.7814399600028992, |
|
"learning_rate": 5.982142857142858e-06, |
|
"loss": 1.1723, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.7041035890579224, |
|
"learning_rate": 5.969387755102042e-06, |
|
"loss": 1.1923, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.7865569591522217, |
|
"learning_rate": 5.956632653061225e-06, |
|
"loss": 1.1857, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8004493117332458, |
|
"learning_rate": 5.943877551020408e-06, |
|
"loss": 1.1754, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8099337220191956, |
|
"learning_rate": 5.931122448979593e-06, |
|
"loss": 1.1568, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.7906381487846375, |
|
"learning_rate": 5.918367346938776e-06, |
|
"loss": 1.1888, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8648595213890076, |
|
"learning_rate": 5.905612244897959e-06, |
|
"loss": 1.1801, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"grad_norm": 0.8521154522895813, |
|
"learning_rate": 5.892857142857144e-06, |
|
"loss": 1.1745, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8795692324638367, |
|
"learning_rate": 5.880102040816327e-06, |
|
"loss": 1.1898, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8522396087646484, |
|
"learning_rate": 5.867346938775511e-06, |
|
"loss": 1.2022, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8316823244094849, |
|
"learning_rate": 5.854591836734695e-06, |
|
"loss": 1.1899, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8392115235328674, |
|
"learning_rate": 5.841836734693878e-06, |
|
"loss": 1.1811, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.8328803181648254, |
|
"learning_rate": 5.829081632653062e-06, |
|
"loss": 1.1769, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.7763929963111877, |
|
"learning_rate": 5.816326530612246e-06, |
|
"loss": 1.1663, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.7515134215354919, |
|
"learning_rate": 5.8035714285714295e-06, |
|
"loss": 1.1749, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"grad_norm": 0.7351372241973877, |
|
"learning_rate": 5.790816326530613e-06, |
|
"loss": 1.1696, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.7608625292778015, |
|
"learning_rate": 5.778061224489796e-06, |
|
"loss": 1.1893, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.7726980447769165, |
|
"learning_rate": 5.7653061224489805e-06, |
|
"loss": 1.1992, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.783970057964325, |
|
"learning_rate": 5.7525510204081636e-06, |
|
"loss": 1.1791, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.7531319856643677, |
|
"learning_rate": 5.739795918367348e-06, |
|
"loss": 1.183, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.7932224869728088, |
|
"learning_rate": 5.7270408163265314e-06, |
|
"loss": 1.196, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.7444440126419067, |
|
"learning_rate": 5.7142857142857145e-06, |
|
"loss": 1.1502, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"grad_norm": 0.7612382769584656, |
|
"learning_rate": 5.7015306122448984e-06, |
|
"loss": 1.1523, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.7724157571792603, |
|
"learning_rate": 5.688775510204082e-06, |
|
"loss": 1.1717, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.7933560013771057, |
|
"learning_rate": 5.6760204081632655e-06, |
|
"loss": 1.1802, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.799335777759552, |
|
"learning_rate": 5.663265306122449e-06, |
|
"loss": 1.1609, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.785335123538971, |
|
"learning_rate": 5.6505102040816325e-06, |
|
"loss": 1.1473, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.816709041595459, |
|
"learning_rate": 5.637755102040817e-06, |
|
"loss": 1.1681, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.7700944542884827, |
|
"learning_rate": 5.625e-06, |
|
"loss": 1.1723, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8849509954452515, |
|
"learning_rate": 5.6122448979591834e-06, |
|
"loss": 1.1664, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"grad_norm": 0.8110070824623108, |
|
"learning_rate": 5.599489795918368e-06, |
|
"loss": 1.1668, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.7989910244941711, |
|
"learning_rate": 5.586734693877551e-06, |
|
"loss": 1.1539, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.7733786702156067, |
|
"learning_rate": 5.573979591836735e-06, |
|
"loss": 1.1631, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.7904419302940369, |
|
"learning_rate": 5.561224489795919e-06, |
|
"loss": 1.1795, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.7549111843109131, |
|
"learning_rate": 5.548469387755102e-06, |
|
"loss": 1.1718, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.7582657337188721, |
|
"learning_rate": 5.535714285714286e-06, |
|
"loss": 1.1506, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.7553848028182983, |
|
"learning_rate": 5.52295918367347e-06, |
|
"loss": 1.1445, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.7763128280639648, |
|
"learning_rate": 5.510204081632653e-06, |
|
"loss": 1.1704, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"grad_norm": 0.7822700142860413, |
|
"learning_rate": 5.497448979591837e-06, |
|
"loss": 1.1687, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.809799313545227, |
|
"learning_rate": 5.48469387755102e-06, |
|
"loss": 1.144, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8360884189605713, |
|
"learning_rate": 5.471938775510205e-06, |
|
"loss": 1.1737, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.8203660249710083, |
|
"learning_rate": 5.459183673469388e-06, |
|
"loss": 1.1602, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.841998279094696, |
|
"learning_rate": 5.446428571428571e-06, |
|
"loss": 1.1648, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.7999197244644165, |
|
"learning_rate": 5.433673469387756e-06, |
|
"loss": 1.1757, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.7686060070991516, |
|
"learning_rate": 5.420918367346939e-06, |
|
"loss": 1.1717, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.7240757346153259, |
|
"learning_rate": 5.408163265306123e-06, |
|
"loss": 1.1621, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"grad_norm": 0.7930902242660522, |
|
"learning_rate": 5.395408163265307e-06, |
|
"loss": 1.1895, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.7647054195404053, |
|
"learning_rate": 5.38265306122449e-06, |
|
"loss": 1.177, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.7429608106613159, |
|
"learning_rate": 5.369897959183674e-06, |
|
"loss": 1.154, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.7082961201667786, |
|
"learning_rate": 5.357142857142857e-06, |
|
"loss": 1.155, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.7647345066070557, |
|
"learning_rate": 5.344387755102042e-06, |
|
"loss": 1.1478, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.7455657124519348, |
|
"learning_rate": 5.331632653061225e-06, |
|
"loss": 1.1308, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.7820383906364441, |
|
"learning_rate": 5.318877551020408e-06, |
|
"loss": 1.1657, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.7322938442230225, |
|
"learning_rate": 5.306122448979593e-06, |
|
"loss": 1.1629, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"grad_norm": 0.7732429504394531, |
|
"learning_rate": 5.293367346938776e-06, |
|
"loss": 1.1714, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.7132137417793274, |
|
"learning_rate": 5.280612244897959e-06, |
|
"loss": 1.1896, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.7276777625083923, |
|
"learning_rate": 5.267857142857144e-06, |
|
"loss": 1.1426, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.7646544575691223, |
|
"learning_rate": 5.255102040816327e-06, |
|
"loss": 1.1508, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.7866250276565552, |
|
"learning_rate": 5.242346938775511e-06, |
|
"loss": 1.1376, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.8272651433944702, |
|
"learning_rate": 5.229591836734695e-06, |
|
"loss": 1.1431, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.7927102446556091, |
|
"learning_rate": 5.216836734693878e-06, |
|
"loss": 1.1481, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.7916460037231445, |
|
"learning_rate": 5.204081632653062e-06, |
|
"loss": 1.1256, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"grad_norm": 0.7587056159973145, |
|
"learning_rate": 5.191326530612245e-06, |
|
"loss": 1.1555, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.7142450213432312, |
|
"learning_rate": 5.1785714285714296e-06, |
|
"loss": 1.1613, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.7216023802757263, |
|
"learning_rate": 5.165816326530613e-06, |
|
"loss": 1.1603, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.7468694448471069, |
|
"learning_rate": 5.153061224489796e-06, |
|
"loss": 1.1386, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.7479641437530518, |
|
"learning_rate": 5.1403061224489805e-06, |
|
"loss": 1.1295, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.7640284299850464, |
|
"learning_rate": 5.127551020408164e-06, |
|
"loss": 1.152, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.7198285460472107, |
|
"learning_rate": 5.1147959183673475e-06, |
|
"loss": 1.137, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.7455816864967346, |
|
"learning_rate": 5.1020408163265315e-06, |
|
"loss": 1.144, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"grad_norm": 0.7048183679580688, |
|
"learning_rate": 5.0892857142857146e-06, |
|
"loss": 1.1534, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.7082626223564148, |
|
"learning_rate": 5.0765306122448985e-06, |
|
"loss": 1.1539, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.7038515210151672, |
|
"learning_rate": 5.063775510204082e-06, |
|
"loss": 1.1679, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.7405194640159607, |
|
"learning_rate": 5.0510204081632655e-06, |
|
"loss": 1.1375, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.67337566614151, |
|
"learning_rate": 5.0382653061224495e-06, |
|
"loss": 1.1403, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.6937934756278992, |
|
"learning_rate": 5.0255102040816325e-06, |
|
"loss": 1.1401, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.6861513257026672, |
|
"learning_rate": 5.012755102040817e-06, |
|
"loss": 1.174, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.7102193236351013, |
|
"learning_rate": 5e-06, |
|
"loss": 1.1388, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"grad_norm": 0.7060781121253967, |
|
"learning_rate": 4.987244897959184e-06, |
|
"loss": 1.1485, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.7078152894973755, |
|
"learning_rate": 4.974489795918368e-06, |
|
"loss": 1.1599, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.6945005655288696, |
|
"learning_rate": 4.961734693877551e-06, |
|
"loss": 1.1372, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.6945128440856934, |
|
"learning_rate": 4.948979591836735e-06, |
|
"loss": 1.1511, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.6711966395378113, |
|
"learning_rate": 4.936224489795919e-06, |
|
"loss": 1.1296, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.6911993622779846, |
|
"learning_rate": 4.923469387755102e-06, |
|
"loss": 1.1263, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.6859057545661926, |
|
"learning_rate": 4.910714285714286e-06, |
|
"loss": 1.1611, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.6902860403060913, |
|
"learning_rate": 4.897959183673469e-06, |
|
"loss": 1.1328, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"grad_norm": 0.6645172834396362, |
|
"learning_rate": 4.885204081632653e-06, |
|
"loss": 1.1235, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.6861415505409241, |
|
"learning_rate": 4.872448979591837e-06, |
|
"loss": 1.1472, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.6925361156463623, |
|
"learning_rate": 4.859693877551021e-06, |
|
"loss": 1.1594, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.7075345516204834, |
|
"learning_rate": 4.846938775510204e-06, |
|
"loss": 1.1422, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.6742618680000305, |
|
"learning_rate": 4.834183673469388e-06, |
|
"loss": 1.1483, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.6796462535858154, |
|
"learning_rate": 4.821428571428572e-06, |
|
"loss": 1.1222, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.6587576866149902, |
|
"learning_rate": 4.808673469387756e-06, |
|
"loss": 1.1366, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.6835523843765259, |
|
"learning_rate": 4.795918367346939e-06, |
|
"loss": 1.1485, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"grad_norm": 0.7211734056472778, |
|
"learning_rate": 4.783163265306123e-06, |
|
"loss": 1.1432, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.745185136795044, |
|
"learning_rate": 4.770408163265306e-06, |
|
"loss": 1.1461, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.7311437129974365, |
|
"learning_rate": 4.75765306122449e-06, |
|
"loss": 1.1465, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.6965100169181824, |
|
"learning_rate": 4.744897959183674e-06, |
|
"loss": 1.1564, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.6883571147918701, |
|
"learning_rate": 4.732142857142857e-06, |
|
"loss": 1.1296, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.6782147884368896, |
|
"learning_rate": 4.719387755102041e-06, |
|
"loss": 1.1427, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.7144444584846497, |
|
"learning_rate": 4.706632653061225e-06, |
|
"loss": 1.1283, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.6718636155128479, |
|
"learning_rate": 4.693877551020409e-06, |
|
"loss": 1.1198, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"grad_norm": 0.6480969786643982, |
|
"learning_rate": 4.681122448979592e-06, |
|
"loss": 1.1441, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.6612988114356995, |
|
"learning_rate": 4.668367346938776e-06, |
|
"loss": 1.1525, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.6701130270957947, |
|
"learning_rate": 4.65561224489796e-06, |
|
"loss": 1.122, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.6586853265762329, |
|
"learning_rate": 4.642857142857144e-06, |
|
"loss": 1.1153, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.6665288209915161, |
|
"learning_rate": 4.630102040816327e-06, |
|
"loss": 1.1443, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.7110608816146851, |
|
"learning_rate": 4.617346938775511e-06, |
|
"loss": 1.1449, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.6855413913726807, |
|
"learning_rate": 4.604591836734694e-06, |
|
"loss": 1.1427, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.6243303418159485, |
|
"learning_rate": 4.591836734693878e-06, |
|
"loss": 1.146, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"grad_norm": 0.6160845756530762, |
|
"learning_rate": 4.579081632653062e-06, |
|
"loss": 1.1185, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.649599552154541, |
|
"learning_rate": 4.566326530612245e-06, |
|
"loss": 1.1402, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.6832114458084106, |
|
"learning_rate": 4.553571428571429e-06, |
|
"loss": 1.1431, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.6842077970504761, |
|
"learning_rate": 4.540816326530613e-06, |
|
"loss": 1.1403, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.6485678553581238, |
|
"learning_rate": 4.528061224489797e-06, |
|
"loss": 1.1349, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.6563675403594971, |
|
"learning_rate": 4.51530612244898e-06, |
|
"loss": 1.1445, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.6893595457077026, |
|
"learning_rate": 4.502551020408164e-06, |
|
"loss": 1.1303, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.6853702068328857, |
|
"learning_rate": 4.489795918367348e-06, |
|
"loss": 1.1266, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"grad_norm": 0.6498103141784668, |
|
"learning_rate": 4.477040816326531e-06, |
|
"loss": 1.1461, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.6687408089637756, |
|
"learning_rate": 4.464285714285715e-06, |
|
"loss": 1.1364, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.6929579377174377, |
|
"learning_rate": 4.451530612244898e-06, |
|
"loss": 1.1244, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.6577277183532715, |
|
"learning_rate": 4.438775510204082e-06, |
|
"loss": 1.1177, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.6540212035179138, |
|
"learning_rate": 4.4260204081632656e-06, |
|
"loss": 1.1296, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.6687043309211731, |
|
"learning_rate": 4.4132653061224495e-06, |
|
"loss": 1.1158, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.6880722641944885, |
|
"learning_rate": 4.400510204081633e-06, |
|
"loss": 1.1335, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.6755310893058777, |
|
"learning_rate": 4.3877551020408165e-06, |
|
"loss": 1.1346, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"grad_norm": 0.6623722910881042, |
|
"learning_rate": 4.3750000000000005e-06, |
|
"loss": 1.1401, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.6454164981842041, |
|
"learning_rate": 4.362244897959184e-06, |
|
"loss": 1.1204, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.6594773530960083, |
|
"learning_rate": 4.349489795918368e-06, |
|
"loss": 1.126, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.6405773162841797, |
|
"learning_rate": 4.336734693877551e-06, |
|
"loss": 1.1312, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.6566120982170105, |
|
"learning_rate": 4.323979591836735e-06, |
|
"loss": 1.1282, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.6293421387672424, |
|
"learning_rate": 4.3112244897959184e-06, |
|
"loss": 1.1294, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.6095276474952698, |
|
"learning_rate": 4.298469387755102e-06, |
|
"loss": 1.1392, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"grad_norm": 0.6369320750236511, |
|
"learning_rate": 4.2857142857142855e-06, |
|
"loss": 1.1396, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.6458733677864075, |
|
"learning_rate": 4.272959183673469e-06, |
|
"loss": 1.121, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.6436740159988403, |
|
"learning_rate": 4.260204081632653e-06, |
|
"loss": 1.1105, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.65425705909729, |
|
"learning_rate": 4.247448979591837e-06, |
|
"loss": 1.0976, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.6265342831611633, |
|
"learning_rate": 4.234693877551021e-06, |
|
"loss": 1.1275, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.5955907702445984, |
|
"learning_rate": 4.221938775510204e-06, |
|
"loss": 1.119, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.6000078320503235, |
|
"learning_rate": 4.209183673469388e-06, |
|
"loss": 1.1018, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.6637157201766968, |
|
"learning_rate": 4.196428571428572e-06, |
|
"loss": 1.1459, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"grad_norm": 0.6360584497451782, |
|
"learning_rate": 4.183673469387755e-06, |
|
"loss": 1.1192, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.7045226693153381, |
|
"learning_rate": 4.170918367346939e-06, |
|
"loss": 1.1258, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.6479504108428955, |
|
"learning_rate": 4.158163265306123e-06, |
|
"loss": 1.131, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.6127601861953735, |
|
"learning_rate": 4.145408163265306e-06, |
|
"loss": 1.1197, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.6368497610092163, |
|
"learning_rate": 4.13265306122449e-06, |
|
"loss": 1.1236, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.6386613249778748, |
|
"learning_rate": 4.119897959183674e-06, |
|
"loss": 1.1188, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.587577760219574, |
|
"learning_rate": 4.107142857142857e-06, |
|
"loss": 1.1234, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.5672041177749634, |
|
"learning_rate": 4.094387755102041e-06, |
|
"loss": 1.1357, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"grad_norm": 0.5886520743370056, |
|
"learning_rate": 4.081632653061225e-06, |
|
"loss": 1.101, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.611720085144043, |
|
"learning_rate": 4.068877551020409e-06, |
|
"loss": 1.118, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.6223128437995911, |
|
"learning_rate": 4.056122448979592e-06, |
|
"loss": 1.1232, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.620149552822113, |
|
"learning_rate": 4.043367346938776e-06, |
|
"loss": 1.1163, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.6052107214927673, |
|
"learning_rate": 4.03061224489796e-06, |
|
"loss": 1.1252, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.6238659024238586, |
|
"learning_rate": 4.017857142857143e-06, |
|
"loss": 1.1146, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.6172593235969543, |
|
"learning_rate": 4.005102040816327e-06, |
|
"loss": 1.1265, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.6135565042495728, |
|
"learning_rate": 3.99234693877551e-06, |
|
"loss": 1.108, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"grad_norm": 0.5971201062202454, |
|
"learning_rate": 3.979591836734694e-06, |
|
"loss": 1.1218, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.6215802431106567, |
|
"learning_rate": 3.966836734693878e-06, |
|
"loss": 1.1328, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.6160227060317993, |
|
"learning_rate": 3.954081632653062e-06, |
|
"loss": 1.1146, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.586821973323822, |
|
"learning_rate": 3.941326530612245e-06, |
|
"loss": 1.0882, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.5976683497428894, |
|
"learning_rate": 3.928571428571429e-06, |
|
"loss": 1.1128, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.6293664574623108, |
|
"learning_rate": 3.915816326530613e-06, |
|
"loss": 1.0857, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.598693311214447, |
|
"learning_rate": 3.903061224489797e-06, |
|
"loss": 1.121, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.6131600141525269, |
|
"learning_rate": 3.89030612244898e-06, |
|
"loss": 1.1177, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"grad_norm": 0.5944992899894714, |
|
"learning_rate": 3.877551020408164e-06, |
|
"loss": 1.1065, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.618480920791626, |
|
"learning_rate": 3.864795918367348e-06, |
|
"loss": 1.1004, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.62984699010849, |
|
"learning_rate": 3.852040816326531e-06, |
|
"loss": 1.1242, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.6211289167404175, |
|
"learning_rate": 3.839285714285715e-06, |
|
"loss": 1.1078, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.6085867285728455, |
|
"learning_rate": 3.826530612244898e-06, |
|
"loss": 1.0988, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.606937050819397, |
|
"learning_rate": 3.8137755102040817e-06, |
|
"loss": 1.1233, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.6092613935470581, |
|
"learning_rate": 3.8010204081632656e-06, |
|
"loss": 1.1143, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.5972644686698914, |
|
"learning_rate": 3.7882653061224496e-06, |
|
"loss": 1.1287, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"grad_norm": 0.6045836210250854, |
|
"learning_rate": 3.7755102040816327e-06, |
|
"loss": 1.1208, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.598455548286438, |
|
"learning_rate": 3.7627551020408166e-06, |
|
"loss": 1.1156, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.6008214950561523, |
|
"learning_rate": 3.7500000000000005e-06, |
|
"loss": 1.1008, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.6205429434776306, |
|
"learning_rate": 3.737244897959184e-06, |
|
"loss": 1.0959, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.5987078547477722, |
|
"learning_rate": 3.724489795918368e-06, |
|
"loss": 1.1104, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.6508322358131409, |
|
"learning_rate": 3.711734693877551e-06, |
|
"loss": 1.1084, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.6050755381584167, |
|
"learning_rate": 3.698979591836735e-06, |
|
"loss": 1.0987, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.6434103846549988, |
|
"learning_rate": 3.686224489795919e-06, |
|
"loss": 1.096, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"grad_norm": 0.5728334188461304, |
|
"learning_rate": 3.6734693877551024e-06, |
|
"loss": 1.1214, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.588948130607605, |
|
"learning_rate": 3.660714285714286e-06, |
|
"loss": 1.1125, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.6084712743759155, |
|
"learning_rate": 3.6479591836734694e-06, |
|
"loss": 1.1193, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.6148542165756226, |
|
"learning_rate": 3.6352040816326534e-06, |
|
"loss": 1.0953, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.5736531615257263, |
|
"learning_rate": 3.6224489795918373e-06, |
|
"loss": 1.097, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.5873562693595886, |
|
"learning_rate": 3.609693877551021e-06, |
|
"loss": 1.1221, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.6174765229225159, |
|
"learning_rate": 3.5969387755102043e-06, |
|
"loss": 1.1282, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.6031166911125183, |
|
"learning_rate": 3.584183673469388e-06, |
|
"loss": 1.1025, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"grad_norm": 0.6206244230270386, |
|
"learning_rate": 3.5714285714285718e-06, |
|
"loss": 1.1228, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5885174870491028, |
|
"learning_rate": 3.5586734693877557e-06, |
|
"loss": 1.1154, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5698478817939758, |
|
"learning_rate": 3.545918367346939e-06, |
|
"loss": 1.1015, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5861687064170837, |
|
"learning_rate": 3.5331632653061227e-06, |
|
"loss": 1.1024, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5565373301506042, |
|
"learning_rate": 3.5204081632653062e-06, |
|
"loss": 1.0974, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5508489012718201, |
|
"learning_rate": 3.50765306122449e-06, |
|
"loss": 1.1067, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5698209404945374, |
|
"learning_rate": 3.494897959183674e-06, |
|
"loss": 1.1008, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5689241290092468, |
|
"learning_rate": 3.482142857142857e-06, |
|
"loss": 1.1046, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"grad_norm": 0.5628039836883545, |
|
"learning_rate": 3.469387755102041e-06, |
|
"loss": 1.1155, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5791564583778381, |
|
"learning_rate": 3.456632653061225e-06, |
|
"loss": 1.1209, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5574327707290649, |
|
"learning_rate": 3.4438775510204086e-06, |
|
"loss": 1.1108, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5694270730018616, |
|
"learning_rate": 3.431122448979592e-06, |
|
"loss": 1.1218, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5663242936134338, |
|
"learning_rate": 3.4183673469387756e-06, |
|
"loss": 1.1257, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5686165690422058, |
|
"learning_rate": 3.4056122448979595e-06, |
|
"loss": 1.105, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5621039271354675, |
|
"learning_rate": 3.3928571428571435e-06, |
|
"loss": 1.0964, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.563789427280426, |
|
"learning_rate": 3.3801020408163266e-06, |
|
"loss": 1.1057, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"grad_norm": 0.5354847311973572, |
|
"learning_rate": 3.3673469387755105e-06, |
|
"loss": 1.0998, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5717220306396484, |
|
"learning_rate": 3.354591836734694e-06, |
|
"loss": 1.1188, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.6077321171760559, |
|
"learning_rate": 3.341836734693878e-06, |
|
"loss": 1.0788, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5820598006248474, |
|
"learning_rate": 3.329081632653062e-06, |
|
"loss": 1.1001, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5642079710960388, |
|
"learning_rate": 3.316326530612245e-06, |
|
"loss": 1.1198, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.555977463722229, |
|
"learning_rate": 3.303571428571429e-06, |
|
"loss": 1.1024, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5402265787124634, |
|
"learning_rate": 3.2908163265306124e-06, |
|
"loss": 1.105, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.566189169883728, |
|
"learning_rate": 3.2780612244897963e-06, |
|
"loss": 1.1138, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"grad_norm": 0.5901975035667419, |
|
"learning_rate": 3.2653061224489794e-06, |
|
"loss": 1.0954, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5786535739898682, |
|
"learning_rate": 3.2525510204081634e-06, |
|
"loss": 1.1033, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5403233170509338, |
|
"learning_rate": 3.2397959183673473e-06, |
|
"loss": 1.1109, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5486624836921692, |
|
"learning_rate": 3.227040816326531e-06, |
|
"loss": 1.1187, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5421629548072815, |
|
"learning_rate": 3.2142857142857147e-06, |
|
"loss": 1.1036, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5433635115623474, |
|
"learning_rate": 3.201530612244898e-06, |
|
"loss": 1.1052, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5454135537147522, |
|
"learning_rate": 3.1887755102040818e-06, |
|
"loss": 1.1061, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5338699221611023, |
|
"learning_rate": 3.1760204081632657e-06, |
|
"loss": 1.1037, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"grad_norm": 0.5373792052268982, |
|
"learning_rate": 3.1632653061224496e-06, |
|
"loss": 1.119, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.5319128036499023, |
|
"learning_rate": 3.1505102040816327e-06, |
|
"loss": 1.1083, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.5329867005348206, |
|
"learning_rate": 3.1377551020408166e-06, |
|
"loss": 1.0911, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.539853572845459, |
|
"learning_rate": 3.125e-06, |
|
"loss": 1.1104, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.555448591709137, |
|
"learning_rate": 3.112244897959184e-06, |
|
"loss": 1.1012, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.5585979223251343, |
|
"learning_rate": 3.099489795918368e-06, |
|
"loss": 1.1, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.5565794110298157, |
|
"learning_rate": 3.086734693877551e-06, |
|
"loss": 1.0868, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.583026111125946, |
|
"learning_rate": 3.073979591836735e-06, |
|
"loss": 1.1007, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"grad_norm": 0.5382760167121887, |
|
"learning_rate": 3.0612244897959185e-06, |
|
"loss": 1.0872, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.5512996315956116, |
|
"learning_rate": 3.0484693877551025e-06, |
|
"loss": 1.0912, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.5412560105323792, |
|
"learning_rate": 3.0357142857142856e-06, |
|
"loss": 1.085, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.5486542582511902, |
|
"learning_rate": 3.0229591836734695e-06, |
|
"loss": 1.0838, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.5376004576683044, |
|
"learning_rate": 3.0102040816326534e-06, |
|
"loss": 1.0753, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.6080600023269653, |
|
"learning_rate": 2.997448979591837e-06, |
|
"loss": 1.1179, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.56026691198349, |
|
"learning_rate": 2.984693877551021e-06, |
|
"loss": 1.1046, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.5503661036491394, |
|
"learning_rate": 2.971938775510204e-06, |
|
"loss": 1.0967, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"grad_norm": 0.5562008023262024, |
|
"learning_rate": 2.959183673469388e-06, |
|
"loss": 1.1024, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.5466132760047913, |
|
"learning_rate": 2.946428571428572e-06, |
|
"loss": 1.0958, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.5139285922050476, |
|
"learning_rate": 2.9336734693877553e-06, |
|
"loss": 1.0891, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.5482622981071472, |
|
"learning_rate": 2.920918367346939e-06, |
|
"loss": 1.1028, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.5475634336471558, |
|
"learning_rate": 2.908163265306123e-06, |
|
"loss": 1.0863, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.5316178798675537, |
|
"learning_rate": 2.8954081632653063e-06, |
|
"loss": 1.0858, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.5294205546379089, |
|
"learning_rate": 2.8826530612244902e-06, |
|
"loss": 1.099, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 0.5268580317497253, |
|
"learning_rate": 2.869897959183674e-06, |
|
"loss": 1.0836, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.5232135057449341, |
|
"learning_rate": 2.8571428571428573e-06, |
|
"loss": 1.091, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.5184506773948669, |
|
"learning_rate": 2.844387755102041e-06, |
|
"loss": 1.0723, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.5274559855461121, |
|
"learning_rate": 2.8316326530612247e-06, |
|
"loss": 1.0967, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.5404233932495117, |
|
"learning_rate": 2.8188775510204086e-06, |
|
"loss": 1.0946, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.5538030862808228, |
|
"learning_rate": 2.8061224489795917e-06, |
|
"loss": 1.0926, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.5240524411201477, |
|
"learning_rate": 2.7933673469387757e-06, |
|
"loss": 1.0954, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.5182491540908813, |
|
"learning_rate": 2.7806122448979596e-06, |
|
"loss": 1.098, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 0.5135244727134705, |
|
"learning_rate": 2.767857142857143e-06, |
|
"loss": 1.0896, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.508006751537323, |
|
"learning_rate": 2.7551020408163266e-06, |
|
"loss": 1.1097, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.5241822004318237, |
|
"learning_rate": 2.74234693877551e-06, |
|
"loss": 1.0893, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.5380449295043945, |
|
"learning_rate": 2.729591836734694e-06, |
|
"loss": 1.0935, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.5482093095779419, |
|
"learning_rate": 2.716836734693878e-06, |
|
"loss": 1.08, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.5596063733100891, |
|
"learning_rate": 2.7040816326530615e-06, |
|
"loss": 1.098, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.5343565940856934, |
|
"learning_rate": 2.691326530612245e-06, |
|
"loss": 1.0778, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.5278717279434204, |
|
"learning_rate": 2.6785714285714285e-06, |
|
"loss": 1.0821, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 0.536170482635498, |
|
"learning_rate": 2.6658163265306125e-06, |
|
"loss": 1.1046, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.5198537707328796, |
|
"learning_rate": 2.6530612244897964e-06, |
|
"loss": 1.1022, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.507614016532898, |
|
"learning_rate": 2.6403061224489795e-06, |
|
"loss": 1.082, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.5223891735076904, |
|
"learning_rate": 2.6275510204081634e-06, |
|
"loss": 1.0692, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.5129234194755554, |
|
"learning_rate": 2.6147959183673473e-06, |
|
"loss": 1.0866, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.5176243782043457, |
|
"learning_rate": 2.602040816326531e-06, |
|
"loss": 1.0954, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.5537042617797852, |
|
"learning_rate": 2.5892857142857148e-06, |
|
"loss": 1.0884, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.5302846431732178, |
|
"learning_rate": 2.576530612244898e-06, |
|
"loss": 1.0809, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 0.5051791071891785, |
|
"learning_rate": 2.563775510204082e-06, |
|
"loss": 1.0884, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.5340541005134583, |
|
"learning_rate": 2.5510204081632657e-06, |
|
"loss": 1.0967, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.5301932096481323, |
|
"learning_rate": 2.5382653061224492e-06, |
|
"loss": 1.1186, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.5282807946205139, |
|
"learning_rate": 2.5255102040816328e-06, |
|
"loss": 1.0987, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.49649927020072937, |
|
"learning_rate": 2.5127551020408163e-06, |
|
"loss": 1.0759, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.5125253796577454, |
|
"learning_rate": 2.5e-06, |
|
"loss": 1.0888, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.502814531326294, |
|
"learning_rate": 2.487244897959184e-06, |
|
"loss": 1.0712, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.5090625882148743, |
|
"learning_rate": 2.4744897959183676e-06, |
|
"loss": 1.0822, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 0.5162419080734253, |
|
"learning_rate": 2.461734693877551e-06, |
|
"loss": 1.0973, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.5318601131439209, |
|
"learning_rate": 2.4489795918367347e-06, |
|
"loss": 1.0992, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.5034762024879456, |
|
"learning_rate": 2.4362244897959186e-06, |
|
"loss": 1.0737, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.5142484903335571, |
|
"learning_rate": 2.423469387755102e-06, |
|
"loss": 1.0993, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.5127019882202148, |
|
"learning_rate": 2.410714285714286e-06, |
|
"loss": 1.0901, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.526448130607605, |
|
"learning_rate": 2.3979591836734696e-06, |
|
"loss": 1.0787, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.5332403779029846, |
|
"learning_rate": 2.385204081632653e-06, |
|
"loss": 1.0685, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.5180906057357788, |
|
"learning_rate": 2.372448979591837e-06, |
|
"loss": 1.0878, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 0.5121365785598755, |
|
"learning_rate": 2.3596938775510205e-06, |
|
"loss": 1.0657, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.5013092756271362, |
|
"learning_rate": 2.3469387755102044e-06, |
|
"loss": 1.0805, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.5061728358268738, |
|
"learning_rate": 2.334183673469388e-06, |
|
"loss": 1.0877, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.5036882162094116, |
|
"learning_rate": 2.321428571428572e-06, |
|
"loss": 1.0864, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.5100290179252625, |
|
"learning_rate": 2.3086734693877554e-06, |
|
"loss": 1.0826, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.5077418088912964, |
|
"learning_rate": 2.295918367346939e-06, |
|
"loss": 1.0978, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.505440890789032, |
|
"learning_rate": 2.2831632653061224e-06, |
|
"loss": 1.0913, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.5235510468482971, |
|
"learning_rate": 2.2704081632653064e-06, |
|
"loss": 1.0969, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 0.5321602821350098, |
|
"learning_rate": 2.25765306122449e-06, |
|
"loss": 1.0843, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5095252394676208, |
|
"learning_rate": 2.244897959183674e-06, |
|
"loss": 1.0817, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5119527578353882, |
|
"learning_rate": 2.2321428571428573e-06, |
|
"loss": 1.0967, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5339480042457581, |
|
"learning_rate": 2.219387755102041e-06, |
|
"loss": 1.0872, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5127794146537781, |
|
"learning_rate": 2.2066326530612248e-06, |
|
"loss": 1.0788, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5119395852088928, |
|
"learning_rate": 2.1938775510204083e-06, |
|
"loss": 1.0591, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5011995434761047, |
|
"learning_rate": 2.181122448979592e-06, |
|
"loss": 1.0838, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5067063570022583, |
|
"learning_rate": 2.1683673469387757e-06, |
|
"loss": 1.0846, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 0.5018548369407654, |
|
"learning_rate": 2.1556122448979592e-06, |
|
"loss": 1.0852, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.5224974751472473, |
|
"learning_rate": 2.1428571428571427e-06, |
|
"loss": 1.0722, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.5073409676551819, |
|
"learning_rate": 2.1301020408163267e-06, |
|
"loss": 1.0983, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.5096011757850647, |
|
"learning_rate": 2.1173469387755106e-06, |
|
"loss": 1.0708, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.5168206691741943, |
|
"learning_rate": 2.104591836734694e-06, |
|
"loss": 1.0731, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.5258650779724121, |
|
"learning_rate": 2.0918367346938776e-06, |
|
"loss": 1.0913, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.4983409643173218, |
|
"learning_rate": 2.0790816326530616e-06, |
|
"loss": 1.0616, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.5054186582565308, |
|
"learning_rate": 2.066326530612245e-06, |
|
"loss": 1.0925, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 0.5062727928161621, |
|
"learning_rate": 2.0535714285714286e-06, |
|
"loss": 1.0937, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.5056551098823547, |
|
"learning_rate": 2.0408163265306125e-06, |
|
"loss": 1.0859, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.5142880082130432, |
|
"learning_rate": 2.028061224489796e-06, |
|
"loss": 1.0837, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.5090142488479614, |
|
"learning_rate": 2.01530612244898e-06, |
|
"loss": 1.0669, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.5223041772842407, |
|
"learning_rate": 2.0025510204081635e-06, |
|
"loss": 1.0775, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.5301938056945801, |
|
"learning_rate": 1.989795918367347e-06, |
|
"loss": 1.1039, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.512309193611145, |
|
"learning_rate": 1.977040816326531e-06, |
|
"loss": 1.0749, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.5122722387313843, |
|
"learning_rate": 1.9642857142857144e-06, |
|
"loss": 1.0885, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 0.5142942070960999, |
|
"learning_rate": 1.9515306122448984e-06, |
|
"loss": 1.0954, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.5099963545799255, |
|
"learning_rate": 1.938775510204082e-06, |
|
"loss": 1.0924, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.5308962464332581, |
|
"learning_rate": 1.9260204081632654e-06, |
|
"loss": 1.075, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.5025672316551208, |
|
"learning_rate": 1.913265306122449e-06, |
|
"loss": 1.0915, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.5081654191017151, |
|
"learning_rate": 1.9005102040816328e-06, |
|
"loss": 1.062, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.5062184929847717, |
|
"learning_rate": 1.8877551020408163e-06, |
|
"loss": 1.0687, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.48296964168548584, |
|
"learning_rate": 1.8750000000000003e-06, |
|
"loss": 1.063, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.4898391664028168, |
|
"learning_rate": 1.862244897959184e-06, |
|
"loss": 1.0841, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 0.5018566250801086, |
|
"learning_rate": 1.8494897959183675e-06, |
|
"loss": 1.0871, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.5019533634185791, |
|
"learning_rate": 1.8367346938775512e-06, |
|
"loss": 1.0819, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.4855716824531555, |
|
"learning_rate": 1.8239795918367347e-06, |
|
"loss": 1.0835, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.4964170753955841, |
|
"learning_rate": 1.8112244897959187e-06, |
|
"loss": 1.0932, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.4965682029724121, |
|
"learning_rate": 1.7984693877551022e-06, |
|
"loss": 1.0714, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.4963724911212921, |
|
"learning_rate": 1.7857142857142859e-06, |
|
"loss": 1.0714, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.49885648488998413, |
|
"learning_rate": 1.7729591836734694e-06, |
|
"loss": 1.0824, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.49728772044181824, |
|
"learning_rate": 1.7602040816326531e-06, |
|
"loss": 1.0873, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 0.5012596845626831, |
|
"learning_rate": 1.747448979591837e-06, |
|
"loss": 1.07, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.49209412932395935, |
|
"learning_rate": 1.7346938775510206e-06, |
|
"loss": 1.0808, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.4979517459869385, |
|
"learning_rate": 1.7219387755102043e-06, |
|
"loss": 1.0632, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.49073830246925354, |
|
"learning_rate": 1.7091836734693878e-06, |
|
"loss": 1.0824, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.5171460509300232, |
|
"learning_rate": 1.6964285714285717e-06, |
|
"loss": 1.0614, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.49084386229515076, |
|
"learning_rate": 1.6836734693877552e-06, |
|
"loss": 1.0899, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.5033416152000427, |
|
"learning_rate": 1.670918367346939e-06, |
|
"loss": 1.1004, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.49504750967025757, |
|
"learning_rate": 1.6581632653061225e-06, |
|
"loss": 1.0621, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 0.4942651391029358, |
|
"learning_rate": 1.6454081632653062e-06, |
|
"loss": 1.0776, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.4950351417064667, |
|
"learning_rate": 1.6326530612244897e-06, |
|
"loss": 1.0887, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.5089950561523438, |
|
"learning_rate": 1.6198979591836736e-06, |
|
"loss": 1.0815, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.5063705444335938, |
|
"learning_rate": 1.6071428571428574e-06, |
|
"loss": 1.0835, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.4940871298313141, |
|
"learning_rate": 1.5943877551020409e-06, |
|
"loss": 1.0913, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.4951344430446625, |
|
"learning_rate": 1.5816326530612248e-06, |
|
"loss": 1.0625, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.5025525093078613, |
|
"learning_rate": 1.5688775510204083e-06, |
|
"loss": 1.097, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.5122385025024414, |
|
"learning_rate": 1.556122448979592e-06, |
|
"loss": 1.0823, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 0.4933209717273712, |
|
"learning_rate": 1.5433673469387756e-06, |
|
"loss": 1.0802, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.513375461101532, |
|
"learning_rate": 1.5306122448979593e-06, |
|
"loss": 1.089, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.5016956925392151, |
|
"learning_rate": 1.5178571428571428e-06, |
|
"loss": 1.0624, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.48563146591186523, |
|
"learning_rate": 1.5051020408163267e-06, |
|
"loss": 1.0812, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.49049103260040283, |
|
"learning_rate": 1.4923469387755104e-06, |
|
"loss": 1.0829, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.5047158002853394, |
|
"learning_rate": 1.479591836734694e-06, |
|
"loss": 1.0933, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.4831494092941284, |
|
"learning_rate": 1.4668367346938777e-06, |
|
"loss": 1.0883, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 0.4787144958972931, |
|
"learning_rate": 1.4540816326530614e-06, |
|
"loss": 1.0638, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.4906410276889801, |
|
"learning_rate": 1.4413265306122451e-06, |
|
"loss": 1.0729, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.48908156156539917, |
|
"learning_rate": 1.4285714285714286e-06, |
|
"loss": 1.0749, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.48697564005851746, |
|
"learning_rate": 1.4158163265306123e-06, |
|
"loss": 1.0904, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.5088189840316772, |
|
"learning_rate": 1.4030612244897959e-06, |
|
"loss": 1.0691, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.5027821063995361, |
|
"learning_rate": 1.3903061224489798e-06, |
|
"loss": 1.073, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.4827651381492615, |
|
"learning_rate": 1.3775510204081633e-06, |
|
"loss": 1.0669, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.5015129446983337, |
|
"learning_rate": 1.364795918367347e-06, |
|
"loss": 1.0739, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 0.4779413640499115, |
|
"learning_rate": 1.3520408163265307e-06, |
|
"loss": 1.0801, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.4807455837726593, |
|
"learning_rate": 1.3392857142857143e-06, |
|
"loss": 1.0599, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.49424856901168823, |
|
"learning_rate": 1.3265306122448982e-06, |
|
"loss": 1.0793, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.4978589415550232, |
|
"learning_rate": 1.3137755102040817e-06, |
|
"loss": 1.0783, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.49701061844825745, |
|
"learning_rate": 1.3010204081632654e-06, |
|
"loss": 1.0784, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 1.078125, |
|
"eval_runtime": 119.7086, |
|
"eval_samples_per_second": 68.516, |
|
"eval_steps_per_second": 34.258, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.48835909366607666, |
|
"learning_rate": 1.288265306122449e-06, |
|
"loss": 1.0646, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.4778783321380615, |
|
"learning_rate": 1.2755102040816329e-06, |
|
"loss": 1.0798, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.48762667179107666, |
|
"learning_rate": 1.2627551020408164e-06, |
|
"loss": 1.0637, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.4785713255405426, |
|
"learning_rate": 1.25e-06, |
|
"loss": 1.0451, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.4821445345878601, |
|
"learning_rate": 1.2372448979591838e-06, |
|
"loss": 1.0519, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.4918241500854492, |
|
"learning_rate": 1.2244897959183673e-06, |
|
"loss": 1.0555, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.47542768716812134, |
|
"learning_rate": 1.211734693877551e-06, |
|
"loss": 1.0716, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.4874721169471741, |
|
"learning_rate": 1.1989795918367348e-06, |
|
"loss": 1.0691, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.4866637885570526, |
|
"learning_rate": 1.1862244897959185e-06, |
|
"loss": 1.0581, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.48888394236564636, |
|
"learning_rate": 1.1734693877551022e-06, |
|
"loss": 1.0514, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.48897311091423035, |
|
"learning_rate": 1.160714285714286e-06, |
|
"loss": 1.0798, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"grad_norm": 0.4837857782840729, |
|
"learning_rate": 1.1479591836734695e-06, |
|
"loss": 1.0658, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.4774113595485687, |
|
"learning_rate": 1.1352040816326532e-06, |
|
"loss": 1.0721, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.4952887296676636, |
|
"learning_rate": 1.122448979591837e-06, |
|
"loss": 1.0591, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.4889083802700043, |
|
"learning_rate": 1.1096938775510204e-06, |
|
"loss": 1.0418, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.49243977665901184, |
|
"learning_rate": 1.0969387755102041e-06, |
|
"loss": 1.0455, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.495235800743103, |
|
"learning_rate": 1.0841836734693879e-06, |
|
"loss": 1.0556, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.4856567084789276, |
|
"learning_rate": 1.0714285714285714e-06, |
|
"loss": 1.0485, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.49020230770111084, |
|
"learning_rate": 1.0586734693877553e-06, |
|
"loss": 1.0468, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"grad_norm": 0.47778093814849854, |
|
"learning_rate": 1.0459183673469388e-06, |
|
"loss": 1.062, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.47502633929252625, |
|
"learning_rate": 1.0331632653061225e-06, |
|
"loss": 1.0676, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.4991995096206665, |
|
"learning_rate": 1.0204081632653063e-06, |
|
"loss": 1.0664, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.5038496255874634, |
|
"learning_rate": 1.00765306122449e-06, |
|
"loss": 1.0635, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.4831908345222473, |
|
"learning_rate": 9.948979591836735e-07, |
|
"loss": 1.0793, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.48362016677856445, |
|
"learning_rate": 9.821428571428572e-07, |
|
"loss": 1.059, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.4806089699268341, |
|
"learning_rate": 9.69387755102041e-07, |
|
"loss": 1.054, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.4738354980945587, |
|
"learning_rate": 9.566326530612244e-07, |
|
"loss": 1.0863, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"grad_norm": 0.49784815311431885, |
|
"learning_rate": 9.438775510204082e-07, |
|
"loss": 1.0824, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.48209649324417114, |
|
"learning_rate": 9.31122448979592e-07, |
|
"loss": 1.0669, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.4703362286090851, |
|
"learning_rate": 9.183673469387756e-07, |
|
"loss": 1.046, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.48009294271469116, |
|
"learning_rate": 9.056122448979593e-07, |
|
"loss": 1.0486, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.4972153306007385, |
|
"learning_rate": 8.928571428571429e-07, |
|
"loss": 1.0625, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.4931214451789856, |
|
"learning_rate": 8.801020408163266e-07, |
|
"loss": 1.0627, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.5141698718070984, |
|
"learning_rate": 8.673469387755103e-07, |
|
"loss": 1.0644, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.4702330529689789, |
|
"learning_rate": 8.545918367346939e-07, |
|
"loss": 1.0533, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"grad_norm": 0.4845568835735321, |
|
"learning_rate": 8.418367346938776e-07, |
|
"loss": 1.0402, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.4730464518070221, |
|
"learning_rate": 8.290816326530612e-07, |
|
"loss": 1.0373, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.49070635437965393, |
|
"learning_rate": 8.163265306122449e-07, |
|
"loss": 1.0584, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.47989973425865173, |
|
"learning_rate": 8.035714285714287e-07, |
|
"loss": 1.0455, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.48409467935562134, |
|
"learning_rate": 7.908163265306124e-07, |
|
"loss": 1.0597, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.4711008667945862, |
|
"learning_rate": 7.78061224489796e-07, |
|
"loss": 1.0606, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.4811023771762848, |
|
"learning_rate": 7.653061224489796e-07, |
|
"loss": 1.0624, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.4816673994064331, |
|
"learning_rate": 7.525510204081634e-07, |
|
"loss": 1.0628, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"grad_norm": 0.5013629198074341, |
|
"learning_rate": 7.39795918367347e-07, |
|
"loss": 1.0311, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.49255239963531494, |
|
"learning_rate": 7.270408163265307e-07, |
|
"loss": 1.0812, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.49093401432037354, |
|
"learning_rate": 7.142857142857143e-07, |
|
"loss": 1.0478, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.46841347217559814, |
|
"learning_rate": 7.015306122448979e-07, |
|
"loss": 1.0526, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.48145774006843567, |
|
"learning_rate": 6.887755102040817e-07, |
|
"loss": 1.0521, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.48966702818870544, |
|
"learning_rate": 6.760204081632654e-07, |
|
"loss": 1.0602, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.47580987215042114, |
|
"learning_rate": 6.632653061224491e-07, |
|
"loss": 1.056, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.48572811484336853, |
|
"learning_rate": 6.505102040816327e-07, |
|
"loss": 1.0443, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"grad_norm": 0.4886782467365265, |
|
"learning_rate": 6.377551020408164e-07, |
|
"loss": 1.0551, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.4674248695373535, |
|
"learning_rate": 6.25e-07, |
|
"loss": 1.0462, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.48394903540611267, |
|
"learning_rate": 6.122448979591837e-07, |
|
"loss": 1.0645, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.4802626073360443, |
|
"learning_rate": 5.994897959183674e-07, |
|
"loss": 1.0481, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.49721604585647583, |
|
"learning_rate": 5.867346938775511e-07, |
|
"loss": 1.0787, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.4986632764339447, |
|
"learning_rate": 5.739795918367347e-07, |
|
"loss": 1.0496, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.5088779330253601, |
|
"learning_rate": 5.612244897959184e-07, |
|
"loss": 1.0568, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.4756823778152466, |
|
"learning_rate": 5.484693877551021e-07, |
|
"loss": 1.0526, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"grad_norm": 0.48517045378685, |
|
"learning_rate": 5.357142857142857e-07, |
|
"loss": 1.0592, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.47856712341308594, |
|
"learning_rate": 5.229591836734694e-07, |
|
"loss": 1.0521, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.4860280454158783, |
|
"learning_rate": 5.102040816326531e-07, |
|
"loss": 1.0696, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.48423293232917786, |
|
"learning_rate": 4.974489795918367e-07, |
|
"loss": 1.0502, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.4771590530872345, |
|
"learning_rate": 4.846938775510205e-07, |
|
"loss": 1.0739, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.4849052131175995, |
|
"learning_rate": 4.719387755102041e-07, |
|
"loss": 1.0596, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.48700183629989624, |
|
"learning_rate": 4.591836734693878e-07, |
|
"loss": 1.0519, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.48286789655685425, |
|
"learning_rate": 4.4642857142857147e-07, |
|
"loss": 1.0695, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"grad_norm": 0.4692079722881317, |
|
"learning_rate": 4.3367346938775514e-07, |
|
"loss": 1.0601, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.48513585329055786, |
|
"learning_rate": 4.209183673469388e-07, |
|
"loss": 1.0673, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.4999236464500427, |
|
"learning_rate": 4.0816326530612243e-07, |
|
"loss": 1.0706, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.48926645517349243, |
|
"learning_rate": 3.954081632653062e-07, |
|
"loss": 1.0432, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.4762410819530487, |
|
"learning_rate": 3.826530612244898e-07, |
|
"loss": 1.0508, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.47507932782173157, |
|
"learning_rate": 3.698979591836735e-07, |
|
"loss": 1.0535, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.47133708000183105, |
|
"learning_rate": 3.5714285714285716e-07, |
|
"loss": 1.0429, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.4802726209163666, |
|
"learning_rate": 3.443877551020408e-07, |
|
"loss": 1.0506, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"grad_norm": 0.4963398277759552, |
|
"learning_rate": 3.3163265306122455e-07, |
|
"loss": 1.0778, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.4718584418296814, |
|
"learning_rate": 3.188775510204082e-07, |
|
"loss": 1.0353, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.4984806180000305, |
|
"learning_rate": 3.0612244897959183e-07, |
|
"loss": 1.0669, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.4719929099082947, |
|
"learning_rate": 2.9336734693877556e-07, |
|
"loss": 1.0425, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"grad_norm": 0.48878350853919983, |
|
"learning_rate": 2.806122448979592e-07, |
|
"loss": 1.0578, |
|
"step": 872 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 872, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 1, |
|
"total_flos": 1.197452692704723e+18, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|