SciLitLLM1.5-14B / trainer_state.json
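The JSON below is the trainer_state.json written by the Hugging Face Trainer: top-level run metadata (epoch, global_step, eval_steps) followed by a log_history list of per-step records (epoch, grad_norm, learning_rate, loss, step). As a minimal sketch, assuming the file has been downloaded locally under the name trainer_state.json (a hypothetical local path), the loss curve can be pulled out of it like this:

    import json

    # Sketch: load the Trainer state shown below and extract the per-step
    # training loss. Assumes a local copy named "trainer_state.json".
    with open("trainer_state.json") as f:
        state = json.load(f)

    # Keep only records that logged a training loss (eval records may not).
    records = [e for e in state["log_history"] if "loss" in e]
    steps = [e["step"] for e in records]
    losses = [e["loss"] for e in records]

    print(f"logged steps: {len(steps)}, first loss: {losses[0]:.4f}, last loss: {losses[-1]:.4f}")
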
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.004580152671756,
"eval_steps": 327,
"global_step": 984,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0030534351145038168,
"grad_norm": 87.75113677978516,
"learning_rate": 1.5151515151515152e-07,
"loss": 4.9268,
"step": 1
},
{
"epoch": 0.0061068702290076335,
"grad_norm": 80.01862335205078,
"learning_rate": 3.0303030303030305e-07,
"loss": 4.5522,
"step": 2
},
{
"epoch": 0.00916030534351145,
"grad_norm": 87.5570068359375,
"learning_rate": 4.5454545454545457e-07,
"loss": 5.0574,
"step": 3
},
{
"epoch": 0.012213740458015267,
"grad_norm": 85.17759704589844,
"learning_rate": 6.060606060606061e-07,
"loss": 5.0034,
"step": 4
},
{
"epoch": 0.015267175572519083,
"grad_norm": 77.56900787353516,
"learning_rate": 7.575757575757576e-07,
"loss": 4.9534,
"step": 5
},
{
"epoch": 0.0183206106870229,
"grad_norm": 72.43783569335938,
"learning_rate": 9.090909090909091e-07,
"loss": 4.4445,
"step": 6
},
{
"epoch": 0.021374045801526718,
"grad_norm": 80.14994812011719,
"learning_rate": 1.0606060606060608e-06,
"loss": 4.16,
"step": 7
},
{
"epoch": 0.024427480916030534,
"grad_norm": 76.3841781616211,
"learning_rate": 1.2121212121212122e-06,
"loss": 4.4955,
"step": 8
},
{
"epoch": 0.02748091603053435,
"grad_norm": 65.49907684326172,
"learning_rate": 1.3636363636363636e-06,
"loss": 4.3141,
"step": 9
},
{
"epoch": 0.030534351145038167,
"grad_norm": 68.10660552978516,
"learning_rate": 1.5151515151515152e-06,
"loss": 4.346,
"step": 10
},
{
"epoch": 0.03358778625954199,
"grad_norm": 51.76671600341797,
"learning_rate": 1.6666666666666667e-06,
"loss": 3.2792,
"step": 11
},
{
"epoch": 0.0366412213740458,
"grad_norm": 85.08094024658203,
"learning_rate": 1.8181818181818183e-06,
"loss": 2.4323,
"step": 12
},
{
"epoch": 0.03969465648854962,
"grad_norm": 62.03458023071289,
"learning_rate": 1.96969696969697e-06,
"loss": 1.8843,
"step": 13
},
{
"epoch": 0.042748091603053436,
"grad_norm": 60.39155578613281,
"learning_rate": 2.1212121212121216e-06,
"loss": 1.8758,
"step": 14
},
{
"epoch": 0.04580152671755725,
"grad_norm": 52.510433197021484,
"learning_rate": 2.2727272727272728e-06,
"loss": 1.5855,
"step": 15
},
{
"epoch": 0.04885496183206107,
"grad_norm": 10.294404029846191,
"learning_rate": 2.4242424242424244e-06,
"loss": 0.8323,
"step": 16
},
{
"epoch": 0.051908396946564885,
"grad_norm": 16.641094207763672,
"learning_rate": 2.575757575757576e-06,
"loss": 0.8575,
"step": 17
},
{
"epoch": 0.0549618320610687,
"grad_norm": 8.674266815185547,
"learning_rate": 2.7272727272727272e-06,
"loss": 0.7819,
"step": 18
},
{
"epoch": 0.05801526717557252,
"grad_norm": 5.270506381988525,
"learning_rate": 2.8787878787878793e-06,
"loss": 0.7184,
"step": 19
},
{
"epoch": 0.061068702290076333,
"grad_norm": 5.437349796295166,
"learning_rate": 3.0303030303030305e-06,
"loss": 0.6554,
"step": 20
},
{
"epoch": 0.06412213740458016,
"grad_norm": 4.164523601531982,
"learning_rate": 3.181818181818182e-06,
"loss": 0.6201,
"step": 21
},
{
"epoch": 0.06717557251908397,
"grad_norm": 3.023132085800171,
"learning_rate": 3.3333333333333333e-06,
"loss": 0.5653,
"step": 22
},
{
"epoch": 0.07022900763358779,
"grad_norm": 2.127342700958252,
"learning_rate": 3.4848484848484854e-06,
"loss": 0.5665,
"step": 23
},
{
"epoch": 0.0732824427480916,
"grad_norm": 1.689924716949463,
"learning_rate": 3.6363636363636366e-06,
"loss": 0.5848,
"step": 24
},
{
"epoch": 0.07633587786259542,
"grad_norm": 1.6041245460510254,
"learning_rate": 3.7878787878787882e-06,
"loss": 0.5708,
"step": 25
},
{
"epoch": 0.07938931297709924,
"grad_norm": 1.5720741748809814,
"learning_rate": 3.93939393939394e-06,
"loss": 0.5099,
"step": 26
},
{
"epoch": 0.08244274809160305,
"grad_norm": 2.095895290374756,
"learning_rate": 4.0909090909090915e-06,
"loss": 0.5879,
"step": 27
},
{
"epoch": 0.08549618320610687,
"grad_norm": 1.2914080619812012,
"learning_rate": 4.242424242424243e-06,
"loss": 0.5144,
"step": 28
},
{
"epoch": 0.08854961832061069,
"grad_norm": 1.2830662727355957,
"learning_rate": 4.393939393939394e-06,
"loss": 0.4861,
"step": 29
},
{
"epoch": 0.0916030534351145,
"grad_norm": 1.399788737297058,
"learning_rate": 4.5454545454545455e-06,
"loss": 0.4993,
"step": 30
},
{
"epoch": 0.09465648854961832,
"grad_norm": 1.5020956993103027,
"learning_rate": 4.696969696969698e-06,
"loss": 0.5615,
"step": 31
},
{
"epoch": 0.09770992366412214,
"grad_norm": 2.041203498840332,
"learning_rate": 4.848484848484849e-06,
"loss": 0.5903,
"step": 32
},
{
"epoch": 0.10076335877862595,
"grad_norm": 1.7165460586547852,
"learning_rate": 5e-06,
"loss": 0.484,
"step": 33
},
{
"epoch": 0.10381679389312977,
"grad_norm": 1.8483400344848633,
"learning_rate": 5.151515151515152e-06,
"loss": 0.5128,
"step": 34
},
{
"epoch": 0.10687022900763359,
"grad_norm": 2.1284639835357666,
"learning_rate": 5.303030303030303e-06,
"loss": 0.451,
"step": 35
},
{
"epoch": 0.1099236641221374,
"grad_norm": 2.3383638858795166,
"learning_rate": 5.4545454545454545e-06,
"loss": 0.5044,
"step": 36
},
{
"epoch": 0.11297709923664122,
"grad_norm": 1.5781283378601074,
"learning_rate": 5.606060606060606e-06,
"loss": 0.5028,
"step": 37
},
{
"epoch": 0.11603053435114503,
"grad_norm": 2.2045655250549316,
"learning_rate": 5.7575757575757586e-06,
"loss": 0.4562,
"step": 38
},
{
"epoch": 0.11908396946564885,
"grad_norm": 2.074035406112671,
"learning_rate": 5.90909090909091e-06,
"loss": 0.5005,
"step": 39
},
{
"epoch": 0.12213740458015267,
"grad_norm": 1.141032099723816,
"learning_rate": 6.060606060606061e-06,
"loss": 0.4874,
"step": 40
},
{
"epoch": 0.1251908396946565,
"grad_norm": 1.3975690603256226,
"learning_rate": 6.212121212121213e-06,
"loss": 0.4943,
"step": 41
},
{
"epoch": 0.1282442748091603,
"grad_norm": 1.1290738582611084,
"learning_rate": 6.363636363636364e-06,
"loss": 0.4428,
"step": 42
},
{
"epoch": 0.13129770992366413,
"grad_norm": 1.0739902257919312,
"learning_rate": 6.515151515151516e-06,
"loss": 0.4629,
"step": 43
},
{
"epoch": 0.13435114503816795,
"grad_norm": 1.7415887117385864,
"learning_rate": 6.666666666666667e-06,
"loss": 0.4987,
"step": 44
},
{
"epoch": 0.13740458015267176,
"grad_norm": 1.0889703035354614,
"learning_rate": 6.818181818181818e-06,
"loss": 0.4682,
"step": 45
},
{
"epoch": 0.14045801526717558,
"grad_norm": 1.2024023532867432,
"learning_rate": 6.969696969696971e-06,
"loss": 0.4471,
"step": 46
},
{
"epoch": 0.1435114503816794,
"grad_norm": 1.38139009475708,
"learning_rate": 7.121212121212122e-06,
"loss": 0.447,
"step": 47
},
{
"epoch": 0.1465648854961832,
"grad_norm": 1.282612919807434,
"learning_rate": 7.272727272727273e-06,
"loss": 0.4857,
"step": 48
},
{
"epoch": 0.14961832061068703,
"grad_norm": 1.529642105102539,
"learning_rate": 7.424242424242425e-06,
"loss": 0.4579,
"step": 49
},
{
"epoch": 0.15267175572519084,
"grad_norm": 1.8860386610031128,
"learning_rate": 7.5757575757575764e-06,
"loss": 0.5266,
"step": 50
},
{
"epoch": 0.15572519083969466,
"grad_norm": 1.9453023672103882,
"learning_rate": 7.727272727272727e-06,
"loss": 0.484,
"step": 51
},
{
"epoch": 0.15877862595419848,
"grad_norm": 31.375944137573242,
"learning_rate": 7.87878787878788e-06,
"loss": 0.4532,
"step": 52
},
{
"epoch": 0.1618320610687023,
"grad_norm": 1.2086973190307617,
"learning_rate": 8.03030303030303e-06,
"loss": 0.4521,
"step": 53
},
{
"epoch": 0.1648854961832061,
"grad_norm": 1.663839340209961,
"learning_rate": 8.181818181818183e-06,
"loss": 0.4692,
"step": 54
},
{
"epoch": 0.16793893129770993,
"grad_norm": 2.2267777919769287,
"learning_rate": 8.333333333333334e-06,
"loss": 0.5101,
"step": 55
},
{
"epoch": 0.17099236641221374,
"grad_norm": 1.555634617805481,
"learning_rate": 8.484848484848486e-06,
"loss": 0.502,
"step": 56
},
{
"epoch": 0.17404580152671756,
"grad_norm": 1.7097787857055664,
"learning_rate": 8.636363636363637e-06,
"loss": 0.4759,
"step": 57
},
{
"epoch": 0.17709923664122137,
"grad_norm": 1.5883866548538208,
"learning_rate": 8.787878787878788e-06,
"loss": 0.4683,
"step": 58
},
{
"epoch": 0.1801526717557252,
"grad_norm": 1.1990655660629272,
"learning_rate": 8.93939393939394e-06,
"loss": 0.4998,
"step": 59
},
{
"epoch": 0.183206106870229,
"grad_norm": 1.2467869520187378,
"learning_rate": 9.090909090909091e-06,
"loss": 0.5001,
"step": 60
},
{
"epoch": 0.18625954198473282,
"grad_norm": 1.0018348693847656,
"learning_rate": 9.242424242424244e-06,
"loss": 0.4284,
"step": 61
},
{
"epoch": 0.18931297709923664,
"grad_norm": 1.5363768339157104,
"learning_rate": 9.393939393939396e-06,
"loss": 0.4618,
"step": 62
},
{
"epoch": 0.19236641221374046,
"grad_norm": 1.2682244777679443,
"learning_rate": 9.545454545454547e-06,
"loss": 0.4797,
"step": 63
},
{
"epoch": 0.19541984732824427,
"grad_norm": 1.0340406894683838,
"learning_rate": 9.696969696969698e-06,
"loss": 0.4116,
"step": 64
},
{
"epoch": 0.1984732824427481,
"grad_norm": 1.2107707262039185,
"learning_rate": 9.84848484848485e-06,
"loss": 0.4446,
"step": 65
},
{
"epoch": 0.2015267175572519,
"grad_norm": 1.917484164237976,
"learning_rate": 1e-05,
"loss": 0.4401,
"step": 66
},
{
"epoch": 0.20458015267175572,
"grad_norm": 1.0186892747879028,
"learning_rate": 9.99999759644146e-06,
"loss": 0.4571,
"step": 67
},
{
"epoch": 0.20763358778625954,
"grad_norm": 1.811120629310608,
"learning_rate": 9.999990385768144e-06,
"loss": 0.4801,
"step": 68
},
{
"epoch": 0.21068702290076335,
"grad_norm": 1.4150516986846924,
"learning_rate": 9.999978367986988e-06,
"loss": 0.4814,
"step": 69
},
{
"epoch": 0.21374045801526717,
"grad_norm": 1.160577416419983,
"learning_rate": 9.999961543109546e-06,
"loss": 0.4648,
"step": 70
},
{
"epoch": 0.216793893129771,
"grad_norm": 1.380012035369873,
"learning_rate": 9.999939911151992e-06,
"loss": 0.4738,
"step": 71
},
{
"epoch": 0.2198473282442748,
"grad_norm": 1.7027848958969116,
"learning_rate": 9.999913472135126e-06,
"loss": 0.4172,
"step": 72
},
{
"epoch": 0.22290076335877862,
"grad_norm": 1.519327998161316,
"learning_rate": 9.999882226084366e-06,
"loss": 0.4686,
"step": 73
},
{
"epoch": 0.22595419847328244,
"grad_norm": 1.2961336374282837,
"learning_rate": 9.999846173029752e-06,
"loss": 0.4336,
"step": 74
},
{
"epoch": 0.22900763358778625,
"grad_norm": 0.9381375312805176,
"learning_rate": 9.999805313005946e-06,
"loss": 0.453,
"step": 75
},
{
"epoch": 0.23206106870229007,
"grad_norm": 1.2200775146484375,
"learning_rate": 9.999759646052234e-06,
"loss": 0.4973,
"step": 76
},
{
"epoch": 0.23511450381679388,
"grad_norm": 1.5415514707565308,
"learning_rate": 9.99970917221252e-06,
"loss": 0.4504,
"step": 77
},
{
"epoch": 0.2381679389312977,
"grad_norm": 1.6960675716400146,
"learning_rate": 9.99965389153533e-06,
"loss": 0.4643,
"step": 78
},
{
"epoch": 0.24122137404580152,
"grad_norm": 2.4953086376190186,
"learning_rate": 9.999593804073812e-06,
"loss": 0.4624,
"step": 79
},
{
"epoch": 0.24427480916030533,
"grad_norm": 1.677355408668518,
"learning_rate": 9.999528909885738e-06,
"loss": 0.3933,
"step": 80
},
{
"epoch": 0.24732824427480915,
"grad_norm": 1.5514272451400757,
"learning_rate": 9.999459209033495e-06,
"loss": 0.4733,
"step": 81
},
{
"epoch": 0.250381679389313,
"grad_norm": 1.7533841133117676,
"learning_rate": 9.999384701584098e-06,
"loss": 0.4599,
"step": 82
},
{
"epoch": 0.2534351145038168,
"grad_norm": 1.3905091285705566,
"learning_rate": 9.99930538760918e-06,
"loss": 0.4467,
"step": 83
},
{
"epoch": 0.2564885496183206,
"grad_norm": 0.9444634318351746,
"learning_rate": 9.999221267184993e-06,
"loss": 0.4141,
"step": 84
},
{
"epoch": 0.2595419847328244,
"grad_norm": 1.6794573068618774,
"learning_rate": 9.999132340392416e-06,
"loss": 0.4982,
"step": 85
},
{
"epoch": 0.26259541984732826,
"grad_norm": 1.0468450784683228,
"learning_rate": 9.999038607316942e-06,
"loss": 0.4327,
"step": 86
},
{
"epoch": 0.26564885496183205,
"grad_norm": 1.217337965965271,
"learning_rate": 9.998940068048688e-06,
"loss": 0.4825,
"step": 87
},
{
"epoch": 0.2687022900763359,
"grad_norm": 1.1232513189315796,
"learning_rate": 9.998836722682397e-06,
"loss": 0.4949,
"step": 88
},
{
"epoch": 0.2717557251908397,
"grad_norm": 1.0501331090927124,
"learning_rate": 9.998728571317422e-06,
"loss": 0.4668,
"step": 89
},
{
"epoch": 0.2748091603053435,
"grad_norm": 1.012088656425476,
"learning_rate": 9.998615614057743e-06,
"loss": 0.433,
"step": 90
},
{
"epoch": 0.2778625954198473,
"grad_norm": 1.293199062347412,
"learning_rate": 9.998497851011963e-06,
"loss": 0.4527,
"step": 91
},
{
"epoch": 0.28091603053435116,
"grad_norm": 1.868419885635376,
"learning_rate": 9.998375282293298e-06,
"loss": 0.4281,
"step": 92
},
{
"epoch": 0.28396946564885495,
"grad_norm": 1.1763111352920532,
"learning_rate": 9.998247908019594e-06,
"loss": 0.4874,
"step": 93
},
{
"epoch": 0.2870229007633588,
"grad_norm": 1.0736606121063232,
"learning_rate": 9.998115728313305e-06,
"loss": 0.4912,
"step": 94
},
{
"epoch": 0.2900763358778626,
"grad_norm": 1.1527585983276367,
"learning_rate": 9.997978743301516e-06,
"loss": 0.4501,
"step": 95
},
{
"epoch": 0.2931297709923664,
"grad_norm": 1.920807123184204,
"learning_rate": 9.997836953115927e-06,
"loss": 0.4431,
"step": 96
},
{
"epoch": 0.2961832061068702,
"grad_norm": 1.6428192853927612,
"learning_rate": 9.997690357892857e-06,
"loss": 0.4427,
"step": 97
},
{
"epoch": 0.29923664122137406,
"grad_norm": 1.0213465690612793,
"learning_rate": 9.997538957773248e-06,
"loss": 0.4367,
"step": 98
},
{
"epoch": 0.30229007633587784,
"grad_norm": 0.9431554675102234,
"learning_rate": 9.997382752902658e-06,
"loss": 0.4033,
"step": 99
},
{
"epoch": 0.3053435114503817,
"grad_norm": 1.0948545932769775,
"learning_rate": 9.997221743431267e-06,
"loss": 0.4287,
"step": 100
},
{
"epoch": 0.3083969465648855,
"grad_norm": 0.9768729209899902,
"learning_rate": 9.997055929513873e-06,
"loss": 0.4127,
"step": 101
},
{
"epoch": 0.3114503816793893,
"grad_norm": 1.0006322860717773,
"learning_rate": 9.996885311309892e-06,
"loss": 0.4278,
"step": 102
},
{
"epoch": 0.3145038167938931,
"grad_norm": 0.9815725088119507,
"learning_rate": 9.996709888983362e-06,
"loss": 0.4151,
"step": 103
},
{
"epoch": 0.31755725190839695,
"grad_norm": 1.643265962600708,
"learning_rate": 9.99652966270294e-06,
"loss": 0.4664,
"step": 104
},
{
"epoch": 0.32061068702290074,
"grad_norm": 1.4411566257476807,
"learning_rate": 9.996344632641895e-06,
"loss": 0.459,
"step": 105
},
{
"epoch": 0.3236641221374046,
"grad_norm": 0.8882512450218201,
"learning_rate": 9.996154798978122e-06,
"loss": 0.4399,
"step": 106
},
{
"epoch": 0.3267175572519084,
"grad_norm": 1.2299996614456177,
"learning_rate": 9.995960161894132e-06,
"loss": 0.4745,
"step": 107
},
{
"epoch": 0.3297709923664122,
"grad_norm": 1.3094323873519897,
"learning_rate": 9.995760721577053e-06,
"loss": 0.4488,
"step": 108
},
{
"epoch": 0.332824427480916,
"grad_norm": 1.5007655620574951,
"learning_rate": 9.99555647821863e-06,
"loss": 0.4445,
"step": 109
},
{
"epoch": 0.33587786259541985,
"grad_norm": 1.6499916315078735,
"learning_rate": 9.99534743201523e-06,
"loss": 0.4815,
"step": 110
},
{
"epoch": 0.33893129770992364,
"grad_norm": 1.3988540172576904,
"learning_rate": 9.995133583167833e-06,
"loss": 0.4401,
"step": 111
},
{
"epoch": 0.3419847328244275,
"grad_norm": 1.21137273311615,
"learning_rate": 9.99491493188204e-06,
"loss": 0.4817,
"step": 112
},
{
"epoch": 0.3450381679389313,
"grad_norm": 1.1858094930648804,
"learning_rate": 9.994691478368067e-06,
"loss": 0.4377,
"step": 113
},
{
"epoch": 0.3480916030534351,
"grad_norm": 1.7524734735488892,
"learning_rate": 9.994463222840748e-06,
"loss": 0.4705,
"step": 114
},
{
"epoch": 0.3511450381679389,
"grad_norm": 1.1640723943710327,
"learning_rate": 9.994230165519529e-06,
"loss": 0.4671,
"step": 115
},
{
"epoch": 0.35419847328244275,
"grad_norm": 1.045408010482788,
"learning_rate": 9.993992306628481e-06,
"loss": 0.4551,
"step": 116
},
{
"epoch": 0.35725190839694654,
"grad_norm": 0.8883363008499146,
"learning_rate": 9.993749646396286e-06,
"loss": 0.4585,
"step": 117
},
{
"epoch": 0.3603053435114504,
"grad_norm": 1.1517438888549805,
"learning_rate": 9.993502185056244e-06,
"loss": 0.4713,
"step": 118
},
{
"epoch": 0.36335877862595417,
"grad_norm": 0.9445165991783142,
"learning_rate": 9.993249922846269e-06,
"loss": 0.4407,
"step": 119
},
{
"epoch": 0.366412213740458,
"grad_norm": 0.9066264033317566,
"learning_rate": 9.992992860008893e-06,
"loss": 0.4367,
"step": 120
},
{
"epoch": 0.36946564885496186,
"grad_norm": 1.3719621896743774,
"learning_rate": 9.99273099679126e-06,
"loss": 0.4219,
"step": 121
},
{
"epoch": 0.37251908396946565,
"grad_norm": 0.8308991193771362,
"learning_rate": 9.992464333445134e-06,
"loss": 0.4226,
"step": 122
},
{
"epoch": 0.3755725190839695,
"grad_norm": 1.1011483669281006,
"learning_rate": 9.99219287022689e-06,
"loss": 0.4465,
"step": 123
},
{
"epoch": 0.3786259541984733,
"grad_norm": 1.1907440423965454,
"learning_rate": 9.99191660739752e-06,
"loss": 0.4981,
"step": 124
},
{
"epoch": 0.3816793893129771,
"grad_norm": 0.7825012803077698,
"learning_rate": 9.991635545222628e-06,
"loss": 0.4107,
"step": 125
},
{
"epoch": 0.3847328244274809,
"grad_norm": 1.0008180141448975,
"learning_rate": 9.991349683972435e-06,
"loss": 0.4376,
"step": 126
},
{
"epoch": 0.38778625954198476,
"grad_norm": 0.8535720109939575,
"learning_rate": 9.991059023921773e-06,
"loss": 0.4226,
"step": 127
},
{
"epoch": 0.39083969465648855,
"grad_norm": 1.3896106481552124,
"learning_rate": 9.990763565350092e-06,
"loss": 0.4507,
"step": 128
},
{
"epoch": 0.3938931297709924,
"grad_norm": 1.0249606370925903,
"learning_rate": 9.990463308541452e-06,
"loss": 0.4319,
"step": 129
},
{
"epoch": 0.3969465648854962,
"grad_norm": 1.1196545362472534,
"learning_rate": 9.990158253784525e-06,
"loss": 0.4879,
"step": 130
},
{
"epoch": 0.4,
"grad_norm": 1.000165581703186,
"learning_rate": 9.989848401372602e-06,
"loss": 0.4801,
"step": 131
},
{
"epoch": 0.4030534351145038,
"grad_norm": 0.9226934313774109,
"learning_rate": 9.989533751603578e-06,
"loss": 0.3995,
"step": 132
},
{
"epoch": 0.40610687022900765,
"grad_norm": 1.3084008693695068,
"learning_rate": 9.989214304779965e-06,
"loss": 0.4261,
"step": 133
},
{
"epoch": 0.40916030534351144,
"grad_norm": 0.8433472514152527,
"learning_rate": 9.988890061208889e-06,
"loss": 0.4286,
"step": 134
},
{
"epoch": 0.4122137404580153,
"grad_norm": 1.1123720407485962,
"learning_rate": 9.988561021202083e-06,
"loss": 0.4492,
"step": 135
},
{
"epoch": 0.4152671755725191,
"grad_norm": 1.216487169265747,
"learning_rate": 9.988227185075897e-06,
"loss": 0.4379,
"step": 136
},
{
"epoch": 0.4183206106870229,
"grad_norm": 0.9052311778068542,
"learning_rate": 9.987888553151285e-06,
"loss": 0.4645,
"step": 137
},
{
"epoch": 0.4213740458015267,
"grad_norm": 0.9832118153572083,
"learning_rate": 9.987545125753818e-06,
"loss": 0.4657,
"step": 138
},
{
"epoch": 0.42442748091603055,
"grad_norm": 0.8079437613487244,
"learning_rate": 9.987196903213677e-06,
"loss": 0.4261,
"step": 139
},
{
"epoch": 0.42748091603053434,
"grad_norm": 1.068222165107727,
"learning_rate": 9.986843885865649e-06,
"loss": 0.4281,
"step": 140
},
{
"epoch": 0.4305343511450382,
"grad_norm": 1.4247034788131714,
"learning_rate": 9.986486074049131e-06,
"loss": 0.4554,
"step": 141
},
{
"epoch": 0.433587786259542,
"grad_norm": 1.5078891515731812,
"learning_rate": 9.986123468108134e-06,
"loss": 0.5014,
"step": 142
},
{
"epoch": 0.4366412213740458,
"grad_norm": 1.5330584049224854,
"learning_rate": 9.985756068391276e-06,
"loss": 0.4162,
"step": 143
},
{
"epoch": 0.4396946564885496,
"grad_norm": 0.8555817008018494,
"learning_rate": 9.985383875251783e-06,
"loss": 0.3968,
"step": 144
},
{
"epoch": 0.44274809160305345,
"grad_norm": 1.1150386333465576,
"learning_rate": 9.985006889047492e-06,
"loss": 0.422,
"step": 145
},
{
"epoch": 0.44580152671755724,
"grad_norm": 0.9068059325218201,
"learning_rate": 9.984625110140844e-06,
"loss": 0.4756,
"step": 146
},
{
"epoch": 0.4488549618320611,
"grad_norm": 1.2768193483352661,
"learning_rate": 9.98423853889889e-06,
"loss": 0.4278,
"step": 147
},
{
"epoch": 0.45190839694656487,
"grad_norm": 1.2756924629211426,
"learning_rate": 9.983847175693291e-06,
"loss": 0.4828,
"step": 148
},
{
"epoch": 0.4549618320610687,
"grad_norm": 1.4357906579971313,
"learning_rate": 9.983451020900312e-06,
"loss": 0.4131,
"step": 149
},
{
"epoch": 0.4580152671755725,
"grad_norm": 0.8544045090675354,
"learning_rate": 9.983050074900824e-06,
"loss": 0.4327,
"step": 150
},
{
"epoch": 0.46106870229007635,
"grad_norm": 1.3559682369232178,
"learning_rate": 9.982644338080308e-06,
"loss": 0.4565,
"step": 151
},
{
"epoch": 0.46412213740458014,
"grad_norm": 1.153304100036621,
"learning_rate": 9.982233810828846e-06,
"loss": 0.432,
"step": 152
},
{
"epoch": 0.467175572519084,
"grad_norm": 0.9695574045181274,
"learning_rate": 9.98181849354113e-06,
"loss": 0.4475,
"step": 153
},
{
"epoch": 0.47022900763358777,
"grad_norm": 1.1988860368728638,
"learning_rate": 9.98139838661646e-06,
"loss": 0.4445,
"step": 154
},
{
"epoch": 0.4732824427480916,
"grad_norm": 1.272664189338684,
"learning_rate": 9.980973490458728e-06,
"loss": 0.3876,
"step": 155
},
{
"epoch": 0.4763358778625954,
"grad_norm": 0.7643194198608398,
"learning_rate": 9.980543805476447e-06,
"loss": 0.4209,
"step": 156
},
{
"epoch": 0.47938931297709925,
"grad_norm": 0.8956263065338135,
"learning_rate": 9.980109332082722e-06,
"loss": 0.4408,
"step": 157
},
{
"epoch": 0.48244274809160304,
"grad_norm": 0.8804344534873962,
"learning_rate": 9.979670070695265e-06,
"loss": 0.4238,
"step": 158
},
{
"epoch": 0.4854961832061069,
"grad_norm": 0.9882742166519165,
"learning_rate": 9.979226021736396e-06,
"loss": 0.4255,
"step": 159
},
{
"epoch": 0.48854961832061067,
"grad_norm": 0.7737629413604736,
"learning_rate": 9.978777185633032e-06,
"loss": 0.418,
"step": 160
},
{
"epoch": 0.4916030534351145,
"grad_norm": 1.0155868530273438,
"learning_rate": 9.978323562816693e-06,
"loss": 0.4738,
"step": 161
},
{
"epoch": 0.4946564885496183,
"grad_norm": 0.869758665561676,
"learning_rate": 9.977865153723508e-06,
"loss": 0.4386,
"step": 162
},
{
"epoch": 0.49770992366412214,
"grad_norm": 0.9655186533927917,
"learning_rate": 9.977401958794194e-06,
"loss": 0.4488,
"step": 163
},
{
"epoch": 0.500763358778626,
"grad_norm": 0.7864688038825989,
"learning_rate": 9.976933978474085e-06,
"loss": 0.4511,
"step": 164
},
{
"epoch": 0.5038167938931297,
"grad_norm": 0.8401108980178833,
"learning_rate": 9.976461213213104e-06,
"loss": 0.4275,
"step": 165
},
{
"epoch": 0.5068702290076336,
"grad_norm": 1.1969828605651855,
"learning_rate": 9.97598366346578e-06,
"loss": 0.4429,
"step": 166
},
{
"epoch": 0.5099236641221374,
"grad_norm": 0.9852338433265686,
"learning_rate": 9.975501329691241e-06,
"loss": 0.382,
"step": 167
},
{
"epoch": 0.5129770992366413,
"grad_norm": 1.4056986570358276,
"learning_rate": 9.975014212353212e-06,
"loss": 0.4087,
"step": 168
},
{
"epoch": 0.516030534351145,
"grad_norm": 0.8455936908721924,
"learning_rate": 9.974522311920021e-06,
"loss": 0.395,
"step": 169
},
{
"epoch": 0.5190839694656488,
"grad_norm": 1.071169137954712,
"learning_rate": 9.974025628864592e-06,
"loss": 0.4299,
"step": 170
},
{
"epoch": 0.5221374045801527,
"grad_norm": 1.0493682622909546,
"learning_rate": 9.973524163664447e-06,
"loss": 0.4453,
"step": 171
},
{
"epoch": 0.5251908396946565,
"grad_norm": 0.9125475287437439,
"learning_rate": 9.973017916801708e-06,
"loss": 0.4538,
"step": 172
},
{
"epoch": 0.5282442748091603,
"grad_norm": 1.427983283996582,
"learning_rate": 9.972506888763092e-06,
"loss": 0.4426,
"step": 173
},
{
"epoch": 0.5312977099236641,
"grad_norm": 0.9919382333755493,
"learning_rate": 9.971991080039912e-06,
"loss": 0.4399,
"step": 174
},
{
"epoch": 0.5343511450381679,
"grad_norm": 0.8641317486763,
"learning_rate": 9.971470491128077e-06,
"loss": 0.4265,
"step": 175
},
{
"epoch": 0.5374045801526718,
"grad_norm": 0.825204074382782,
"learning_rate": 9.9709451225281e-06,
"loss": 0.4389,
"step": 176
},
{
"epoch": 0.5404580152671755,
"grad_norm": 0.8316844701766968,
"learning_rate": 9.970414974745077e-06,
"loss": 0.5158,
"step": 177
},
{
"epoch": 0.5435114503816794,
"grad_norm": 0.8684625029563904,
"learning_rate": 9.969880048288704e-06,
"loss": 0.4525,
"step": 178
},
{
"epoch": 0.5465648854961832,
"grad_norm": 1.0494869947433472,
"learning_rate": 9.969340343673277e-06,
"loss": 0.4309,
"step": 179
},
{
"epoch": 0.549618320610687,
"grad_norm": 0.87712162733078,
"learning_rate": 9.968795861417676e-06,
"loss": 0.4163,
"step": 180
},
{
"epoch": 0.5526717557251909,
"grad_norm": 1.0175992250442505,
"learning_rate": 9.96824660204538e-06,
"loss": 0.4177,
"step": 181
},
{
"epoch": 0.5557251908396946,
"grad_norm": 1.1146421432495117,
"learning_rate": 9.96769256608446e-06,
"loss": 0.4421,
"step": 182
},
{
"epoch": 0.5587786259541985,
"grad_norm": 0.8894767165184021,
"learning_rate": 9.967133754067581e-06,
"loss": 0.3736,
"step": 183
},
{
"epoch": 0.5618320610687023,
"grad_norm": 0.8820218443870544,
"learning_rate": 9.966570166531997e-06,
"loss": 0.4445,
"step": 184
},
{
"epoch": 0.5648854961832062,
"grad_norm": 0.9425957798957825,
"learning_rate": 9.966001804019552e-06,
"loss": 0.4773,
"step": 185
},
{
"epoch": 0.5679389312977099,
"grad_norm": 0.8843741416931152,
"learning_rate": 9.965428667076687e-06,
"loss": 0.4366,
"step": 186
},
{
"epoch": 0.5709923664122137,
"grad_norm": 1.057478904724121,
"learning_rate": 9.964850756254426e-06,
"loss": 0.4153,
"step": 187
},
{
"epoch": 0.5740458015267176,
"grad_norm": 0.6932744979858398,
"learning_rate": 9.964268072108385e-06,
"loss": 0.389,
"step": 188
},
{
"epoch": 0.5770992366412214,
"grad_norm": 0.9970691800117493,
"learning_rate": 9.963680615198774e-06,
"loss": 0.4463,
"step": 189
},
{
"epoch": 0.5801526717557252,
"grad_norm": 0.868103563785553,
"learning_rate": 9.963088386090386e-06,
"loss": 0.4787,
"step": 190
},
{
"epoch": 0.583206106870229,
"grad_norm": 0.9566022157669067,
"learning_rate": 9.962491385352601e-06,
"loss": 0.4465,
"step": 191
},
{
"epoch": 0.5862595419847328,
"grad_norm": 1.0130680799484253,
"learning_rate": 9.961889613559396e-06,
"loss": 0.4198,
"step": 192
},
{
"epoch": 0.5893129770992367,
"grad_norm": 0.7694997191429138,
"learning_rate": 9.961283071289323e-06,
"loss": 0.3911,
"step": 193
},
{
"epoch": 0.5923664122137404,
"grad_norm": 1.107438087463379,
"learning_rate": 9.960671759125529e-06,
"loss": 0.465,
"step": 194
},
{
"epoch": 0.5954198473282443,
"grad_norm": 1.2282586097717285,
"learning_rate": 9.960055677655743e-06,
"loss": 0.3544,
"step": 195
},
{
"epoch": 0.5984732824427481,
"grad_norm": 0.799738347530365,
"learning_rate": 9.959434827472278e-06,
"loss": 0.4209,
"step": 196
},
{
"epoch": 0.601526717557252,
"grad_norm": 1.0112799406051636,
"learning_rate": 9.958809209172038e-06,
"loss": 0.4517,
"step": 197
},
{
"epoch": 0.6045801526717557,
"grad_norm": 1.1886570453643799,
"learning_rate": 9.958178823356503e-06,
"loss": 0.4336,
"step": 198
},
{
"epoch": 0.6076335877862595,
"grad_norm": 0.986052393913269,
"learning_rate": 9.957543670631743e-06,
"loss": 0.4359,
"step": 199
},
{
"epoch": 0.6106870229007634,
"grad_norm": 0.9521136283874512,
"learning_rate": 9.956903751608409e-06,
"loss": 0.4315,
"step": 200
},
{
"epoch": 0.6137404580152672,
"grad_norm": 0.9395862817764282,
"learning_rate": 9.956259066901733e-06,
"loss": 0.4161,
"step": 201
},
{
"epoch": 0.616793893129771,
"grad_norm": 0.7328633666038513,
"learning_rate": 9.95560961713153e-06,
"loss": 0.4499,
"step": 202
},
{
"epoch": 0.6198473282442748,
"grad_norm": 0.7823469638824463,
"learning_rate": 9.954955402922195e-06,
"loss": 0.4524,
"step": 203
},
{
"epoch": 0.6229007633587786,
"grad_norm": 0.7669162154197693,
"learning_rate": 9.954296424902709e-06,
"loss": 0.4104,
"step": 204
},
{
"epoch": 0.6259541984732825,
"grad_norm": 0.9158356785774231,
"learning_rate": 9.953632683706624e-06,
"loss": 0.3961,
"step": 205
},
{
"epoch": 0.6290076335877862,
"grad_norm": 1.0056244134902954,
"learning_rate": 9.95296417997208e-06,
"loss": 0.4439,
"step": 206
},
{
"epoch": 0.6320610687022901,
"grad_norm": 0.7471880912780762,
"learning_rate": 9.95229091434179e-06,
"loss": 0.3987,
"step": 207
},
{
"epoch": 0.6351145038167939,
"grad_norm": 0.6590108871459961,
"learning_rate": 9.95161288746305e-06,
"loss": 0.3796,
"step": 208
},
{
"epoch": 0.6381679389312978,
"grad_norm": 0.9819900989532471,
"learning_rate": 9.950930099987728e-06,
"loss": 0.421,
"step": 209
},
{
"epoch": 0.6412213740458015,
"grad_norm": 0.8542637228965759,
"learning_rate": 9.950242552572272e-06,
"loss": 0.4201,
"step": 210
},
{
"epoch": 0.6442748091603053,
"grad_norm": 0.7002215385437012,
"learning_rate": 9.949550245877708e-06,
"loss": 0.4095,
"step": 211
},
{
"epoch": 0.6473282442748092,
"grad_norm": 1.038978099822998,
"learning_rate": 9.948853180569635e-06,
"loss": 0.4619,
"step": 212
},
{
"epoch": 0.650381679389313,
"grad_norm": 0.7824814915657043,
"learning_rate": 9.948151357318228e-06,
"loss": 0.4418,
"step": 213
},
{
"epoch": 0.6534351145038167,
"grad_norm": 0.6705781817436218,
"learning_rate": 9.947444776798235e-06,
"loss": 0.3886,
"step": 214
},
{
"epoch": 0.6564885496183206,
"grad_norm": 1.4235440492630005,
"learning_rate": 9.946733439688982e-06,
"loss": 0.4247,
"step": 215
},
{
"epoch": 0.6595419847328244,
"grad_norm": 1.1739765405654907,
"learning_rate": 9.946017346674362e-06,
"loss": 0.4472,
"step": 216
},
{
"epoch": 0.6625954198473283,
"grad_norm": 0.766029417514801,
"learning_rate": 9.945296498442845e-06,
"loss": 0.4228,
"step": 217
},
{
"epoch": 0.665648854961832,
"grad_norm": 0.8829736709594727,
"learning_rate": 9.944570895687471e-06,
"loss": 0.3922,
"step": 218
},
{
"epoch": 0.6687022900763359,
"grad_norm": 0.9619208574295044,
"learning_rate": 9.943840539105853e-06,
"loss": 0.4503,
"step": 219
},
{
"epoch": 0.6717557251908397,
"grad_norm": 0.9508197903633118,
"learning_rate": 9.943105429400171e-06,
"loss": 0.4254,
"step": 220
},
{
"epoch": 0.6748091603053435,
"grad_norm": 1.1876355409622192,
"learning_rate": 9.942365567277178e-06,
"loss": 0.4649,
"step": 221
},
{
"epoch": 0.6778625954198473,
"grad_norm": 0.7676303386688232,
"learning_rate": 9.941620953448195e-06,
"loss": 0.4613,
"step": 222
},
{
"epoch": 0.6809160305343511,
"grad_norm": 3.2201735973358154,
"learning_rate": 9.940871588629108e-06,
"loss": 0.4383,
"step": 223
},
{
"epoch": 0.683969465648855,
"grad_norm": 0.652260959148407,
"learning_rate": 9.940117473540377e-06,
"loss": 0.4095,
"step": 224
},
{
"epoch": 0.6870229007633588,
"grad_norm": 0.7163742780685425,
"learning_rate": 9.939358608907026e-06,
"loss": 0.3928,
"step": 225
},
{
"epoch": 0.6900763358778625,
"grad_norm": 0.8624964356422424,
"learning_rate": 9.938594995458644e-06,
"loss": 0.4704,
"step": 226
},
{
"epoch": 0.6931297709923664,
"grad_norm": 1.098352074623108,
"learning_rate": 9.937826633929388e-06,
"loss": 0.416,
"step": 227
},
{
"epoch": 0.6961832061068702,
"grad_norm": 0.8400312066078186,
"learning_rate": 9.937053525057977e-06,
"loss": 0.477,
"step": 228
},
{
"epoch": 0.6992366412213741,
"grad_norm": 0.9534816741943359,
"learning_rate": 9.936275669587697e-06,
"loss": 0.4659,
"step": 229
},
{
"epoch": 0.7022900763358778,
"grad_norm": 0.7687210440635681,
"learning_rate": 9.935493068266396e-06,
"loss": 0.4648,
"step": 230
},
{
"epoch": 0.7053435114503817,
"grad_norm": 1.0610638856887817,
"learning_rate": 9.934705721846487e-06,
"loss": 0.4417,
"step": 231
},
{
"epoch": 0.7083969465648855,
"grad_norm": 0.649358332157135,
"learning_rate": 9.933913631084942e-06,
"loss": 0.4245,
"step": 232
},
{
"epoch": 0.7114503816793893,
"grad_norm": 0.7866373658180237,
"learning_rate": 9.933116796743294e-06,
"loss": 0.4502,
"step": 233
},
{
"epoch": 0.7145038167938931,
"grad_norm": 0.9572409391403198,
"learning_rate": 9.932315219587641e-06,
"loss": 0.3648,
"step": 234
},
{
"epoch": 0.7175572519083969,
"grad_norm": 0.7827803492546082,
"learning_rate": 9.931508900388635e-06,
"loss": 0.4785,
"step": 235
},
{
"epoch": 0.7206106870229008,
"grad_norm": 0.8105997443199158,
"learning_rate": 9.930697839921496e-06,
"loss": 0.4174,
"step": 236
},
{
"epoch": 0.7236641221374046,
"grad_norm": 1.267929196357727,
"learning_rate": 9.92988203896599e-06,
"loss": 0.4316,
"step": 237
},
{
"epoch": 0.7267175572519083,
"grad_norm": 1.0768303871154785,
"learning_rate": 9.929061498306448e-06,
"loss": 0.4047,
"step": 238
},
{
"epoch": 0.7297709923664122,
"grad_norm": 0.7765565514564514,
"learning_rate": 9.92823621873176e-06,
"loss": 0.4125,
"step": 239
},
{
"epoch": 0.732824427480916,
"grad_norm": 1.8877193927764893,
"learning_rate": 9.927406201035368e-06,
"loss": 0.4009,
"step": 240
},
{
"epoch": 0.7358778625954199,
"grad_norm": 1.2411426305770874,
"learning_rate": 9.926571446015271e-06,
"loss": 0.4586,
"step": 241
},
{
"epoch": 0.7389312977099237,
"grad_norm": 0.7646974921226501,
"learning_rate": 9.92573195447402e-06,
"loss": 0.3959,
"step": 242
},
{
"epoch": 0.7419847328244275,
"grad_norm": 0.7394517660140991,
"learning_rate": 9.924887727218724e-06,
"loss": 0.434,
"step": 243
},
{
"epoch": 0.7450381679389313,
"grad_norm": 1.249360203742981,
"learning_rate": 9.924038765061042e-06,
"loss": 0.4017,
"step": 244
},
{
"epoch": 0.7480916030534351,
"grad_norm": 1.315171718597412,
"learning_rate": 9.923185068817184e-06,
"loss": 0.4549,
"step": 245
},
{
"epoch": 0.751145038167939,
"grad_norm": 1.220099687576294,
"learning_rate": 9.922326639307918e-06,
"loss": 0.384,
"step": 246
},
{
"epoch": 0.7541984732824427,
"grad_norm": 0.902491569519043,
"learning_rate": 9.921463477358555e-06,
"loss": 0.4325,
"step": 247
},
{
"epoch": 0.7572519083969466,
"grad_norm": 1.9143394231796265,
"learning_rate": 9.920595583798959e-06,
"loss": 0.4474,
"step": 248
},
{
"epoch": 0.7603053435114504,
"grad_norm": 0.8942150473594666,
"learning_rate": 9.919722959463545e-06,
"loss": 0.4119,
"step": 249
},
{
"epoch": 0.7633587786259542,
"grad_norm": 0.843867301940918,
"learning_rate": 9.918845605191274e-06,
"loss": 0.454,
"step": 250
},
{
"epoch": 0.766412213740458,
"grad_norm": 1.2122715711593628,
"learning_rate": 9.917963521825653e-06,
"loss": 0.432,
"step": 251
},
{
"epoch": 0.7694656488549618,
"grad_norm": 0.9266217947006226,
"learning_rate": 9.917076710214739e-06,
"loss": 0.4474,
"step": 252
},
{
"epoch": 0.7725190839694657,
"grad_norm": 1.258772611618042,
"learning_rate": 9.916185171211135e-06,
"loss": 0.4236,
"step": 253
},
{
"epoch": 0.7755725190839695,
"grad_norm": 0.9514210224151611,
"learning_rate": 9.915288905671986e-06,
"loss": 0.4508,
"step": 254
},
{
"epoch": 0.7786259541984732,
"grad_norm": 1.780994176864624,
"learning_rate": 9.914387914458983e-06,
"loss": 0.4089,
"step": 255
},
{
"epoch": 0.7816793893129771,
"grad_norm": 0.9609716534614563,
"learning_rate": 9.913482198438357e-06,
"loss": 0.4062,
"step": 256
},
{
"epoch": 0.7847328244274809,
"grad_norm": 0.8735237717628479,
"learning_rate": 9.912571758480892e-06,
"loss": 0.4037,
"step": 257
},
{
"epoch": 0.7877862595419848,
"grad_norm": 0.8674682378768921,
"learning_rate": 9.911656595461899e-06,
"loss": 0.3985,
"step": 258
},
{
"epoch": 0.7908396946564885,
"grad_norm": 2.76031494140625,
"learning_rate": 9.910736710261238e-06,
"loss": 0.4157,
"step": 259
},
{
"epoch": 0.7938931297709924,
"grad_norm": 0.9837795495986938,
"learning_rate": 9.909812103763312e-06,
"loss": 0.4096,
"step": 260
},
{
"epoch": 0.7969465648854962,
"grad_norm": 0.7090920805931091,
"learning_rate": 9.908882776857057e-06,
"loss": 0.3918,
"step": 261
},
{
"epoch": 0.8,
"grad_norm": 0.9074463844299316,
"learning_rate": 9.90794873043595e-06,
"loss": 0.4744,
"step": 262
},
{
"epoch": 0.8030534351145038,
"grad_norm": 0.8347420692443848,
"learning_rate": 9.907009965398005e-06,
"loss": 0.4444,
"step": 263
},
{
"epoch": 0.8061068702290076,
"grad_norm": 1.0150105953216553,
"learning_rate": 9.906066482645774e-06,
"loss": 0.4281,
"step": 264
},
{
"epoch": 0.8091603053435115,
"grad_norm": 1.1246689558029175,
"learning_rate": 9.905118283086341e-06,
"loss": 0.4567,
"step": 265
},
{
"epoch": 0.8122137404580153,
"grad_norm": 0.8622527122497559,
"learning_rate": 9.904165367631329e-06,
"loss": 0.4515,
"step": 266
},
{
"epoch": 0.815267175572519,
"grad_norm": 0.8956477046012878,
"learning_rate": 9.903207737196892e-06,
"loss": 0.4366,
"step": 267
},
{
"epoch": 0.8183206106870229,
"grad_norm": 0.899315595626831,
"learning_rate": 9.902245392703719e-06,
"loss": 0.4171,
"step": 268
},
{
"epoch": 0.8213740458015267,
"grad_norm": 0.7298351526260376,
"learning_rate": 9.901278335077031e-06,
"loss": 0.4202,
"step": 269
},
{
"epoch": 0.8244274809160306,
"grad_norm": 0.950832188129425,
"learning_rate": 9.900306565246579e-06,
"loss": 0.426,
"step": 270
},
{
"epoch": 0.8274809160305343,
"grad_norm": 0.95467209815979,
"learning_rate": 9.899330084146646e-06,
"loss": 0.4366,
"step": 271
},
{
"epoch": 0.8305343511450382,
"grad_norm": 0.7660781741142273,
"learning_rate": 9.898348892716042e-06,
"loss": 0.4407,
"step": 272
},
{
"epoch": 0.833587786259542,
"grad_norm": 0.8174218535423279,
"learning_rate": 9.89736299189811e-06,
"loss": 0.4316,
"step": 273
},
{
"epoch": 0.8366412213740458,
"grad_norm": 0.7284188866615295,
"learning_rate": 9.896372382640718e-06,
"loss": 0.3861,
"step": 274
},
{
"epoch": 0.8396946564885496,
"grad_norm": 1.0651788711547852,
"learning_rate": 9.895377065896259e-06,
"loss": 0.4333,
"step": 275
},
{
"epoch": 0.8427480916030534,
"grad_norm": 0.9174491167068481,
"learning_rate": 9.894377042621654e-06,
"loss": 0.4467,
"step": 276
},
{
"epoch": 0.8458015267175573,
"grad_norm": 1.482020378112793,
"learning_rate": 9.89337231377835e-06,
"loss": 0.3747,
"step": 277
},
{
"epoch": 0.8488549618320611,
"grad_norm": 0.6591337323188782,
"learning_rate": 9.892362880332316e-06,
"loss": 0.4147,
"step": 278
},
{
"epoch": 0.8519083969465648,
"grad_norm": 0.6442196369171143,
"learning_rate": 9.891348743254046e-06,
"loss": 0.4069,
"step": 279
},
{
"epoch": 0.8549618320610687,
"grad_norm": 0.7692182660102844,
"learning_rate": 9.890329903518554e-06,
"loss": 0.4117,
"step": 280
},
{
"epoch": 0.8580152671755725,
"grad_norm": 0.7226704955101013,
"learning_rate": 9.889306362105377e-06,
"loss": 0.4367,
"step": 281
},
{
"epoch": 0.8610687022900764,
"grad_norm": 1.0700533390045166,
"learning_rate": 9.888278119998573e-06,
"loss": 0.3988,
"step": 282
},
{
"epoch": 0.8641221374045801,
"grad_norm": 1.5681344270706177,
"learning_rate": 9.887245178186715e-06,
"loss": 0.4167,
"step": 283
},
{
"epoch": 0.867175572519084,
"grad_norm": 1.0044033527374268,
"learning_rate": 9.886207537662899e-06,
"loss": 0.4742,
"step": 284
},
{
"epoch": 0.8702290076335878,
"grad_norm": 0.6717744469642639,
"learning_rate": 9.885165199424738e-06,
"loss": 0.4404,
"step": 285
},
{
"epoch": 0.8732824427480916,
"grad_norm": 0.9106744527816772,
"learning_rate": 9.884118164474359e-06,
"loss": 0.4748,
"step": 286
},
{
"epoch": 0.8763358778625954,
"grad_norm": 0.8167893290519714,
"learning_rate": 9.883066433818404e-06,
"loss": 0.4601,
"step": 287
},
{
"epoch": 0.8793893129770992,
"grad_norm": 1.2043495178222656,
"learning_rate": 9.882010008468038e-06,
"loss": 0.4383,
"step": 288
},
{
"epoch": 0.8824427480916031,
"grad_norm": 1.3362419605255127,
"learning_rate": 9.880948889438923e-06,
"loss": 0.4157,
"step": 289
},
{
"epoch": 0.8854961832061069,
"grad_norm": 0.9374348521232605,
"learning_rate": 9.879883077751255e-06,
"loss": 0.39,
"step": 290
},
{
"epoch": 0.8885496183206106,
"grad_norm": 0.939093291759491,
"learning_rate": 9.878812574429722e-06,
"loss": 0.4546,
"step": 291
},
{
"epoch": 0.8916030534351145,
"grad_norm": 0.7758873701095581,
"learning_rate": 9.877737380503534e-06,
"loss": 0.4539,
"step": 292
},
{
"epoch": 0.8946564885496183,
"grad_norm": 0.6936675906181335,
"learning_rate": 9.876657497006408e-06,
"loss": 0.3882,
"step": 293
},
{
"epoch": 0.8977099236641222,
"grad_norm": 1.144876480102539,
"learning_rate": 9.875572924976568e-06,
"loss": 0.4461,
"step": 294
},
{
"epoch": 0.9007633587786259,
"grad_norm": 0.8967270255088806,
"learning_rate": 9.874483665456746e-06,
"loss": 0.423,
"step": 295
},
{
"epoch": 0.9038167938931297,
"grad_norm": 0.77662193775177,
"learning_rate": 9.873389719494186e-06,
"loss": 0.4049,
"step": 296
},
{
"epoch": 0.9068702290076336,
"grad_norm": 0.8233901858329773,
"learning_rate": 9.87229108814063e-06,
"loss": 0.4294,
"step": 297
},
{
"epoch": 0.9099236641221374,
"grad_norm": 0.8502475619316101,
"learning_rate": 9.871187772452327e-06,
"loss": 0.4552,
"step": 298
},
{
"epoch": 0.9129770992366413,
"grad_norm": 0.6888173818588257,
"learning_rate": 9.870079773490033e-06,
"loss": 0.4183,
"step": 299
},
{
"epoch": 0.916030534351145,
"grad_norm": 0.8409755229949951,
"learning_rate": 9.868967092319003e-06,
"loss": 0.4211,
"step": 300
},
{
"epoch": 0.9190839694656489,
"grad_norm": 0.873248279094696,
"learning_rate": 9.867849730008994e-06,
"loss": 0.4645,
"step": 301
},
{
"epoch": 0.9221374045801527,
"grad_norm": 0.7314611673355103,
"learning_rate": 9.866727687634266e-06,
"loss": 0.3957,
"step": 302
},
{
"epoch": 0.9251908396946565,
"grad_norm": 1.0233852863311768,
"learning_rate": 9.865600966273576e-06,
"loss": 0.4668,
"step": 303
},
{
"epoch": 0.9282442748091603,
"grad_norm": 1.1218703985214233,
"learning_rate": 9.86446956701018e-06,
"loss": 0.4613,
"step": 304
},
{
"epoch": 0.9312977099236641,
"grad_norm": 0.7411604523658752,
"learning_rate": 9.86333349093183e-06,
"loss": 0.4002,
"step": 305
},
{
"epoch": 0.934351145038168,
"grad_norm": 0.8801771998405457,
"learning_rate": 9.86219273913078e-06,
"loss": 0.4846,
"step": 306
},
{
"epoch": 0.9374045801526718,
"grad_norm": 0.7666848301887512,
"learning_rate": 9.861047312703772e-06,
"loss": 0.4648,
"step": 307
},
{
"epoch": 0.9404580152671755,
"grad_norm": 0.8234301805496216,
"learning_rate": 9.859897212752049e-06,
"loss": 0.4135,
"step": 308
},
{
"epoch": 0.9435114503816794,
"grad_norm": 0.8180258274078369,
"learning_rate": 9.858742440381343e-06,
"loss": 0.4563,
"step": 309
},
{
"epoch": 0.9465648854961832,
"grad_norm": 0.7705149054527283,
"learning_rate": 9.857582996701878e-06,
"loss": 0.4111,
"step": 310
},
{
"epoch": 0.9496183206106871,
"grad_norm": 0.6623134016990662,
"learning_rate": 9.856418882828368e-06,
"loss": 0.4096,
"step": 311
},
{
"epoch": 0.9526717557251908,
"grad_norm": 0.694754421710968,
"learning_rate": 9.855250099880026e-06,
"loss": 0.4953,
"step": 312
},
{
"epoch": 0.9557251908396946,
"grad_norm": 0.7474799752235413,
"learning_rate": 9.854076648980543e-06,
"loss": 0.4018,
"step": 313
},
{
"epoch": 0.9587786259541985,
"grad_norm": 0.8107720017433167,
"learning_rate": 9.852898531258102e-06,
"loss": 0.4105,
"step": 314
},
{
"epoch": 0.9618320610687023,
"grad_norm": 0.7008181810379028,
"learning_rate": 9.851715747845372e-06,
"loss": 0.4417,
"step": 315
},
{
"epoch": 0.9648854961832061,
"grad_norm": 0.874311089515686,
"learning_rate": 9.850528299879513e-06,
"loss": 0.4983,
"step": 316
},
{
"epoch": 0.9679389312977099,
"grad_norm": 0.6784942746162415,
"learning_rate": 9.84933618850216e-06,
"loss": 0.431,
"step": 317
},
{
"epoch": 0.9709923664122138,
"grad_norm": 1.2246615886688232,
"learning_rate": 9.848139414859441e-06,
"loss": 0.425,
"step": 318
},
{
"epoch": 0.9740458015267176,
"grad_norm": 0.9437500238418579,
"learning_rate": 9.84693798010196e-06,
"loss": 0.4106,
"step": 319
},
{
"epoch": 0.9770992366412213,
"grad_norm": 1.246093511581421,
"learning_rate": 9.845731885384806e-06,
"loss": 0.4124,
"step": 320
},
{
"epoch": 0.9801526717557252,
"grad_norm": 0.8225599527359009,
"learning_rate": 9.844521131867546e-06,
"loss": 0.4222,
"step": 321
},
{
"epoch": 0.983206106870229,
"grad_norm": 0.804071843624115,
"learning_rate": 9.843305720714227e-06,
"loss": 0.4297,
"step": 322
},
{
"epoch": 0.9862595419847329,
"grad_norm": 1.0068086385726929,
"learning_rate": 9.842085653093372e-06,
"loss": 0.4181,
"step": 323
},
{
"epoch": 0.9893129770992366,
"grad_norm": 0.8640692234039307,
"learning_rate": 9.840860930177984e-06,
"loss": 0.4723,
"step": 324
},
{
"epoch": 0.9923664122137404,
"grad_norm": 0.835490345954895,
"learning_rate": 9.83963155314554e-06,
"loss": 0.4281,
"step": 325
},
{
"epoch": 0.9954198473282443,
"grad_norm": 0.8215777277946472,
"learning_rate": 9.838397523177993e-06,
"loss": 0.4257,
"step": 326
},
{
"epoch": 0.9984732824427481,
"grad_norm": 0.6857399940490723,
"learning_rate": 9.837158841461767e-06,
"loss": 0.3843,
"step": 327
},
{
"epoch": 1.001526717557252,
"grad_norm": 0.8264266848564148,
"learning_rate": 9.835915509187759e-06,
"loss": 0.4005,
"step": 328
},
{
"epoch": 1.0045801526717557,
"grad_norm": 0.7838205695152283,
"learning_rate": 9.834667527551341e-06,
"loss": 0.4026,
"step": 329
},
{
"epoch": 1.0076335877862594,
"grad_norm": 0.6124852299690247,
"learning_rate": 9.833414897752346e-06,
"loss": 0.3899,
"step": 330
},
{
"epoch": 1.0106870229007634,
"grad_norm": 0.6304617524147034,
"learning_rate": 9.832157620995088e-06,
"loss": 0.3771,
"step": 331
},
{
"epoch": 1.0137404580152671,
"grad_norm": 0.6198051571846008,
"learning_rate": 9.830895698488341e-06,
"loss": 0.3412,
"step": 332
},
{
"epoch": 1.016793893129771,
"grad_norm": 0.5759274959564209,
"learning_rate": 9.829629131445342e-06,
"loss": 0.3186,
"step": 333
},
{
"epoch": 1.0198473282442748,
"grad_norm": 0.5976182222366333,
"learning_rate": 9.828357921083803e-06,
"loss": 0.3863,
"step": 334
},
{
"epoch": 1.0229007633587786,
"grad_norm": 0.5571991205215454,
"learning_rate": 9.827082068625893e-06,
"loss": 0.3516,
"step": 335
},
{
"epoch": 1.0259541984732825,
"grad_norm": 0.6673964858055115,
"learning_rate": 9.825801575298248e-06,
"loss": 0.328,
"step": 336
},
{
"epoch": 1.0290076335877862,
"grad_norm": 0.7016775012016296,
"learning_rate": 9.824516442331963e-06,
"loss": 0.3493,
"step": 337
},
{
"epoch": 1.03206106870229,
"grad_norm": 0.6329908967018127,
"learning_rate": 9.823226670962598e-06,
"loss": 0.3676,
"step": 338
},
{
"epoch": 1.035114503816794,
"grad_norm": 0.8894324898719788,
"learning_rate": 9.821932262430164e-06,
"loss": 0.3821,
"step": 339
},
{
"epoch": 1.0381679389312977,
"grad_norm": 0.7619789838790894,
"learning_rate": 9.82063321797914e-06,
"loss": 0.3847,
"step": 340
},
{
"epoch": 1.0412213740458016,
"grad_norm": 1.1191351413726807,
"learning_rate": 9.819329538858458e-06,
"loss": 0.355,
"step": 341
},
{
"epoch": 1.0442748091603054,
"grad_norm": 0.7350041270256042,
"learning_rate": 9.818021226321502e-06,
"loss": 0.3354,
"step": 342
},
{
"epoch": 1.047328244274809,
"grad_norm": 0.6686594486236572,
"learning_rate": 9.816708281626116e-06,
"loss": 0.3602,
"step": 343
},
{
"epoch": 1.050381679389313,
"grad_norm": 0.772079586982727,
"learning_rate": 9.815390706034598e-06,
"loss": 0.3997,
"step": 344
},
{
"epoch": 1.0534351145038168,
"grad_norm": 0.6307922005653381,
"learning_rate": 9.814068500813692e-06,
"loss": 0.393,
"step": 345
},
{
"epoch": 1.0564885496183205,
"grad_norm": 0.7725545167922974,
"learning_rate": 9.812741667234599e-06,
"loss": 0.3759,
"step": 346
},
{
"epoch": 1.0595419847328245,
"grad_norm": 0.8863770365715027,
"learning_rate": 9.811410206572972e-06,
"loss": 0.3831,
"step": 347
},
{
"epoch": 1.0625954198473282,
"grad_norm": 0.7568827271461487,
"learning_rate": 9.8100741201089e-06,
"loss": 0.3662,
"step": 348
},
{
"epoch": 1.0656488549618321,
"grad_norm": 0.7503966689109802,
"learning_rate": 9.808733409126934e-06,
"loss": 0.3218,
"step": 349
},
{
"epoch": 1.0687022900763359,
"grad_norm": 0.5649372339248657,
"learning_rate": 9.807388074916064e-06,
"loss": 0.341,
"step": 350
},
{
"epoch": 1.0717557251908396,
"grad_norm": 0.649509608745575,
"learning_rate": 9.806038118769724e-06,
"loss": 0.361,
"step": 351
},
{
"epoch": 1.0748091603053436,
"grad_norm": 1.0561178922653198,
"learning_rate": 9.804683541985796e-06,
"loss": 0.3788,
"step": 352
},
{
"epoch": 1.0778625954198473,
"grad_norm": 0.6720334887504578,
"learning_rate": 9.803324345866599e-06,
"loss": 0.3382,
"step": 353
},
{
"epoch": 1.0809160305343513,
"grad_norm": 0.5649891495704651,
"learning_rate": 9.801960531718898e-06,
"loss": 0.3464,
"step": 354
},
{
"epoch": 1.083969465648855,
"grad_norm": 0.6778721809387207,
"learning_rate": 9.800592100853894e-06,
"loss": 0.3178,
"step": 355
},
{
"epoch": 1.0870229007633587,
"grad_norm": 0.6055477261543274,
"learning_rate": 9.79921905458723e-06,
"loss": 0.3981,
"step": 356
},
{
"epoch": 1.0900763358778627,
"grad_norm": 0.6305366158485413,
"learning_rate": 9.797841394238987e-06,
"loss": 0.3345,
"step": 357
},
{
"epoch": 1.0931297709923664,
"grad_norm": 0.7315085530281067,
"learning_rate": 9.796459121133675e-06,
"loss": 0.3073,
"step": 358
},
{
"epoch": 1.0961832061068701,
"grad_norm": 1.1462393999099731,
"learning_rate": 9.795072236600247e-06,
"loss": 0.3716,
"step": 359
},
{
"epoch": 1.099236641221374,
"grad_norm": 1.0482473373413086,
"learning_rate": 9.793680741972084e-06,
"loss": 0.3352,
"step": 360
},
{
"epoch": 1.1022900763358778,
"grad_norm": 0.89040607213974,
"learning_rate": 9.792284638587005e-06,
"loss": 0.3501,
"step": 361
},
{
"epoch": 1.1053435114503816,
"grad_norm": 1.0140873193740845,
"learning_rate": 9.790883927787254e-06,
"loss": 0.334,
"step": 362
},
{
"epoch": 1.1083969465648855,
"grad_norm": 0.7952347993850708,
"learning_rate": 9.789478610919508e-06,
"loss": 0.393,
"step": 363
},
{
"epoch": 1.1114503816793893,
"grad_norm": 0.6698347926139832,
"learning_rate": 9.78806868933487e-06,
"loss": 0.3467,
"step": 364
},
{
"epoch": 1.1145038167938932,
"grad_norm": 0.8345444202423096,
"learning_rate": 9.786654164388873e-06,
"loss": 0.3524,
"step": 365
},
{
"epoch": 1.117557251908397,
"grad_norm": 1.0636674165725708,
"learning_rate": 9.785235037441473e-06,
"loss": 0.3474,
"step": 366
},
{
"epoch": 1.1206106870229007,
"grad_norm": 0.6235001683235168,
"learning_rate": 9.783811309857057e-06,
"loss": 0.2783,
"step": 367
},
{
"epoch": 1.1236641221374046,
"grad_norm": 0.7113050222396851,
"learning_rate": 9.782382983004424e-06,
"loss": 0.3098,
"step": 368
},
{
"epoch": 1.1267175572519084,
"grad_norm": 0.5404059290885925,
"learning_rate": 9.780950058256802e-06,
"loss": 0.3233,
"step": 369
},
{
"epoch": 1.1297709923664123,
"grad_norm": 0.7777256369590759,
"learning_rate": 9.779512536991839e-06,
"loss": 0.319,
"step": 370
},
{
"epoch": 1.132824427480916,
"grad_norm": 0.6770484447479248,
"learning_rate": 9.778070420591603e-06,
"loss": 0.3604,
"step": 371
},
{
"epoch": 1.1358778625954198,
"grad_norm": 0.8889312744140625,
"learning_rate": 9.77662371044258e-06,
"loss": 0.3868,
"step": 372
},
{
"epoch": 1.1389312977099237,
"grad_norm": 0.9467371702194214,
"learning_rate": 9.775172407935664e-06,
"loss": 0.3422,
"step": 373
},
{
"epoch": 1.1419847328244275,
"grad_norm": 0.7681576013565063,
"learning_rate": 9.773716514466179e-06,
"loss": 0.4031,
"step": 374
},
{
"epoch": 1.1450381679389312,
"grad_norm": 1.1023520231246948,
"learning_rate": 9.77225603143385e-06,
"loss": 0.3794,
"step": 375
},
{
"epoch": 1.1480916030534352,
"grad_norm": 0.6105406880378723,
"learning_rate": 9.770790960242821e-06,
"loss": 0.3413,
"step": 376
},
{
"epoch": 1.151145038167939,
"grad_norm": 0.7057470083236694,
"learning_rate": 9.769321302301648e-06,
"loss": 0.3705,
"step": 377
},
{
"epoch": 1.1541984732824426,
"grad_norm": 0.7157896757125854,
"learning_rate": 9.767847059023292e-06,
"loss": 0.3177,
"step": 378
},
{
"epoch": 1.1572519083969466,
"grad_norm": 0.8052981495857239,
"learning_rate": 9.766368231825126e-06,
"loss": 0.3546,
"step": 379
},
{
"epoch": 1.1603053435114503,
"grad_norm": 0.9875938892364502,
"learning_rate": 9.764884822128928e-06,
"loss": 0.3358,
"step": 380
},
{
"epoch": 1.1633587786259543,
"grad_norm": 1.0223547220230103,
"learning_rate": 9.763396831360884e-06,
"loss": 0.3753,
"step": 381
},
{
"epoch": 1.166412213740458,
"grad_norm": 0.6884990334510803,
"learning_rate": 9.761904260951583e-06,
"loss": 0.3788,
"step": 382
},
{
"epoch": 1.1694656488549617,
"grad_norm": 0.6606012582778931,
"learning_rate": 9.760407112336016e-06,
"loss": 0.3476,
"step": 383
},
{
"epoch": 1.1725190839694657,
"grad_norm": 0.6487988829612732,
"learning_rate": 9.75890538695358e-06,
"loss": 0.3519,
"step": 384
},
{
"epoch": 1.1755725190839694,
"grad_norm": 1.325270414352417,
"learning_rate": 9.757399086248062e-06,
"loss": 0.3519,
"step": 385
},
{
"epoch": 1.1786259541984734,
"grad_norm": 0.676222562789917,
"learning_rate": 9.755888211667663e-06,
"loss": 0.3285,
"step": 386
},
{
"epoch": 1.1816793893129771,
"grad_norm": 0.8031539916992188,
"learning_rate": 9.75437276466497e-06,
"loss": 0.3633,
"step": 387
},
{
"epoch": 1.1847328244274808,
"grad_norm": 1.066544771194458,
"learning_rate": 9.752852746696968e-06,
"loss": 0.3686,
"step": 388
},
{
"epoch": 1.1877862595419848,
"grad_norm": 0.7430751323699951,
"learning_rate": 9.751328159225037e-06,
"loss": 0.3171,
"step": 389
},
{
"epoch": 1.1908396946564885,
"grad_norm": 0.6246421337127686,
"learning_rate": 9.749799003714954e-06,
"loss": 0.3576,
"step": 390
},
{
"epoch": 1.1938931297709923,
"grad_norm": 0.6136408448219299,
"learning_rate": 9.748265281636885e-06,
"loss": 0.3801,
"step": 391
},
{
"epoch": 1.1969465648854962,
"grad_norm": 0.6370484232902527,
"learning_rate": 9.746726994465383e-06,
"loss": 0.3203,
"step": 392
},
{
"epoch": 1.2,
"grad_norm": 0.7168248295783997,
"learning_rate": 9.745184143679398e-06,
"loss": 0.3418,
"step": 393
},
{
"epoch": 1.203053435114504,
"grad_norm": 0.7240697145462036,
"learning_rate": 9.743636730762259e-06,
"loss": 0.3356,
"step": 394
},
{
"epoch": 1.2061068702290076,
"grad_norm": 1.0867228507995605,
"learning_rate": 9.742084757201684e-06,
"loss": 0.3462,
"step": 395
},
{
"epoch": 1.2091603053435114,
"grad_norm": 1.1310479640960693,
"learning_rate": 9.74052822448978e-06,
"loss": 0.3912,
"step": 396
},
{
"epoch": 1.2122137404580153,
"grad_norm": 0.9177300930023193,
"learning_rate": 9.738967134123035e-06,
"loss": 0.3816,
"step": 397
},
{
"epoch": 1.215267175572519,
"grad_norm": 0.6733918190002441,
"learning_rate": 9.737401487602314e-06,
"loss": 0.3581,
"step": 398
},
{
"epoch": 1.218320610687023,
"grad_norm": 0.8870444297790527,
"learning_rate": 9.735831286432869e-06,
"loss": 0.3869,
"step": 399
},
{
"epoch": 1.2213740458015268,
"grad_norm": 0.6876736283302307,
"learning_rate": 9.734256532124326e-06,
"loss": 0.3585,
"step": 400
},
{
"epoch": 1.2244274809160305,
"grad_norm": 0.8272992968559265,
"learning_rate": 9.732677226190692e-06,
"loss": 0.3411,
"step": 401
},
{
"epoch": 1.2274809160305344,
"grad_norm": 0.9379627108573914,
"learning_rate": 9.731093370150349e-06,
"loss": 0.3573,
"step": 402
},
{
"epoch": 1.2305343511450382,
"grad_norm": 0.6424528360366821,
"learning_rate": 9.729504965526053e-06,
"loss": 0.3452,
"step": 403
},
{
"epoch": 1.233587786259542,
"grad_norm": 0.7687745690345764,
"learning_rate": 9.727912013844933e-06,
"loss": 0.3478,
"step": 404
},
{
"epoch": 1.2366412213740459,
"grad_norm": 0.6921530961990356,
"learning_rate": 9.72631451663849e-06,
"loss": 0.323,
"step": 405
},
{
"epoch": 1.2396946564885496,
"grad_norm": 0.6351541876792908,
"learning_rate": 9.724712475442597e-06,
"loss": 0.343,
"step": 406
},
{
"epoch": 1.2427480916030533,
"grad_norm": 0.6916157007217407,
"learning_rate": 9.72310589179749e-06,
"loss": 0.3855,
"step": 407
},
{
"epoch": 1.2458015267175573,
"grad_norm": 0.6268352270126343,
"learning_rate": 9.721494767247779e-06,
"loss": 0.3317,
"step": 408
},
{
"epoch": 1.248854961832061,
"grad_norm": 0.9611676335334778,
"learning_rate": 9.719879103342438e-06,
"loss": 0.3616,
"step": 409
},
{
"epoch": 1.2519083969465647,
"grad_norm": 0.9179072380065918,
"learning_rate": 9.718258901634802e-06,
"loss": 0.3659,
"step": 410
},
{
"epoch": 1.2549618320610687,
"grad_norm": 0.8182265758514404,
"learning_rate": 9.71663416368257e-06,
"loss": 0.369,
"step": 411
},
{
"epoch": 1.2580152671755724,
"grad_norm": 0.5990933775901794,
"learning_rate": 9.715004891047805e-06,
"loss": 0.3512,
"step": 412
},
{
"epoch": 1.2610687022900764,
"grad_norm": 0.5766075253486633,
"learning_rate": 9.71337108529693e-06,
"loss": 0.3502,
"step": 413
},
{
"epoch": 1.2641221374045801,
"grad_norm": 0.7638534903526306,
"learning_rate": 9.71173274800072e-06,
"loss": 0.3797,
"step": 414
},
{
"epoch": 1.267175572519084,
"grad_norm": 1.3837302923202515,
"learning_rate": 9.71008988073431e-06,
"loss": 0.3428,
"step": 415
},
{
"epoch": 1.2702290076335878,
"grad_norm": 0.6435381770133972,
"learning_rate": 9.708442485077197e-06,
"loss": 0.3932,
"step": 416
},
{
"epoch": 1.2732824427480915,
"grad_norm": 0.6114059090614319,
"learning_rate": 9.70679056261322e-06,
"loss": 0.3462,
"step": 417
},
{
"epoch": 1.2763358778625955,
"grad_norm": 0.6542776823043823,
"learning_rate": 9.70513411493058e-06,
"loss": 0.3469,
"step": 418
},
{
"epoch": 1.2793893129770992,
"grad_norm": 0.6148584485054016,
"learning_rate": 9.70347314362182e-06,
"loss": 0.3195,
"step": 419
},
{
"epoch": 1.282442748091603,
"grad_norm": 0.6419680714607239,
"learning_rate": 9.70180765028384e-06,
"loss": 0.3197,
"step": 420
},
{
"epoch": 1.285496183206107,
"grad_norm": 0.6210622191429138,
"learning_rate": 9.700137636517884e-06,
"loss": 0.3435,
"step": 421
},
{
"epoch": 1.2885496183206107,
"grad_norm": 0.6903261542320251,
"learning_rate": 9.698463103929542e-06,
"loss": 0.3439,
"step": 422
},
{
"epoch": 1.2916030534351144,
"grad_norm": 0.8320783972740173,
"learning_rate": 9.696784054128749e-06,
"loss": 0.3299,
"step": 423
},
{
"epoch": 1.2946564885496183,
"grad_norm": 0.7238710522651672,
"learning_rate": 9.695100488729784e-06,
"loss": 0.3872,
"step": 424
},
{
"epoch": 1.297709923664122,
"grad_norm": 0.8117603659629822,
"learning_rate": 9.693412409351264e-06,
"loss": 0.2986,
"step": 425
},
{
"epoch": 1.300763358778626,
"grad_norm": 0.7673251032829285,
"learning_rate": 9.691719817616148e-06,
"loss": 0.3624,
"step": 426
},
{
"epoch": 1.3038167938931298,
"grad_norm": 0.6230213642120361,
"learning_rate": 9.690022715151734e-06,
"loss": 0.3394,
"step": 427
},
{
"epoch": 1.3068702290076337,
"grad_norm": 0.640009880065918,
"learning_rate": 9.688321103589659e-06,
"loss": 0.3569,
"step": 428
},
{
"epoch": 1.3099236641221375,
"grad_norm": 0.7415749430656433,
"learning_rate": 9.686614984565888e-06,
"loss": 0.3472,
"step": 429
},
{
"epoch": 1.3129770992366412,
"grad_norm": 0.6077396273612976,
"learning_rate": 9.684904359720724e-06,
"loss": 0.3517,
"step": 430
},
{
"epoch": 1.3160305343511451,
"grad_norm": 0.7327889800071716,
"learning_rate": 9.683189230698804e-06,
"loss": 0.362,
"step": 431
},
{
"epoch": 1.3190839694656489,
"grad_norm": 0.6880279183387756,
"learning_rate": 9.681469599149093e-06,
"loss": 0.3203,
"step": 432
},
{
"epoch": 1.3221374045801526,
"grad_norm": 1.0457184314727783,
"learning_rate": 9.679745466724884e-06,
"loss": 0.3579,
"step": 433
},
{
"epoch": 1.3251908396946566,
"grad_norm": 1.067544937133789,
"learning_rate": 9.678016835083798e-06,
"loss": 0.3969,
"step": 434
},
{
"epoch": 1.3282442748091603,
"grad_norm": 0.6936712861061096,
"learning_rate": 9.676283705887783e-06,
"loss": 0.3523,
"step": 435
},
{
"epoch": 1.331297709923664,
"grad_norm": 0.7560036182403564,
"learning_rate": 9.674546080803109e-06,
"loss": 0.3543,
"step": 436
},
{
"epoch": 1.334351145038168,
"grad_norm": 1.575924038887024,
"learning_rate": 9.67280396150037e-06,
"loss": 0.3442,
"step": 437
},
{
"epoch": 1.3374045801526717,
"grad_norm": 0.8994946479797363,
"learning_rate": 9.671057349654481e-06,
"loss": 0.301,
"step": 438
},
{
"epoch": 1.3404580152671755,
"grad_norm": 0.8520591259002686,
"learning_rate": 9.669306246944674e-06,
"loss": 0.3519,
"step": 439
},
{
"epoch": 1.3435114503816794,
"grad_norm": 0.7266104817390442,
"learning_rate": 9.6675506550545e-06,
"loss": 0.3038,
"step": 440
},
{
"epoch": 1.3465648854961831,
"grad_norm": 1.08512544631958,
"learning_rate": 9.66579057567183e-06,
"loss": 0.4029,
"step": 441
},
{
"epoch": 1.349618320610687,
"grad_norm": 0.6626537442207336,
"learning_rate": 9.66402601048884e-06,
"loss": 0.4311,
"step": 442
},
{
"epoch": 1.3526717557251908,
"grad_norm": 0.6997970342636108,
"learning_rate": 9.662256961202028e-06,
"loss": 0.3558,
"step": 443
},
{
"epoch": 1.3557251908396948,
"grad_norm": 0.6189729571342468,
"learning_rate": 9.660483429512198e-06,
"loss": 0.3539,
"step": 444
},
{
"epoch": 1.3587786259541985,
"grad_norm": 0.5804331302642822,
"learning_rate": 9.658705417124466e-06,
"loss": 0.3167,
"step": 445
},
{
"epoch": 1.3618320610687022,
"grad_norm": 0.5655621290206909,
"learning_rate": 9.656922925748254e-06,
"loss": 0.3895,
"step": 446
},
{
"epoch": 1.3648854961832062,
"grad_norm": 0.7266117334365845,
"learning_rate": 9.65513595709729e-06,
"loss": 0.3329,
"step": 447
},
{
"epoch": 1.36793893129771,
"grad_norm": 1.2477047443389893,
"learning_rate": 9.653344512889608e-06,
"loss": 0.3462,
"step": 448
},
{
"epoch": 1.3709923664122137,
"grad_norm": 0.8985768556594849,
"learning_rate": 9.651548594847546e-06,
"loss": 0.3353,
"step": 449
},
{
"epoch": 1.3740458015267176,
"grad_norm": 0.9904614090919495,
"learning_rate": 9.649748204697741e-06,
"loss": 0.4153,
"step": 450
},
{
"epoch": 1.3770992366412214,
"grad_norm": 0.6207300424575806,
"learning_rate": 9.647943344171129e-06,
"loss": 0.345,
"step": 451
},
{
"epoch": 1.380152671755725,
"grad_norm": 0.7198036909103394,
"learning_rate": 9.646134015002946e-06,
"loss": 0.3564,
"step": 452
},
{
"epoch": 1.383206106870229,
"grad_norm": 0.7422304749488831,
"learning_rate": 9.644320218932723e-06,
"loss": 0.3577,
"step": 453
},
{
"epoch": 1.3862595419847328,
"grad_norm": 0.700497031211853,
"learning_rate": 9.642501957704287e-06,
"loss": 0.316,
"step": 454
},
{
"epoch": 1.3893129770992365,
"grad_norm": 0.8094924688339233,
"learning_rate": 9.640679233065755e-06,
"loss": 0.3644,
"step": 455
},
{
"epoch": 1.3923664122137405,
"grad_norm": 0.7094489932060242,
"learning_rate": 9.63885204676954e-06,
"loss": 0.3828,
"step": 456
},
{
"epoch": 1.3954198473282442,
"grad_norm": 0.5725051760673523,
"learning_rate": 9.637020400572339e-06,
"loss": 0.3779,
"step": 457
},
{
"epoch": 1.3984732824427482,
"grad_norm": 0.8379393219947815,
"learning_rate": 9.63518429623514e-06,
"loss": 0.3612,
"step": 458
},
{
"epoch": 1.4015267175572519,
"grad_norm": 1.0108810663223267,
"learning_rate": 9.63334373552322e-06,
"loss": 0.3477,
"step": 459
},
{
"epoch": 1.4045801526717558,
"grad_norm": 0.6375267505645752,
"learning_rate": 9.631498720206132e-06,
"loss": 0.3174,
"step": 460
},
{
"epoch": 1.4076335877862596,
"grad_norm": 0.7903940081596375,
"learning_rate": 9.62964925205772e-06,
"loss": 0.4022,
"step": 461
},
{
"epoch": 1.4106870229007633,
"grad_norm": 0.7919008731842041,
"learning_rate": 9.627795332856107e-06,
"loss": 0.3428,
"step": 462
},
{
"epoch": 1.4137404580152673,
"grad_norm": 0.7458226084709167,
"learning_rate": 9.625936964383691e-06,
"loss": 0.3438,
"step": 463
},
{
"epoch": 1.416793893129771,
"grad_norm": 0.684517502784729,
"learning_rate": 9.624074148427154e-06,
"loss": 0.3271,
"step": 464
},
{
"epoch": 1.4198473282442747,
"grad_norm": 0.7255269885063171,
"learning_rate": 9.622206886777448e-06,
"loss": 0.3289,
"step": 465
},
{
"epoch": 1.4229007633587787,
"grad_norm": 0.7821673154830933,
"learning_rate": 9.620335181229805e-06,
"loss": 0.3364,
"step": 466
},
{
"epoch": 1.4259541984732824,
"grad_norm": 0.628267765045166,
"learning_rate": 9.618459033583725e-06,
"loss": 0.3392,
"step": 467
},
{
"epoch": 1.4290076335877862,
"grad_norm": 0.6381856203079224,
"learning_rate": 9.616578445642982e-06,
"loss": 0.3087,
"step": 468
},
{
"epoch": 1.43206106870229,
"grad_norm": 0.6881396770477295,
"learning_rate": 9.614693419215613e-06,
"loss": 0.3826,
"step": 469
},
{
"epoch": 1.4351145038167938,
"grad_norm": 0.6835957765579224,
"learning_rate": 9.612803956113932e-06,
"loss": 0.3571,
"step": 470
},
{
"epoch": 1.4381679389312976,
"grad_norm": 1.3688271045684814,
"learning_rate": 9.61091005815451e-06,
"loss": 0.3666,
"step": 471
},
{
"epoch": 1.4412213740458015,
"grad_norm": 0.574308454990387,
"learning_rate": 9.609011727158184e-06,
"loss": 0.3365,
"step": 472
},
{
"epoch": 1.4442748091603053,
"grad_norm": 0.62713223695755,
"learning_rate": 9.607108964950056e-06,
"loss": 0.3326,
"step": 473
},
{
"epoch": 1.4473282442748092,
"grad_norm": 0.9176129102706909,
"learning_rate": 9.605201773359485e-06,
"loss": 0.3564,
"step": 474
},
{
"epoch": 1.450381679389313,
"grad_norm": 0.6832491159439087,
"learning_rate": 9.603290154220091e-06,
"loss": 0.3642,
"step": 475
},
{
"epoch": 1.453435114503817,
"grad_norm": 0.8406944870948792,
"learning_rate": 9.601374109369746e-06,
"loss": 0.3538,
"step": 476
},
{
"epoch": 1.4564885496183206,
"grad_norm": 0.6097620725631714,
"learning_rate": 9.599453640650585e-06,
"loss": 0.3923,
"step": 477
},
{
"epoch": 1.4595419847328244,
"grad_norm": 0.7544072270393372,
"learning_rate": 9.59752874990899e-06,
"loss": 0.4026,
"step": 478
},
{
"epoch": 1.4625954198473283,
"grad_norm": 0.6778220534324646,
"learning_rate": 9.595599438995593e-06,
"loss": 0.3441,
"step": 479
},
{
"epoch": 1.465648854961832,
"grad_norm": 0.5954076051712036,
"learning_rate": 9.59366570976528e-06,
"loss": 0.372,
"step": 480
},
{
"epoch": 1.4687022900763358,
"grad_norm": 0.82410728931427,
"learning_rate": 9.591727564077189e-06,
"loss": 0.3127,
"step": 481
},
{
"epoch": 1.4717557251908397,
"grad_norm": 0.664159893989563,
"learning_rate": 9.589785003794692e-06,
"loss": 0.3273,
"step": 482
},
{
"epoch": 1.4748091603053435,
"grad_norm": 0.6926347613334656,
"learning_rate": 9.587838030785413e-06,
"loss": 0.3866,
"step": 483
},
{
"epoch": 1.4778625954198472,
"grad_norm": 0.6716482043266296,
"learning_rate": 9.585886646921221e-06,
"loss": 0.3263,
"step": 484
},
{
"epoch": 1.4809160305343512,
"grad_norm": 0.7961567044258118,
"learning_rate": 9.583930854078219e-06,
"loss": 0.3344,
"step": 485
},
{
"epoch": 1.483969465648855,
"grad_norm": 0.6689973473548889,
"learning_rate": 9.581970654136752e-06,
"loss": 0.3404,
"step": 486
},
{
"epoch": 1.4870229007633589,
"grad_norm": 0.7548951506614685,
"learning_rate": 9.580006048981403e-06,
"loss": 0.3357,
"step": 487
},
{
"epoch": 1.4900763358778626,
"grad_norm": 0.5724946856498718,
"learning_rate": 9.578037040500992e-06,
"loss": 0.2948,
"step": 488
},
{
"epoch": 1.4931297709923665,
"grad_norm": 1.1794706583023071,
"learning_rate": 9.576063630588563e-06,
"loss": 0.2979,
"step": 489
},
{
"epoch": 1.4961832061068703,
"grad_norm": 0.6707073450088501,
"learning_rate": 9.574085821141406e-06,
"loss": 0.3705,
"step": 490
},
{
"epoch": 1.499236641221374,
"grad_norm": 0.850101888179779,
"learning_rate": 9.572103614061029e-06,
"loss": 0.3614,
"step": 491
},
{
"epoch": 1.502290076335878,
"grad_norm": 0.7582584023475647,
"learning_rate": 9.570117011253173e-06,
"loss": 0.3481,
"step": 492
},
{
"epoch": 1.5053435114503817,
"grad_norm": 0.6907809376716614,
"learning_rate": 9.568126014627805e-06,
"loss": 0.3423,
"step": 493
},
{
"epoch": 1.5083969465648854,
"grad_norm": 0.7220117449760437,
"learning_rate": 9.566130626099118e-06,
"loss": 0.3532,
"step": 494
},
{
"epoch": 1.5114503816793894,
"grad_norm": 0.8386178016662598,
"learning_rate": 9.56413084758552e-06,
"loss": 0.3528,
"step": 495
},
{
"epoch": 1.5145038167938931,
"grad_norm": 0.8298983573913574,
"learning_rate": 9.562126681009649e-06,
"loss": 0.3457,
"step": 496
},
{
"epoch": 1.5175572519083969,
"grad_norm": 0.7928427457809448,
"learning_rate": 9.560118128298355e-06,
"loss": 0.3519,
"step": 497
},
{
"epoch": 1.5206106870229008,
"grad_norm": 0.6914782524108887,
"learning_rate": 9.55810519138271e-06,
"loss": 0.3422,
"step": 498
},
{
"epoch": 1.5236641221374045,
"grad_norm": 0.867933988571167,
"learning_rate": 9.556087872197997e-06,
"loss": 0.3707,
"step": 499
},
{
"epoch": 1.5267175572519083,
"grad_norm": 0.9586713314056396,
"learning_rate": 9.554066172683715e-06,
"loss": 0.3295,
"step": 500
},
{
"epoch": 1.5297709923664122,
"grad_norm": 0.6676278710365295,
"learning_rate": 9.552040094783575e-06,
"loss": 0.3599,
"step": 501
},
{
"epoch": 1.5328244274809162,
"grad_norm": 0.8502694368362427,
"learning_rate": 9.550009640445492e-06,
"loss": 0.3544,
"step": 502
},
{
"epoch": 1.5358778625954197,
"grad_norm": 0.8230335116386414,
"learning_rate": 9.547974811621594e-06,
"loss": 0.3534,
"step": 503
},
{
"epoch": 1.5389312977099237,
"grad_norm": 0.7689422369003296,
"learning_rate": 9.545935610268213e-06,
"loss": 0.2933,
"step": 504
},
{
"epoch": 1.5419847328244276,
"grad_norm": 0.7558886408805847,
"learning_rate": 9.543892038345885e-06,
"loss": 0.3512,
"step": 505
},
{
"epoch": 1.5450381679389313,
"grad_norm": 0.7310518026351929,
"learning_rate": 9.541844097819347e-06,
"loss": 0.3541,
"step": 506
},
{
"epoch": 1.548091603053435,
"grad_norm": 0.6750447154045105,
"learning_rate": 9.53979179065754e-06,
"loss": 0.3738,
"step": 507
},
{
"epoch": 1.551145038167939,
"grad_norm": 0.5590270161628723,
"learning_rate": 9.537735118833595e-06,
"loss": 0.3364,
"step": 508
},
{
"epoch": 1.5541984732824428,
"grad_norm": 0.667302668094635,
"learning_rate": 9.53567408432485e-06,
"loss": 0.3698,
"step": 509
},
{
"epoch": 1.5572519083969465,
"grad_norm": 0.8501421213150024,
"learning_rate": 9.533608689112827e-06,
"loss": 0.4109,
"step": 510
},
{
"epoch": 1.5603053435114504,
"grad_norm": 1.017300009727478,
"learning_rate": 9.531538935183252e-06,
"loss": 0.3671,
"step": 511
},
{
"epoch": 1.5633587786259542,
"grad_norm": 0.8065325021743774,
"learning_rate": 9.529464824526027e-06,
"loss": 0.3762,
"step": 512
},
{
"epoch": 1.566412213740458,
"grad_norm": 0.8767975568771362,
"learning_rate": 9.527386359135254e-06,
"loss": 0.3431,
"step": 513
},
{
"epoch": 1.5694656488549619,
"grad_norm": 0.7164638638496399,
"learning_rate": 9.525303541009218e-06,
"loss": 0.3858,
"step": 514
},
{
"epoch": 1.5725190839694656,
"grad_norm": 0.7097347974777222,
"learning_rate": 9.523216372150393e-06,
"loss": 0.3087,
"step": 515
},
{
"epoch": 1.5755725190839693,
"grad_norm": 0.654172956943512,
"learning_rate": 9.521124854565425e-06,
"loss": 0.3403,
"step": 516
},
{
"epoch": 1.5786259541984733,
"grad_norm": 0.7420234084129333,
"learning_rate": 9.519028990265153e-06,
"loss": 0.3569,
"step": 517
},
{
"epoch": 1.5816793893129772,
"grad_norm": 0.6324269771575928,
"learning_rate": 9.516928781264588e-06,
"loss": 0.3767,
"step": 518
},
{
"epoch": 1.5847328244274808,
"grad_norm": 2.079932689666748,
"learning_rate": 9.514824229582922e-06,
"loss": 0.3544,
"step": 519
},
{
"epoch": 1.5877862595419847,
"grad_norm": 0.525637686252594,
"learning_rate": 9.512715337243517e-06,
"loss": 0.3348,
"step": 520
},
{
"epoch": 1.5908396946564887,
"grad_norm": 0.7853607535362244,
"learning_rate": 9.510602106273914e-06,
"loss": 0.3584,
"step": 521
},
{
"epoch": 1.5938931297709924,
"grad_norm": 0.7089791893959045,
"learning_rate": 9.508484538705823e-06,
"loss": 0.38,
"step": 522
},
{
"epoch": 1.5969465648854961,
"grad_norm": 0.6065333485603333,
"learning_rate": 9.506362636575122e-06,
"loss": 0.3528,
"step": 523
},
{
"epoch": 1.6,
"grad_norm": 0.6693193912506104,
"learning_rate": 9.504236401921856e-06,
"loss": 0.3473,
"step": 524
},
{
"epoch": 1.6030534351145038,
"grad_norm": 0.8753147721290588,
"learning_rate": 9.50210583679024e-06,
"loss": 0.3586,
"step": 525
},
{
"epoch": 1.6061068702290076,
"grad_norm": 0.7226424813270569,
"learning_rate": 9.499970943228646e-06,
"loss": 0.3522,
"step": 526
},
{
"epoch": 1.6091603053435115,
"grad_norm": 0.672460675239563,
"learning_rate": 9.497831723289615e-06,
"loss": 0.3431,
"step": 527
},
{
"epoch": 1.6122137404580152,
"grad_norm": 0.6836286783218384,
"learning_rate": 9.495688179029838e-06,
"loss": 0.3965,
"step": 528
},
{
"epoch": 1.615267175572519,
"grad_norm": 0.7151774764060974,
"learning_rate": 9.493540312510173e-06,
"loss": 0.3791,
"step": 529
},
{
"epoch": 1.618320610687023,
"grad_norm": 0.5929725766181946,
"learning_rate": 9.491388125795623e-06,
"loss": 0.3398,
"step": 530
},
{
"epoch": 1.6213740458015267,
"grad_norm": 0.6961609125137329,
"learning_rate": 9.48923162095536e-06,
"loss": 0.3692,
"step": 531
},
{
"epoch": 1.6244274809160304,
"grad_norm": 0.7646557092666626,
"learning_rate": 9.487070800062689e-06,
"loss": 0.3279,
"step": 532
},
{
"epoch": 1.6274809160305344,
"grad_norm": 0.587727427482605,
"learning_rate": 9.48490566519508e-06,
"loss": 0.3422,
"step": 533
},
{
"epoch": 1.6305343511450383,
"grad_norm": 0.5824351906776428,
"learning_rate": 9.482736218434144e-06,
"loss": 0.3243,
"step": 534
},
{
"epoch": 1.6335877862595418,
"grad_norm": 0.6902999877929688,
"learning_rate": 9.480562461865634e-06,
"loss": 0.3366,
"step": 535
},
{
"epoch": 1.6366412213740458,
"grad_norm": 0.6403490304946899,
"learning_rate": 9.478384397579452e-06,
"loss": 0.3853,
"step": 536
},
{
"epoch": 1.6396946564885497,
"grad_norm": 0.5644521713256836,
"learning_rate": 9.476202027669644e-06,
"loss": 0.3181,
"step": 537
},
{
"epoch": 1.6427480916030535,
"grad_norm": 0.8028286099433899,
"learning_rate": 9.474015354234385e-06,
"loss": 0.3509,
"step": 538
},
{
"epoch": 1.6458015267175572,
"grad_norm": 0.8197933435440063,
"learning_rate": 9.471824379375998e-06,
"loss": 0.3453,
"step": 539
},
{
"epoch": 1.6488549618320612,
"grad_norm": 0.6274479627609253,
"learning_rate": 9.469629105200937e-06,
"loss": 0.3616,
"step": 540
},
{
"epoch": 1.6519083969465649,
"grad_norm": 0.5881496667861938,
"learning_rate": 9.46742953381979e-06,
"loss": 0.3009,
"step": 541
},
{
"epoch": 1.6549618320610686,
"grad_norm": 0.6625784039497375,
"learning_rate": 9.465225667347275e-06,
"loss": 0.3739,
"step": 542
},
{
"epoch": 1.6580152671755726,
"grad_norm": 0.6975505352020264,
"learning_rate": 9.463017507902245e-06,
"loss": 0.3643,
"step": 543
},
{
"epoch": 1.6610687022900763,
"grad_norm": 0.972888171672821,
"learning_rate": 9.460805057607671e-06,
"loss": 0.3836,
"step": 544
},
{
"epoch": 1.66412213740458,
"grad_norm": 0.6268885731697083,
"learning_rate": 9.458588318590659e-06,
"loss": 0.3618,
"step": 545
},
{
"epoch": 1.667175572519084,
"grad_norm": 0.7843685150146484,
"learning_rate": 9.45636729298243e-06,
"loss": 0.3675,
"step": 546
},
{
"epoch": 1.670229007633588,
"grad_norm": 0.6543605923652649,
"learning_rate": 9.45414198291833e-06,
"loss": 0.3878,
"step": 547
},
{
"epoch": 1.6732824427480915,
"grad_norm": 0.6596100330352783,
"learning_rate": 9.451912390537828e-06,
"loss": 0.4148,
"step": 548
},
{
"epoch": 1.6763358778625954,
"grad_norm": 0.6652075052261353,
"learning_rate": 9.449678517984503e-06,
"loss": 0.3349,
"step": 549
},
{
"epoch": 1.6793893129770994,
"grad_norm": 0.7370254993438721,
"learning_rate": 9.447440367406053e-06,
"loss": 0.3383,
"step": 550
},
{
"epoch": 1.682442748091603,
"grad_norm": 0.6308868527412415,
"learning_rate": 9.445197940954292e-06,
"loss": 0.3314,
"step": 551
},
{
"epoch": 1.6854961832061068,
"grad_norm": 0.5558772087097168,
"learning_rate": 9.442951240785135e-06,
"loss": 0.3555,
"step": 552
},
{
"epoch": 1.6885496183206108,
"grad_norm": 0.5412786602973938,
"learning_rate": 9.440700269058617e-06,
"loss": 0.3496,
"step": 553
},
{
"epoch": 1.6916030534351145,
"grad_norm": 0.6621171236038208,
"learning_rate": 9.438445027938873e-06,
"loss": 0.3663,
"step": 554
},
{
"epoch": 1.6946564885496183,
"grad_norm": 0.646080493927002,
"learning_rate": 9.436185519594145e-06,
"loss": 0.3062,
"step": 555
},
{
"epoch": 1.6977099236641222,
"grad_norm": 0.698538601398468,
"learning_rate": 9.433921746196777e-06,
"loss": 0.3879,
"step": 556
},
{
"epoch": 1.700763358778626,
"grad_norm": 0.764561116695404,
"learning_rate": 9.431653709923214e-06,
"loss": 0.3478,
"step": 557
},
{
"epoch": 1.7038167938931297,
"grad_norm": 0.811100423336029,
"learning_rate": 9.429381412954e-06,
"loss": 0.3476,
"step": 558
},
{
"epoch": 1.7068702290076336,
"grad_norm": 0.6622545719146729,
"learning_rate": 9.427104857473773e-06,
"loss": 0.3858,
"step": 559
},
{
"epoch": 1.7099236641221374,
"grad_norm": 0.7208548784255981,
"learning_rate": 9.424824045671267e-06,
"loss": 0.3322,
"step": 560
},
{
"epoch": 1.712977099236641,
"grad_norm": 0.6171053647994995,
"learning_rate": 9.422538979739307e-06,
"loss": 0.3208,
"step": 561
},
{
"epoch": 1.716030534351145,
"grad_norm": 0.7387359142303467,
"learning_rate": 9.420249661874812e-06,
"loss": 0.3634,
"step": 562
},
{
"epoch": 1.719083969465649,
"grad_norm": 0.6727861166000366,
"learning_rate": 9.417956094278784e-06,
"loss": 0.3747,
"step": 563
},
{
"epoch": 1.7221374045801525,
"grad_norm": 0.6635738611221313,
"learning_rate": 9.415658279156312e-06,
"loss": 0.3112,
"step": 564
},
{
"epoch": 1.7251908396946565,
"grad_norm": 0.846847414970398,
"learning_rate": 9.41335621871657e-06,
"loss": 0.3365,
"step": 565
},
{
"epoch": 1.7282442748091604,
"grad_norm": 0.8118543028831482,
"learning_rate": 9.41104991517281e-06,
"loss": 0.3676,
"step": 566
},
{
"epoch": 1.7312977099236642,
"grad_norm": 0.730090320110321,
"learning_rate": 9.408739370742372e-06,
"loss": 0.3228,
"step": 567
},
{
"epoch": 1.734351145038168,
"grad_norm": 0.6440478563308716,
"learning_rate": 9.406424587646664e-06,
"loss": 0.3354,
"step": 568
},
{
"epoch": 1.7374045801526719,
"grad_norm": 0.7165378928184509,
"learning_rate": 9.404105568111173e-06,
"loss": 0.3518,
"step": 569
},
{
"epoch": 1.7404580152671756,
"grad_norm": 1.6431069374084473,
"learning_rate": 9.401782314365458e-06,
"loss": 0.3724,
"step": 570
},
{
"epoch": 1.7435114503816793,
"grad_norm": 1.1977882385253906,
"learning_rate": 9.39945482864315e-06,
"loss": 0.337,
"step": 571
},
{
"epoch": 1.7465648854961833,
"grad_norm": 0.8207442760467529,
"learning_rate": 9.39712311318195e-06,
"loss": 0.353,
"step": 572
},
{
"epoch": 1.749618320610687,
"grad_norm": 0.8047831058502197,
"learning_rate": 9.39478717022362e-06,
"loss": 0.3562,
"step": 573
},
{
"epoch": 1.7526717557251907,
"grad_norm": 0.8080096244812012,
"learning_rate": 9.392447002013996e-06,
"loss": 0.3484,
"step": 574
},
{
"epoch": 1.7557251908396947,
"grad_norm": 0.5786929130554199,
"learning_rate": 9.390102610802965e-06,
"loss": 0.3165,
"step": 575
},
{
"epoch": 1.7587786259541984,
"grad_norm": 0.633268415927887,
"learning_rate": 9.387753998844482e-06,
"loss": 0.3173,
"step": 576
},
{
"epoch": 1.7618320610687022,
"grad_norm": 0.9037169814109802,
"learning_rate": 9.385401168396558e-06,
"loss": 0.3436,
"step": 577
},
{
"epoch": 1.7648854961832061,
"grad_norm": 0.6977824568748474,
"learning_rate": 9.383044121721257e-06,
"loss": 0.3473,
"step": 578
},
{
"epoch": 1.76793893129771,
"grad_norm": 0.6818469166755676,
"learning_rate": 9.380682861084703e-06,
"loss": 0.3649,
"step": 579
},
{
"epoch": 1.7709923664122136,
"grad_norm": 1.125177264213562,
"learning_rate": 9.378317388757062e-06,
"loss": 0.3355,
"step": 580
},
{
"epoch": 1.7740458015267175,
"grad_norm": 0.876139223575592,
"learning_rate": 9.375947707012558e-06,
"loss": 0.3974,
"step": 581
},
{
"epoch": 1.7770992366412215,
"grad_norm": 0.5720423460006714,
"learning_rate": 9.37357381812946e-06,
"loss": 0.349,
"step": 582
},
{
"epoch": 1.7801526717557252,
"grad_norm": 0.7903271317481995,
"learning_rate": 9.371195724390075e-06,
"loss": 0.3719,
"step": 583
},
{
"epoch": 1.783206106870229,
"grad_norm": 0.7093537449836731,
"learning_rate": 9.368813428080763e-06,
"loss": 0.364,
"step": 584
},
{
"epoch": 1.786259541984733,
"grad_norm": 0.9907206296920776,
"learning_rate": 9.366426931491917e-06,
"loss": 0.3387,
"step": 585
},
{
"epoch": 1.7893129770992366,
"grad_norm": 0.9979354739189148,
"learning_rate": 9.364036236917972e-06,
"loss": 0.3838,
"step": 586
},
{
"epoch": 1.7923664122137404,
"grad_norm": 0.6941604614257812,
"learning_rate": 9.361641346657396e-06,
"loss": 0.4038,
"step": 587
},
{
"epoch": 1.7954198473282443,
"grad_norm": 0.7062864303588867,
"learning_rate": 9.359242263012693e-06,
"loss": 0.3489,
"step": 588
},
{
"epoch": 1.798473282442748,
"grad_norm": 0.6261604428291321,
"learning_rate": 9.356838988290401e-06,
"loss": 0.343,
"step": 589
},
{
"epoch": 1.8015267175572518,
"grad_norm": 0.571570098400116,
"learning_rate": 9.354431524801082e-06,
"loss": 0.3222,
"step": 590
},
{
"epoch": 1.8045801526717558,
"grad_norm": 0.6786599159240723,
"learning_rate": 9.352019874859326e-06,
"loss": 0.3547,
"step": 591
},
{
"epoch": 1.8076335877862595,
"grad_norm": 0.5915871262550354,
"learning_rate": 9.349604040783754e-06,
"loss": 0.3537,
"step": 592
},
{
"epoch": 1.8106870229007632,
"grad_norm": 1.6428577899932861,
"learning_rate": 9.347184024897003e-06,
"loss": 0.2964,
"step": 593
},
{
"epoch": 1.8137404580152672,
"grad_norm": 0.9900272488594055,
"learning_rate": 9.344759829525734e-06,
"loss": 0.3649,
"step": 594
},
{
"epoch": 1.8167938931297711,
"grad_norm": 0.8548586964607239,
"learning_rate": 9.342331457000621e-06,
"loss": 0.3506,
"step": 595
},
{
"epoch": 1.8198473282442749,
"grad_norm": 0.7072058916091919,
"learning_rate": 9.339898909656364e-06,
"loss": 0.3663,
"step": 596
},
{
"epoch": 1.8229007633587786,
"grad_norm": 0.5867598056793213,
"learning_rate": 9.33746218983167e-06,
"loss": 0.3503,
"step": 597
},
{
"epoch": 1.8259541984732826,
"grad_norm": 0.714758574962616,
"learning_rate": 9.335021299869256e-06,
"loss": 0.4388,
"step": 598
},
{
"epoch": 1.8290076335877863,
"grad_norm": 0.6253016591072083,
"learning_rate": 9.332576242115852e-06,
"loss": 0.3561,
"step": 599
},
{
"epoch": 1.83206106870229,
"grad_norm": 0.8419109582901001,
"learning_rate": 9.330127018922195e-06,
"loss": 0.3345,
"step": 600
},
{
"epoch": 1.835114503816794,
"grad_norm": 0.5708754062652588,
"learning_rate": 9.327673632643021e-06,
"loss": 0.3337,
"step": 601
},
{
"epoch": 1.8381679389312977,
"grad_norm": 0.8046593070030212,
"learning_rate": 9.32521608563708e-06,
"loss": 0.3828,
"step": 602
},
{
"epoch": 1.8412213740458014,
"grad_norm": 0.8696548342704773,
"learning_rate": 9.32275438026711e-06,
"loss": 0.3518,
"step": 603
},
{
"epoch": 1.8442748091603054,
"grad_norm": 0.7207351922988892,
"learning_rate": 9.320288518899853e-06,
"loss": 0.3504,
"step": 604
},
{
"epoch": 1.8473282442748091,
"grad_norm": 0.611146092414856,
"learning_rate": 9.317818503906046e-06,
"loss": 0.3893,
"step": 605
},
{
"epoch": 1.8503816793893129,
"grad_norm": 0.6432805061340332,
"learning_rate": 9.315344337660422e-06,
"loss": 0.3534,
"step": 606
},
{
"epoch": 1.8534351145038168,
"grad_norm": 0.7569887638092041,
"learning_rate": 9.312866022541697e-06,
"loss": 0.3661,
"step": 607
},
{
"epoch": 1.8564885496183208,
"grad_norm": 0.781062126159668,
"learning_rate": 9.310383560932587e-06,
"loss": 0.346,
"step": 608
},
{
"epoch": 1.8595419847328243,
"grad_norm": 1.1158342361450195,
"learning_rate": 9.307896955219787e-06,
"loss": 0.3526,
"step": 609
},
{
"epoch": 1.8625954198473282,
"grad_norm": 0.6134322881698608,
"learning_rate": 9.305406207793974e-06,
"loss": 0.335,
"step": 610
},
{
"epoch": 1.8656488549618322,
"grad_norm": 0.6235983371734619,
"learning_rate": 9.302911321049818e-06,
"loss": 0.3579,
"step": 611
},
{
"epoch": 1.868702290076336,
"grad_norm": 0.5590095520019531,
"learning_rate": 9.300412297385954e-06,
"loss": 0.3365,
"step": 612
},
{
"epoch": 1.8717557251908397,
"grad_norm": 0.6658725142478943,
"learning_rate": 9.297909139205005e-06,
"loss": 0.33,
"step": 613
},
{
"epoch": 1.8748091603053436,
"grad_norm": 0.6123189330101013,
"learning_rate": 9.295401848913569e-06,
"loss": 0.3516,
"step": 614
},
{
"epoch": 1.8778625954198473,
"grad_norm": 0.7374983429908752,
"learning_rate": 9.29289042892221e-06,
"loss": 0.3599,
"step": 615
},
{
"epoch": 1.880916030534351,
"grad_norm": 1.2691185474395752,
"learning_rate": 9.290374881645465e-06,
"loss": 0.3624,
"step": 616
},
{
"epoch": 1.883969465648855,
"grad_norm": 0.5804101824760437,
"learning_rate": 9.287855209501844e-06,
"loss": 0.354,
"step": 617
},
{
"epoch": 1.8870229007633588,
"grad_norm": 0.8695326447486877,
"learning_rate": 9.285331414913816e-06,
"loss": 0.3463,
"step": 618
},
{
"epoch": 1.8900763358778625,
"grad_norm": 0.5883278846740723,
"learning_rate": 9.282803500307818e-06,
"loss": 0.3054,
"step": 619
},
{
"epoch": 1.8931297709923665,
"grad_norm": 0.7179763317108154,
"learning_rate": 9.280271468114243e-06,
"loss": 0.3555,
"step": 620
},
{
"epoch": 1.8961832061068702,
"grad_norm": 0.7432013750076294,
"learning_rate": 9.277735320767449e-06,
"loss": 0.3546,
"step": 621
},
{
"epoch": 1.899236641221374,
"grad_norm": 0.6553454399108887,
"learning_rate": 9.275195060705749e-06,
"loss": 0.3729,
"step": 622
},
{
"epoch": 1.9022900763358779,
"grad_norm": 0.6198839545249939,
"learning_rate": 9.272650690371403e-06,
"loss": 0.3352,
"step": 623
},
{
"epoch": 1.9053435114503818,
"grad_norm": 0.9423778057098389,
"learning_rate": 9.270102212210632e-06,
"loss": 0.4039,
"step": 624
},
{
"epoch": 1.9083969465648853,
"grad_norm": 0.7900342345237732,
"learning_rate": 9.267549628673603e-06,
"loss": 0.3281,
"step": 625
},
{
"epoch": 1.9114503816793893,
"grad_norm": 0.8445188403129578,
"learning_rate": 9.264992942214427e-06,
"loss": 0.3228,
"step": 626
},
{
"epoch": 1.9145038167938933,
"grad_norm": 0.6197785139083862,
"learning_rate": 9.262432155291167e-06,
"loss": 0.3679,
"step": 627
},
{
"epoch": 1.917557251908397,
"grad_norm": 0.5474112629890442,
"learning_rate": 9.25986727036582e-06,
"loss": 0.3699,
"step": 628
},
{
"epoch": 1.9206106870229007,
"grad_norm": 0.6815104484558105,
"learning_rate": 9.257298289904324e-06,
"loss": 0.3509,
"step": 629
},
{
"epoch": 1.9236641221374047,
"grad_norm": 0.7210928201675415,
"learning_rate": 9.254725216376562e-06,
"loss": 0.379,
"step": 630
},
{
"epoch": 1.9267175572519084,
"grad_norm": 0.8056854605674744,
"learning_rate": 9.252148052256343e-06,
"loss": 0.3843,
"step": 631
},
{
"epoch": 1.9297709923664121,
"grad_norm": 0.7688245177268982,
"learning_rate": 9.249566800021417e-06,
"loss": 0.4191,
"step": 632
},
{
"epoch": 1.932824427480916,
"grad_norm": 0.6108483672142029,
"learning_rate": 9.246981462153456e-06,
"loss": 0.3157,
"step": 633
},
{
"epoch": 1.9358778625954198,
"grad_norm": 0.6627204418182373,
"learning_rate": 9.244392041138068e-06,
"loss": 0.3523,
"step": 634
},
{
"epoch": 1.9389312977099236,
"grad_norm": 0.7666142582893372,
"learning_rate": 9.24179853946478e-06,
"loss": 0.3966,
"step": 635
},
{
"epoch": 1.9419847328244275,
"grad_norm": 0.7710684537887573,
"learning_rate": 9.239200959627048e-06,
"loss": 0.3738,
"step": 636
},
{
"epoch": 1.9450381679389313,
"grad_norm": 0.8858833312988281,
"learning_rate": 9.236599304122246e-06,
"loss": 0.3372,
"step": 637
},
{
"epoch": 1.948091603053435,
"grad_norm": 0.7985635995864868,
"learning_rate": 9.233993575451663e-06,
"loss": 0.352,
"step": 638
},
{
"epoch": 1.951145038167939,
"grad_norm": 0.5600748062133789,
"learning_rate": 9.231383776120512e-06,
"loss": 0.349,
"step": 639
},
{
"epoch": 1.954198473282443,
"grad_norm": 0.6206453442573547,
"learning_rate": 9.228769908637912e-06,
"loss": 0.3808,
"step": 640
},
{
"epoch": 1.9572519083969464,
"grad_norm": 0.5247876644134521,
"learning_rate": 9.226151975516897e-06,
"loss": 0.333,
"step": 641
},
{
"epoch": 1.9603053435114504,
"grad_norm": 0.7726635336875916,
"learning_rate": 9.223529979274411e-06,
"loss": 0.3469,
"step": 642
},
{
"epoch": 1.9633587786259543,
"grad_norm": 0.7760521173477173,
"learning_rate": 9.220903922431302e-06,
"loss": 0.3708,
"step": 643
},
{
"epoch": 1.966412213740458,
"grad_norm": 0.8805868625640869,
"learning_rate": 9.218273807512318e-06,
"loss": 0.3371,
"step": 644
},
{
"epoch": 1.9694656488549618,
"grad_norm": 0.616565465927124,
"learning_rate": 9.215639637046121e-06,
"loss": 0.377,
"step": 645
},
{
"epoch": 1.9725190839694657,
"grad_norm": 0.8857041597366333,
"learning_rate": 9.213001413565259e-06,
"loss": 0.3731,
"step": 646
},
{
"epoch": 1.9755725190839695,
"grad_norm": 1.0865479707717896,
"learning_rate": 9.210359139606183e-06,
"loss": 0.3517,
"step": 647
},
{
"epoch": 1.9786259541984732,
"grad_norm": 0.7575787305831909,
"learning_rate": 9.207712817709237e-06,
"loss": 0.347,
"step": 648
},
{
"epoch": 1.9816793893129772,
"grad_norm": 0.7119488716125488,
"learning_rate": 9.205062450418655e-06,
"loss": 0.3369,
"step": 649
},
{
"epoch": 1.984732824427481,
"grad_norm": 0.6280040144920349,
"learning_rate": 9.202408040282567e-06,
"loss": 0.378,
"step": 650
},
{
"epoch": 1.9877862595419846,
"grad_norm": 0.6118285059928894,
"learning_rate": 9.19974958985298e-06,
"loss": 0.3752,
"step": 651
},
{
"epoch": 1.9908396946564886,
"grad_norm": 0.7066951990127563,
"learning_rate": 9.197087101685794e-06,
"loss": 0.3694,
"step": 652
},
{
"epoch": 1.9938931297709923,
"grad_norm": 0.7283354997634888,
"learning_rate": 9.194420578340785e-06,
"loss": 0.3502,
"step": 653
},
{
"epoch": 1.996946564885496,
"grad_norm": 0.7202633619308472,
"learning_rate": 9.191750022381613e-06,
"loss": 0.3882,
"step": 654
},
{
"epoch": 2.0,
"grad_norm": 0.8570371270179749,
"learning_rate": 9.189075436375813e-06,
"loss": 0.3417,
"step": 655
},
{
"epoch": 2.003053435114504,
"grad_norm": 0.57913738489151,
"learning_rate": 9.186396822894792e-06,
"loss": 0.2884,
"step": 656
},
{
"epoch": 2.0061068702290075,
"grad_norm": 0.5783664584159851,
"learning_rate": 9.183714184513832e-06,
"loss": 0.2373,
"step": 657
},
{
"epoch": 2.0091603053435114,
"grad_norm": 0.5738585591316223,
"learning_rate": 9.181027523812088e-06,
"loss": 0.2687,
"step": 658
},
{
"epoch": 2.0122137404580154,
"grad_norm": 0.5343250632286072,
"learning_rate": 9.178336843372576e-06,
"loss": 0.2308,
"step": 659
},
{
"epoch": 2.015267175572519,
"grad_norm": 0.5259422063827515,
"learning_rate": 9.175642145782179e-06,
"loss": 0.2826,
"step": 660
},
{
"epoch": 2.018320610687023,
"grad_norm": 0.5994972586631775,
"learning_rate": 9.172943433631642e-06,
"loss": 0.2419,
"step": 661
},
{
"epoch": 2.021374045801527,
"grad_norm": 0.7430315613746643,
"learning_rate": 9.170240709515573e-06,
"loss": 0.2651,
"step": 662
},
{
"epoch": 2.0244274809160308,
"grad_norm": 0.5850917100906372,
"learning_rate": 9.16753397603243e-06,
"loss": 0.2281,
"step": 663
},
{
"epoch": 2.0274809160305343,
"grad_norm": 0.7387170791625977,
"learning_rate": 9.164823235784535e-06,
"loss": 0.2729,
"step": 664
},
{
"epoch": 2.030534351145038,
"grad_norm": 0.7842447757720947,
"learning_rate": 9.162108491378051e-06,
"loss": 0.2585,
"step": 665
},
{
"epoch": 2.033587786259542,
"grad_norm": 0.6147224307060242,
"learning_rate": 9.159389745423003e-06,
"loss": 0.266,
"step": 666
},
{
"epoch": 2.0366412213740457,
"grad_norm": 0.748267650604248,
"learning_rate": 9.156667000533251e-06,
"loss": 0.2727,
"step": 667
},
{
"epoch": 2.0396946564885496,
"grad_norm": 0.7011958360671997,
"learning_rate": 9.153940259326511e-06,
"loss": 0.235,
"step": 668
},
{
"epoch": 2.0427480916030536,
"grad_norm": 0.6798514723777771,
"learning_rate": 9.151209524424333e-06,
"loss": 0.2458,
"step": 669
},
{
"epoch": 2.045801526717557,
"grad_norm": 0.6440619826316833,
"learning_rate": 9.14847479845211e-06,
"loss": 0.2906,
"step": 670
},
{
"epoch": 2.048854961832061,
"grad_norm": 0.5712239742279053,
"learning_rate": 9.145736084039073e-06,
"loss": 0.2263,
"step": 671
},
{
"epoch": 2.051908396946565,
"grad_norm": 0.6084513664245605,
"learning_rate": 9.142993383818284e-06,
"loss": 0.2743,
"step": 672
},
{
"epoch": 2.0549618320610685,
"grad_norm": 0.5967226624488831,
"learning_rate": 9.14024670042664e-06,
"loss": 0.252,
"step": 673
},
{
"epoch": 2.0580152671755725,
"grad_norm": 0.6971317529678345,
"learning_rate": 9.137496036504868e-06,
"loss": 0.2514,
"step": 674
},
{
"epoch": 2.0610687022900764,
"grad_norm": 0.5542774796485901,
"learning_rate": 9.134741394697517e-06,
"loss": 0.2222,
"step": 675
},
{
"epoch": 2.06412213740458,
"grad_norm": 0.6813351511955261,
"learning_rate": 9.131982777652967e-06,
"loss": 0.2867,
"step": 676
},
{
"epoch": 2.067175572519084,
"grad_norm": 0.5970659852027893,
"learning_rate": 9.129220188023419e-06,
"loss": 0.2316,
"step": 677
},
{
"epoch": 2.070229007633588,
"grad_norm": 0.6117345690727234,
"learning_rate": 9.126453628464889e-06,
"loss": 0.238,
"step": 678
},
{
"epoch": 2.073282442748092,
"grad_norm": 0.6005414128303528,
"learning_rate": 9.12368310163721e-06,
"loss": 0.2379,
"step": 679
},
{
"epoch": 2.0763358778625953,
"grad_norm": 0.8919030427932739,
"learning_rate": 9.120908610204036e-06,
"loss": 0.2645,
"step": 680
},
{
"epoch": 2.0793893129770993,
"grad_norm": 0.7377813458442688,
"learning_rate": 9.118130156832823e-06,
"loss": 0.2618,
"step": 681
},
{
"epoch": 2.0824427480916032,
"grad_norm": 0.8021283745765686,
"learning_rate": 9.115347744194844e-06,
"loss": 0.2499,
"step": 682
},
{
"epoch": 2.0854961832061067,
"grad_norm": 0.6470867991447449,
"learning_rate": 9.112561374965177e-06,
"loss": 0.2783,
"step": 683
},
{
"epoch": 2.0885496183206107,
"grad_norm": 0.685403048992157,
"learning_rate": 9.109771051822702e-06,
"loss": 0.2674,
"step": 684
},
{
"epoch": 2.0916030534351147,
"grad_norm": 1.0172556638717651,
"learning_rate": 9.106976777450099e-06,
"loss": 0.2403,
"step": 685
},
{
"epoch": 2.094656488549618,
"grad_norm": 0.627636730670929,
"learning_rate": 9.10417855453385e-06,
"loss": 0.2772,
"step": 686
},
{
"epoch": 2.097709923664122,
"grad_norm": 0.6921853423118591,
"learning_rate": 9.10137638576423e-06,
"loss": 0.2921,
"step": 687
},
{
"epoch": 2.100763358778626,
"grad_norm": 0.5929262042045593,
"learning_rate": 9.098570273835314e-06,
"loss": 0.2268,
"step": 688
},
{
"epoch": 2.1038167938931296,
"grad_norm": 0.6772569417953491,
"learning_rate": 9.09576022144496e-06,
"loss": 0.2409,
"step": 689
},
{
"epoch": 2.1068702290076335,
"grad_norm": 0.6173834204673767,
"learning_rate": 9.09294623129482e-06,
"loss": 0.2176,
"step": 690
},
{
"epoch": 2.1099236641221375,
"grad_norm": 0.6309353113174438,
"learning_rate": 9.090128306090329e-06,
"loss": 0.2852,
"step": 691
},
{
"epoch": 2.112977099236641,
"grad_norm": 2.1778008937835693,
"learning_rate": 9.087306448540707e-06,
"loss": 0.2438,
"step": 692
},
{
"epoch": 2.116030534351145,
"grad_norm": 0.6542166471481323,
"learning_rate": 9.084480661358954e-06,
"loss": 0.2771,
"step": 693
},
{
"epoch": 2.119083969465649,
"grad_norm": 1.0429388284683228,
"learning_rate": 9.081650947261847e-06,
"loss": 0.2564,
"step": 694
},
{
"epoch": 2.122137404580153,
"grad_norm": 0.6057903170585632,
"learning_rate": 9.07881730896994e-06,
"loss": 0.2573,
"step": 695
},
{
"epoch": 2.1251908396946564,
"grad_norm": 0.5958608984947205,
"learning_rate": 9.07597974920756e-06,
"loss": 0.2802,
"step": 696
},
{
"epoch": 2.1282442748091603,
"grad_norm": 0.8294628262519836,
"learning_rate": 9.073138270702804e-06,
"loss": 0.2499,
"step": 697
},
{
"epoch": 2.1312977099236643,
"grad_norm": 0.5929973125457764,
"learning_rate": 9.070292876187532e-06,
"loss": 0.2497,
"step": 698
},
{
"epoch": 2.134351145038168,
"grad_norm": 0.665277361869812,
"learning_rate": 9.067443568397378e-06,
"loss": 0.2657,
"step": 699
},
{
"epoch": 2.1374045801526718,
"grad_norm": 0.7754712104797363,
"learning_rate": 9.06459035007173e-06,
"loss": 0.3086,
"step": 700
},
{
"epoch": 2.1404580152671757,
"grad_norm": 1.2111577987670898,
"learning_rate": 9.061733223953738e-06,
"loss": 0.2479,
"step": 701
},
{
"epoch": 2.1435114503816792,
"grad_norm": 0.6212031841278076,
"learning_rate": 9.058872192790314e-06,
"loss": 0.2875,
"step": 702
},
{
"epoch": 2.146564885496183,
"grad_norm": 0.5979281067848206,
"learning_rate": 9.056007259332115e-06,
"loss": 0.2898,
"step": 703
},
{
"epoch": 2.149618320610687,
"grad_norm": 1.1491578817367554,
"learning_rate": 9.053138426333562e-06,
"loss": 0.2675,
"step": 704
},
{
"epoch": 2.1526717557251906,
"grad_norm": 0.49965476989746094,
"learning_rate": 9.05026569655281e-06,
"loss": 0.2566,
"step": 705
},
{
"epoch": 2.1557251908396946,
"grad_norm": 0.797118604183197,
"learning_rate": 9.047389072751777e-06,
"loss": 0.2464,
"step": 706
},
{
"epoch": 2.1587786259541986,
"grad_norm": 0.7342759966850281,
"learning_rate": 9.044508557696111e-06,
"loss": 0.2467,
"step": 707
},
{
"epoch": 2.1618320610687025,
"grad_norm": 0.637121319770813,
"learning_rate": 9.041624154155208e-06,
"loss": 0.2773,
"step": 708
},
{
"epoch": 2.164885496183206,
"grad_norm": 0.6539364457130432,
"learning_rate": 9.038735864902201e-06,
"loss": 0.2157,
"step": 709
},
{
"epoch": 2.16793893129771,
"grad_norm": 0.6470495462417603,
"learning_rate": 9.035843692713961e-06,
"loss": 0.2528,
"step": 710
},
{
"epoch": 2.170992366412214,
"grad_norm": 0.6460850834846497,
"learning_rate": 9.032947640371086e-06,
"loss": 0.234,
"step": 711
},
{
"epoch": 2.1740458015267174,
"grad_norm": 0.7023307085037231,
"learning_rate": 9.030047710657912e-06,
"loss": 0.2081,
"step": 712
},
{
"epoch": 2.1770992366412214,
"grad_norm": 0.5989711284637451,
"learning_rate": 9.027143906362499e-06,
"loss": 0.2408,
"step": 713
},
{
"epoch": 2.1801526717557254,
"grad_norm": 0.633488118648529,
"learning_rate": 9.02423623027663e-06,
"loss": 0.2887,
"step": 714
},
{
"epoch": 2.183206106870229,
"grad_norm": 0.7098293304443359,
"learning_rate": 9.021324685195814e-06,
"loss": 0.2621,
"step": 715
},
{
"epoch": 2.186259541984733,
"grad_norm": 0.6813624501228333,
"learning_rate": 9.018409273919279e-06,
"loss": 0.2866,
"step": 716
},
{
"epoch": 2.189312977099237,
"grad_norm": 0.5214574337005615,
"learning_rate": 9.01548999924997e-06,
"loss": 0.2519,
"step": 717
},
{
"epoch": 2.1923664122137403,
"grad_norm": 0.6458209156990051,
"learning_rate": 9.012566863994548e-06,
"loss": 0.2564,
"step": 718
},
{
"epoch": 2.1954198473282442,
"grad_norm": 0.5464696288108826,
"learning_rate": 9.00963987096338e-06,
"loss": 0.2371,
"step": 719
},
{
"epoch": 2.198473282442748,
"grad_norm": 0.5468431711196899,
"learning_rate": 9.006709022970547e-06,
"loss": 0.2424,
"step": 720
},
{
"epoch": 2.2015267175572517,
"grad_norm": 0.8053451180458069,
"learning_rate": 9.003774322833835e-06,
"loss": 0.2497,
"step": 721
},
{
"epoch": 2.2045801526717557,
"grad_norm": 0.5873537659645081,
"learning_rate": 9.000835773374733e-06,
"loss": 0.2607,
"step": 722
},
{
"epoch": 2.2076335877862596,
"grad_norm": 0.8296732306480408,
"learning_rate": 8.997893377418432e-06,
"loss": 0.2692,
"step": 723
},
{
"epoch": 2.210687022900763,
"grad_norm": 0.5793552994728088,
"learning_rate": 8.99494713779382e-06,
"loss": 0.2532,
"step": 724
},
{
"epoch": 2.213740458015267,
"grad_norm": 0.6494630575180054,
"learning_rate": 8.991997057333481e-06,
"loss": 0.2353,
"step": 725
},
{
"epoch": 2.216793893129771,
"grad_norm": 0.6542250514030457,
"learning_rate": 8.98904313887369e-06,
"loss": 0.2673,
"step": 726
},
{
"epoch": 2.219847328244275,
"grad_norm": 0.6260940432548523,
"learning_rate": 8.986085385254417e-06,
"loss": 0.2565,
"step": 727
},
{
"epoch": 2.2229007633587785,
"grad_norm": 0.6449838876724243,
"learning_rate": 8.983123799319312e-06,
"loss": 0.2788,
"step": 728
},
{
"epoch": 2.2259541984732825,
"grad_norm": 0.7502977848052979,
"learning_rate": 8.980158383915714e-06,
"loss": 0.2789,
"step": 729
},
{
"epoch": 2.2290076335877864,
"grad_norm": 0.7007535696029663,
"learning_rate": 8.977189141894645e-06,
"loss": 0.2286,
"step": 730
},
{
"epoch": 2.23206106870229,
"grad_norm": 0.6351751089096069,
"learning_rate": 8.9742160761108e-06,
"loss": 0.2766,
"step": 731
},
{
"epoch": 2.235114503816794,
"grad_norm": 0.6929914355278015,
"learning_rate": 8.971239189422555e-06,
"loss": 0.2718,
"step": 732
},
{
"epoch": 2.238167938931298,
"grad_norm": 0.5510519742965698,
"learning_rate": 8.968258484691961e-06,
"loss": 0.2412,
"step": 733
},
{
"epoch": 2.2412213740458014,
"grad_norm": 0.5474069118499756,
"learning_rate": 8.965273964784735e-06,
"loss": 0.265,
"step": 734
},
{
"epoch": 2.2442748091603053,
"grad_norm": 2.7446393966674805,
"learning_rate": 8.962285632570266e-06,
"loss": 0.3012,
"step": 735
},
{
"epoch": 2.2473282442748093,
"grad_norm": 0.6975182294845581,
"learning_rate": 8.959293490921606e-06,
"loss": 0.2669,
"step": 736
},
{
"epoch": 2.2503816793893128,
"grad_norm": 0.6742974519729614,
"learning_rate": 8.956297542715469e-06,
"loss": 0.2793,
"step": 737
},
{
"epoch": 2.2534351145038167,
"grad_norm": 0.5771411061286926,
"learning_rate": 8.953297790832231e-06,
"loss": 0.2674,
"step": 738
},
{
"epoch": 2.2564885496183207,
"grad_norm": 0.9962130188941956,
"learning_rate": 8.950294238155924e-06,
"loss": 0.283,
"step": 739
},
{
"epoch": 2.2595419847328246,
"grad_norm": 0.6573465466499329,
"learning_rate": 8.947286887574234e-06,
"loss": 0.2569,
"step": 740
},
{
"epoch": 2.262595419847328,
"grad_norm": 0.6087695360183716,
"learning_rate": 8.944275741978495e-06,
"loss": 0.2407,
"step": 741
},
{
"epoch": 2.265648854961832,
"grad_norm": 0.7028131484985352,
"learning_rate": 8.941260804263697e-06,
"loss": 0.2568,
"step": 742
},
{
"epoch": 2.268702290076336,
"grad_norm": 0.5812912583351135,
"learning_rate": 8.938242077328469e-06,
"loss": 0.2401,
"step": 743
},
{
"epoch": 2.2717557251908396,
"grad_norm": 0.6277469396591187,
"learning_rate": 8.935219564075087e-06,
"loss": 0.267,
"step": 744
},
{
"epoch": 2.2748091603053435,
"grad_norm": 0.6467286348342896,
"learning_rate": 8.932193267409465e-06,
"loss": 0.2383,
"step": 745
},
{
"epoch": 2.2778625954198475,
"grad_norm": 0.9729459881782532,
"learning_rate": 8.929163190241157e-06,
"loss": 0.2881,
"step": 746
},
{
"epoch": 2.280916030534351,
"grad_norm": 0.6068406105041504,
"learning_rate": 8.92612933548335e-06,
"loss": 0.2799,
"step": 747
},
{
"epoch": 2.283969465648855,
"grad_norm": 0.6279863119125366,
"learning_rate": 8.923091706052863e-06,
"loss": 0.2628,
"step": 748
},
{
"epoch": 2.287022900763359,
"grad_norm": 0.7431386113166809,
"learning_rate": 8.920050304870142e-06,
"loss": 0.2327,
"step": 749
},
{
"epoch": 2.2900763358778624,
"grad_norm": 0.5755142569541931,
"learning_rate": 8.917005134859263e-06,
"loss": 0.2776,
"step": 750
},
{
"epoch": 2.2931297709923664,
"grad_norm": 0.6022436022758484,
"learning_rate": 8.913956198947923e-06,
"loss": 0.2558,
"step": 751
},
{
"epoch": 2.2961832061068703,
"grad_norm": 0.5605312585830688,
"learning_rate": 8.910903500067443e-06,
"loss": 0.2314,
"step": 752
},
{
"epoch": 2.2992366412213743,
"grad_norm": 1.0554085969924927,
"learning_rate": 8.907847041152757e-06,
"loss": 0.2527,
"step": 753
},
{
"epoch": 2.302290076335878,
"grad_norm": 0.5970160365104675,
"learning_rate": 8.904786825142416e-06,
"loss": 0.2613,
"step": 754
},
{
"epoch": 2.3053435114503817,
"grad_norm": 0.6039701104164124,
"learning_rate": 8.901722854978582e-06,
"loss": 0.2706,
"step": 755
},
{
"epoch": 2.3083969465648853,
"grad_norm": 1.0748103857040405,
"learning_rate": 8.89865513360703e-06,
"loss": 0.2639,
"step": 756
},
{
"epoch": 2.311450381679389,
"grad_norm": 0.5857123136520386,
"learning_rate": 8.89558366397714e-06,
"loss": 0.2563,
"step": 757
},
{
"epoch": 2.314503816793893,
"grad_norm": 0.6082928776741028,
"learning_rate": 8.892508449041893e-06,
"loss": 0.2673,
"step": 758
},
{
"epoch": 2.317557251908397,
"grad_norm": 0.6259625554084778,
"learning_rate": 8.889429491757872e-06,
"loss": 0.2518,
"step": 759
},
{
"epoch": 2.3206106870229006,
"grad_norm": 0.5631780028343201,
"learning_rate": 8.88634679508526e-06,
"loss": 0.2473,
"step": 760
},
{
"epoch": 2.3236641221374046,
"grad_norm": 0.5699191093444824,
"learning_rate": 8.883260361987833e-06,
"loss": 0.2727,
"step": 761
},
{
"epoch": 2.3267175572519085,
"grad_norm": 0.6020009517669678,
"learning_rate": 8.88017019543296e-06,
"loss": 0.2828,
"step": 762
},
{
"epoch": 2.329770992366412,
"grad_norm": 0.5712583661079407,
"learning_rate": 8.8770762983916e-06,
"loss": 0.2663,
"step": 763
},
{
"epoch": 2.332824427480916,
"grad_norm": 1.0059030055999756,
"learning_rate": 8.8739786738383e-06,
"loss": 0.2717,
"step": 764
},
{
"epoch": 2.33587786259542,
"grad_norm": 0.4975337088108063,
"learning_rate": 8.870877324751186e-06,
"loss": 0.233,
"step": 765
},
{
"epoch": 2.3389312977099235,
"grad_norm": 0.6985334753990173,
"learning_rate": 8.867772254111966e-06,
"loss": 0.2471,
"step": 766
},
{
"epoch": 2.3419847328244274,
"grad_norm": 0.6654549241065979,
"learning_rate": 8.864663464905933e-06,
"loss": 0.2842,
"step": 767
},
{
"epoch": 2.3450381679389314,
"grad_norm": 0.6422243118286133,
"learning_rate": 8.861550960121946e-06,
"loss": 0.259,
"step": 768
},
{
"epoch": 2.348091603053435,
"grad_norm": 0.613527774810791,
"learning_rate": 8.85843474275244e-06,
"loss": 0.2921,
"step": 769
},
{
"epoch": 2.351145038167939,
"grad_norm": 0.5522801876068115,
"learning_rate": 8.85531481579342e-06,
"loss": 0.2357,
"step": 770
},
{
"epoch": 2.354198473282443,
"grad_norm": 0.6118313074111938,
"learning_rate": 8.852191182244456e-06,
"loss": 0.2738,
"step": 771
},
{
"epoch": 2.3572519083969468,
"grad_norm": 0.62444669008255,
"learning_rate": 8.849063845108685e-06,
"loss": 0.2394,
"step": 772
},
{
"epoch": 2.3603053435114503,
"grad_norm": 0.6405894756317139,
"learning_rate": 8.8459328073928e-06,
"loss": 0.2597,
"step": 773
},
{
"epoch": 2.3633587786259542,
"grad_norm": 0.640144944190979,
"learning_rate": 8.842798072107055e-06,
"loss": 0.2814,
"step": 774
},
{
"epoch": 2.366412213740458,
"grad_norm": 0.5746338367462158,
"learning_rate": 8.839659642265259e-06,
"loss": 0.2738,
"step": 775
},
{
"epoch": 2.3694656488549617,
"grad_norm": 0.5756247043609619,
"learning_rate": 8.836517520884768e-06,
"loss": 0.2609,
"step": 776
},
{
"epoch": 2.3725190839694656,
"grad_norm": 0.567839503288269,
"learning_rate": 8.833371710986493e-06,
"loss": 0.2597,
"step": 777
},
{
"epoch": 2.3755725190839696,
"grad_norm": 0.5718839764595032,
"learning_rate": 8.83022221559489e-06,
"loss": 0.2311,
"step": 778
},
{
"epoch": 2.378625954198473,
"grad_norm": 0.6920585632324219,
"learning_rate": 8.827069037737958e-06,
"loss": 0.2251,
"step": 779
},
{
"epoch": 2.381679389312977,
"grad_norm": 0.6274014711380005,
"learning_rate": 8.823912180447237e-06,
"loss": 0.2808,
"step": 780
},
{
"epoch": 2.384732824427481,
"grad_norm": 0.841109573841095,
"learning_rate": 8.820751646757798e-06,
"loss": 0.2514,
"step": 781
},
{
"epoch": 2.3877862595419845,
"grad_norm": 0.5258099436759949,
"learning_rate": 8.81758743970826e-06,
"loss": 0.2368,
"step": 782
},
{
"epoch": 2.3908396946564885,
"grad_norm": 0.5547391772270203,
"learning_rate": 8.81441956234076e-06,
"loss": 0.2677,
"step": 783
},
{
"epoch": 2.3938931297709924,
"grad_norm": 0.5909098386764526,
"learning_rate": 8.81124801770097e-06,
"loss": 0.2814,
"step": 784
},
{
"epoch": 2.3969465648854964,
"grad_norm": 0.5489515662193298,
"learning_rate": 8.80807280883809e-06,
"loss": 0.2393,
"step": 785
},
{
"epoch": 2.4,
"grad_norm": 0.677237868309021,
"learning_rate": 8.804893938804839e-06,
"loss": 0.2536,
"step": 786
},
{
"epoch": 2.403053435114504,
"grad_norm": 0.547006368637085,
"learning_rate": 8.801711410657456e-06,
"loss": 0.2918,
"step": 787
},
{
"epoch": 2.406106870229008,
"grad_norm": 0.5593461990356445,
"learning_rate": 8.7985252274557e-06,
"loss": 0.2333,
"step": 788
},
{
"epoch": 2.4091603053435113,
"grad_norm": 0.6684463620185852,
"learning_rate": 8.795335392262841e-06,
"loss": 0.2593,
"step": 789
},
{
"epoch": 2.4122137404580153,
"grad_norm": 0.6524422764778137,
"learning_rate": 8.79214190814566e-06,
"loss": 0.2802,
"step": 790
},
{
"epoch": 2.4152671755725192,
"grad_norm": 0.6032187342643738,
"learning_rate": 8.78894477817445e-06,
"loss": 0.259,
"step": 791
},
{
"epoch": 2.4183206106870228,
"grad_norm": 0.5767791271209717,
"learning_rate": 8.785744005423003e-06,
"loss": 0.2236,
"step": 792
},
{
"epoch": 2.4213740458015267,
"grad_norm": 0.5568051934242249,
"learning_rate": 8.78253959296862e-06,
"loss": 0.2535,
"step": 793
},
{
"epoch": 2.4244274809160307,
"grad_norm": 0.6259385347366333,
"learning_rate": 8.779331543892097e-06,
"loss": 0.2585,
"step": 794
},
{
"epoch": 2.427480916030534,
"grad_norm": 0.6109589338302612,
"learning_rate": 8.77611986127773e-06,
"loss": 0.2619,
"step": 795
},
{
"epoch": 2.430534351145038,
"grad_norm": 0.6324312090873718,
"learning_rate": 8.772904548213301e-06,
"loss": 0.2453,
"step": 796
},
{
"epoch": 2.433587786259542,
"grad_norm": 0.7132911086082458,
"learning_rate": 8.769685607790091e-06,
"loss": 0.2358,
"step": 797
},
{
"epoch": 2.436641221374046,
"grad_norm": 0.5101016163825989,
"learning_rate": 8.766463043102864e-06,
"loss": 0.27,
"step": 798
},
{
"epoch": 2.4396946564885496,
"grad_norm": 0.5646507143974304,
"learning_rate": 8.76323685724987e-06,
"loss": 0.2369,
"step": 799
},
{
"epoch": 2.4427480916030535,
"grad_norm": 0.5575982332229614,
"learning_rate": 8.760007053332837e-06,
"loss": 0.2459,
"step": 800
},
{
"epoch": 2.445801526717557,
"grad_norm": 0.6391830444335938,
"learning_rate": 8.756773634456975e-06,
"loss": 0.2612,
"step": 801
},
{
"epoch": 2.448854961832061,
"grad_norm": 0.572357177734375,
"learning_rate": 8.75353660373097e-06,
"loss": 0.2477,
"step": 802
},
{
"epoch": 2.451908396946565,
"grad_norm": 0.5150573253631592,
"learning_rate": 8.750295964266979e-06,
"loss": 0.2261,
"step": 803
},
{
"epoch": 2.454961832061069,
"grad_norm": 0.5402485728263855,
"learning_rate": 8.747051719180626e-06,
"loss": 0.2791,
"step": 804
},
{
"epoch": 2.4580152671755724,
"grad_norm": 0.572452187538147,
"learning_rate": 8.743803871591008e-06,
"loss": 0.2466,
"step": 805
},
{
"epoch": 2.4610687022900763,
"grad_norm": 0.7098034024238586,
"learning_rate": 8.740552424620679e-06,
"loss": 0.2411,
"step": 806
},
{
"epoch": 2.4641221374045803,
"grad_norm": 0.7472593188285828,
"learning_rate": 8.737297381395657e-06,
"loss": 0.2707,
"step": 807
},
{
"epoch": 2.467175572519084,
"grad_norm": 0.6873473525047302,
"learning_rate": 8.734038745045419e-06,
"loss": 0.2776,
"step": 808
},
{
"epoch": 2.4702290076335878,
"grad_norm": 0.6122696995735168,
"learning_rate": 8.730776518702891e-06,
"loss": 0.2801,
"step": 809
},
{
"epoch": 2.4732824427480917,
"grad_norm": 0.7205132246017456,
"learning_rate": 8.727510705504453e-06,
"loss": 0.2486,
"step": 810
},
{
"epoch": 2.4763358778625952,
"grad_norm": 0.5110540986061096,
"learning_rate": 8.72424130858994e-06,
"loss": 0.2809,
"step": 811
},
{
"epoch": 2.479389312977099,
"grad_norm": 0.5675752758979797,
"learning_rate": 8.72096833110262e-06,
"loss": 0.2608,
"step": 812
},
{
"epoch": 2.482442748091603,
"grad_norm": 0.6936665773391724,
"learning_rate": 8.717691776189214e-06,
"loss": 0.2799,
"step": 813
},
{
"epoch": 2.4854961832061067,
"grad_norm": 1.0850058794021606,
"learning_rate": 8.714411646999878e-06,
"loss": 0.283,
"step": 814
},
{
"epoch": 2.4885496183206106,
"grad_norm": 0.6592471599578857,
"learning_rate": 8.711127946688207e-06,
"loss": 0.2898,
"step": 815
},
{
"epoch": 2.4916030534351146,
"grad_norm": 0.6823598742485046,
"learning_rate": 8.707840678411223e-06,
"loss": 0.2541,
"step": 816
},
{
"epoch": 2.4946564885496185,
"grad_norm": 0.8594865798950195,
"learning_rate": 8.704549845329386e-06,
"loss": 0.2788,
"step": 817
},
{
"epoch": 2.497709923664122,
"grad_norm": 0.5214030742645264,
"learning_rate": 8.701255450606579e-06,
"loss": 0.2755,
"step": 818
},
{
"epoch": 2.500763358778626,
"grad_norm": 0.6221981048583984,
"learning_rate": 8.69795749741011e-06,
"loss": 0.2439,
"step": 819
},
{
"epoch": 2.5038167938931295,
"grad_norm": 0.6767120957374573,
"learning_rate": 8.694655988910707e-06,
"loss": 0.2713,
"step": 820
},
{
"epoch": 2.5068702290076335,
"grad_norm": 0.6328213214874268,
"learning_rate": 8.69135092828252e-06,
"loss": 0.25,
"step": 821
},
{
"epoch": 2.5099236641221374,
"grad_norm": 0.6158302426338196,
"learning_rate": 8.688042318703111e-06,
"loss": 0.277,
"step": 822
},
{
"epoch": 2.5129770992366414,
"grad_norm": 0.6608135104179382,
"learning_rate": 8.684730163353457e-06,
"loss": 0.2094,
"step": 823
},
{
"epoch": 2.516030534351145,
"grad_norm": 0.7059114575386047,
"learning_rate": 8.681414465417936e-06,
"loss": 0.2944,
"step": 824
},
{
"epoch": 2.519083969465649,
"grad_norm": 0.5691379308700562,
"learning_rate": 8.678095228084343e-06,
"loss": 0.2504,
"step": 825
},
{
"epoch": 2.522137404580153,
"grad_norm": 0.5667744278907776,
"learning_rate": 8.674772454543869e-06,
"loss": 0.2494,
"step": 826
},
{
"epoch": 2.5251908396946563,
"grad_norm": 0.6125538349151611,
"learning_rate": 8.671446147991103e-06,
"loss": 0.2185,
"step": 827
},
{
"epoch": 2.5282442748091603,
"grad_norm": 0.654398500919342,
"learning_rate": 8.66811631162404e-06,
"loss": 0.2873,
"step": 828
},
{
"epoch": 2.531297709923664,
"grad_norm": 0.6519939303398132,
"learning_rate": 8.664782948644058e-06,
"loss": 0.273,
"step": 829
},
{
"epoch": 2.534351145038168,
"grad_norm": 1.465004801750183,
"learning_rate": 8.661446062255931e-06,
"loss": 0.2234,
"step": 830
},
{
"epoch": 2.5374045801526717,
"grad_norm": 1.033259630203247,
"learning_rate": 8.65810565566782e-06,
"loss": 0.267,
"step": 831
},
{
"epoch": 2.5404580152671756,
"grad_norm": 1.0374531745910645,
"learning_rate": 8.654761732091271e-06,
"loss": 0.2469,
"step": 832
},
{
"epoch": 2.543511450381679,
"grad_norm": 0.560542106628418,
"learning_rate": 8.65141429474121e-06,
"loss": 0.2822,
"step": 833
},
{
"epoch": 2.546564885496183,
"grad_norm": 0.8672897815704346,
"learning_rate": 8.648063346835943e-06,
"loss": 0.2483,
"step": 834
},
{
"epoch": 2.549618320610687,
"grad_norm": 0.6460666656494141,
"learning_rate": 8.644708891597147e-06,
"loss": 0.2637,
"step": 835
},
{
"epoch": 2.552671755725191,
"grad_norm": 0.5855495929718018,
"learning_rate": 8.641350932249876e-06,
"loss": 0.2496,
"step": 836
},
{
"epoch": 2.5557251908396945,
"grad_norm": 0.7508133053779602,
"learning_rate": 8.637989472022548e-06,
"loss": 0.2655,
"step": 837
},
{
"epoch": 2.5587786259541985,
"grad_norm": 0.5518255829811096,
"learning_rate": 8.634624514146954e-06,
"loss": 0.2273,
"step": 838
},
{
"epoch": 2.5618320610687024,
"grad_norm": 0.5475079417228699,
"learning_rate": 8.631256061858238e-06,
"loss": 0.2771,
"step": 839
},
{
"epoch": 2.564885496183206,
"grad_norm": 0.7037142515182495,
"learning_rate": 8.627884118394913e-06,
"loss": 0.2698,
"step": 840
},
{
"epoch": 2.56793893129771,
"grad_norm": 0.5983914136886597,
"learning_rate": 8.624508686998846e-06,
"loss": 0.2651,
"step": 841
},
{
"epoch": 2.570992366412214,
"grad_norm": 0.7244035601615906,
"learning_rate": 8.621129770915248e-06,
"loss": 0.3006,
"step": 842
},
{
"epoch": 2.574045801526718,
"grad_norm": 0.6609862446784973,
"learning_rate": 8.617747373392697e-06,
"loss": 0.2924,
"step": 843
},
{
"epoch": 2.5770992366412213,
"grad_norm": 0.7791816592216492,
"learning_rate": 8.614361497683102e-06,
"loss": 0.2487,
"step": 844
},
{
"epoch": 2.5801526717557253,
"grad_norm": 0.6770834922790527,
"learning_rate": 8.61097214704173e-06,
"loss": 0.2958,
"step": 845
},
{
"epoch": 2.583206106870229,
"grad_norm": 0.6960469484329224,
"learning_rate": 8.607579324727175e-06,
"loss": 0.2812,
"step": 846
},
{
"epoch": 2.5862595419847327,
"grad_norm": 0.7780609726905823,
"learning_rate": 8.60418303400138e-06,
"loss": 0.2609,
"step": 847
},
{
"epoch": 2.5893129770992367,
"grad_norm": 0.6630309224128723,
"learning_rate": 8.600783278129617e-06,
"loss": 0.253,
"step": 848
},
{
"epoch": 2.5923664122137406,
"grad_norm": 2.10282039642334,
"learning_rate": 8.597380060380493e-06,
"loss": 0.2582,
"step": 849
},
{
"epoch": 2.595419847328244,
"grad_norm": 0.6513540148735046,
"learning_rate": 8.59397338402594e-06,
"loss": 0.2818,
"step": 850
},
{
"epoch": 2.598473282442748,
"grad_norm": 0.6014916300773621,
"learning_rate": 8.590563252341216e-06,
"loss": 0.2464,
"step": 851
},
{
"epoch": 2.601526717557252,
"grad_norm": 1.0627957582473755,
"learning_rate": 8.5871496686049e-06,
"loss": 0.2592,
"step": 852
},
{
"epoch": 2.6045801526717556,
"grad_norm": 0.884885311126709,
"learning_rate": 8.583732636098895e-06,
"loss": 0.2533,
"step": 853
},
{
"epoch": 2.6076335877862595,
"grad_norm": 0.8026384711265564,
"learning_rate": 8.580312158108413e-06,
"loss": 0.2057,
"step": 854
},
{
"epoch": 2.6106870229007635,
"grad_norm": 0.5773493647575378,
"learning_rate": 8.576888237921983e-06,
"loss": 0.2696,
"step": 855
},
{
"epoch": 2.6137404580152674,
"grad_norm": 0.5091659426689148,
"learning_rate": 8.57346087883144e-06,
"loss": 0.2206,
"step": 856
},
{
"epoch": 2.616793893129771,
"grad_norm": 0.5202175378799438,
"learning_rate": 8.570030084131933e-06,
"loss": 0.2256,
"step": 857
},
{
"epoch": 2.619847328244275,
"grad_norm": 0.6515614986419678,
"learning_rate": 8.566595857121902e-06,
"loss": 0.2426,
"step": 858
},
{
"epoch": 2.6229007633587784,
"grad_norm": 0.6642196774482727,
"learning_rate": 8.563158201103096e-06,
"loss": 0.2861,
"step": 859
},
{
"epoch": 2.6259541984732824,
"grad_norm": 0.7268393635749817,
"learning_rate": 8.559717119380558e-06,
"loss": 0.2393,
"step": 860
},
{
"epoch": 2.6290076335877863,
"grad_norm": 0.698329508304596,
"learning_rate": 8.556272615262623e-06,
"loss": 0.2672,
"step": 861
},
{
"epoch": 2.6320610687022903,
"grad_norm": 0.693202018737793,
"learning_rate": 8.55282469206092e-06,
"loss": 0.2808,
"step": 862
},
{
"epoch": 2.635114503816794,
"grad_norm": 1.0609700679779053,
"learning_rate": 8.549373353090362e-06,
"loss": 0.2643,
"step": 863
},
{
"epoch": 2.6381679389312978,
"grad_norm": 0.5131743550300598,
"learning_rate": 8.545918601669147e-06,
"loss": 0.2503,
"step": 864
},
{
"epoch": 2.6412213740458013,
"grad_norm": 0.7009995579719543,
"learning_rate": 8.542460441118756e-06,
"loss": 0.259,
"step": 865
},
{
"epoch": 2.644274809160305,
"grad_norm": 0.5775540471076965,
"learning_rate": 8.538998874763942e-06,
"loss": 0.2582,
"step": 866
},
{
"epoch": 2.647328244274809,
"grad_norm": 0.5414665937423706,
"learning_rate": 8.535533905932739e-06,
"loss": 0.2575,
"step": 867
},
{
"epoch": 2.650381679389313,
"grad_norm": 0.5874025225639343,
"learning_rate": 8.532065537956446e-06,
"loss": 0.2802,
"step": 868
},
{
"epoch": 2.6534351145038166,
"grad_norm": 0.7705426812171936,
"learning_rate": 8.528593774169637e-06,
"loss": 0.2778,
"step": 869
},
{
"epoch": 2.6564885496183206,
"grad_norm": 0.6250425577163696,
"learning_rate": 8.525118617910144e-06,
"loss": 0.2611,
"step": 870
},
{
"epoch": 2.6595419847328245,
"grad_norm": 0.6487144231796265,
"learning_rate": 8.521640072519066e-06,
"loss": 0.244,
"step": 871
},
{
"epoch": 2.662595419847328,
"grad_norm": 0.6234596371650696,
"learning_rate": 8.518158141340755e-06,
"loss": 0.2478,
"step": 872
},
{
"epoch": 2.665648854961832,
"grad_norm": 0.6986786723136902,
"learning_rate": 8.514672827722824e-06,
"loss": 0.2828,
"step": 873
},
{
"epoch": 2.668702290076336,
"grad_norm": 0.5628014802932739,
"learning_rate": 8.511184135016134e-06,
"loss": 0.2212,
"step": 874
},
{
"epoch": 2.67175572519084,
"grad_norm": 0.5711296796798706,
"learning_rate": 8.507692066574795e-06,
"loss": 0.2773,
"step": 875
},
{
"epoch": 2.6748091603053434,
"grad_norm": 0.5670403838157654,
"learning_rate": 8.504196625756166e-06,
"loss": 0.2593,
"step": 876
},
{
"epoch": 2.6778625954198474,
"grad_norm": 0.8048163652420044,
"learning_rate": 8.500697815920843e-06,
"loss": 0.2843,
"step": 877
},
{
"epoch": 2.680916030534351,
"grad_norm": 0.6336585283279419,
"learning_rate": 8.497195640432664e-06,
"loss": 0.223,
"step": 878
},
{
"epoch": 2.683969465648855,
"grad_norm": 1.0231986045837402,
"learning_rate": 8.493690102658703e-06,
"loss": 0.2693,
"step": 879
},
{
"epoch": 2.687022900763359,
"grad_norm": 0.5733115077018738,
"learning_rate": 8.490181205969268e-06,
"loss": 0.2752,
"step": 880
},
{
"epoch": 2.6900763358778628,
"grad_norm": 0.5859436988830566,
"learning_rate": 8.486668953737891e-06,
"loss": 0.2632,
"step": 881
},
{
"epoch": 2.6931297709923663,
"grad_norm": 0.6292194724082947,
"learning_rate": 8.483153349341336e-06,
"loss": 0.2248,
"step": 882
},
{
"epoch": 2.6961832061068702,
"grad_norm": 0.6594669222831726,
"learning_rate": 8.479634396159587e-06,
"loss": 0.2639,
"step": 883
},
{
"epoch": 2.699236641221374,
"grad_norm": 0.7148462533950806,
"learning_rate": 8.476112097575845e-06,
"loss": 0.2447,
"step": 884
},
{
"epoch": 2.7022900763358777,
"grad_norm": 0.6593231558799744,
"learning_rate": 8.472586456976534e-06,
"loss": 0.2479,
"step": 885
},
{
"epoch": 2.7053435114503817,
"grad_norm": 0.5756161212921143,
"learning_rate": 8.46905747775129e-06,
"loss": 0.282,
"step": 886
},
{
"epoch": 2.7083969465648856,
"grad_norm": 0.7000985145568848,
"learning_rate": 8.465525163292948e-06,
"loss": 0.2414,
"step": 887
},
{
"epoch": 2.7114503816793896,
"grad_norm": 0.6213951706886292,
"learning_rate": 8.461989516997565e-06,
"loss": 0.2254,
"step": 888
},
{
"epoch": 2.714503816793893,
"grad_norm": 0.550736129283905,
"learning_rate": 8.458450542264391e-06,
"loss": 0.2511,
"step": 889
},
{
"epoch": 2.717557251908397,
"grad_norm": 0.5542187094688416,
"learning_rate": 8.45490824249588e-06,
"loss": 0.2663,
"step": 890
},
{
"epoch": 2.7206106870229005,
"grad_norm": 0.8226644992828369,
"learning_rate": 8.45136262109768e-06,
"loss": 0.2789,
"step": 891
},
{
"epoch": 2.7236641221374045,
"grad_norm": 0.7007033228874207,
"learning_rate": 8.447813681478638e-06,
"loss": 0.2716,
"step": 892
},
{
"epoch": 2.7267175572519085,
"grad_norm": 0.8899338841438293,
"learning_rate": 8.444261427050786e-06,
"loss": 0.2637,
"step": 893
},
{
"epoch": 2.7297709923664124,
"grad_norm": 0.5911480784416199,
"learning_rate": 8.440705861229344e-06,
"loss": 0.2863,
"step": 894
},
{
"epoch": 2.732824427480916,
"grad_norm": 0.8226988315582275,
"learning_rate": 8.437146987432717e-06,
"loss": 0.2932,
"step": 895
},
{
"epoch": 2.73587786259542,
"grad_norm": 0.7107616662979126,
"learning_rate": 8.43358480908249e-06,
"loss": 0.2331,
"step": 896
},
{
"epoch": 2.738931297709924,
"grad_norm": 0.6583082675933838,
"learning_rate": 8.430019329603423e-06,
"loss": 0.2678,
"step": 897
},
{
"epoch": 2.7419847328244273,
"grad_norm": 0.6155053377151489,
"learning_rate": 8.426450552423451e-06,
"loss": 0.2593,
"step": 898
},
{
"epoch": 2.7450381679389313,
"grad_norm": 0.5707153081893921,
"learning_rate": 8.422878480973681e-06,
"loss": 0.2643,
"step": 899
},
{
"epoch": 2.7480916030534353,
"grad_norm": 0.6607732772827148,
"learning_rate": 8.41930311868839e-06,
"loss": 0.2816,
"step": 900
},
{
"epoch": 2.751145038167939,
"grad_norm": 0.5974174737930298,
"learning_rate": 8.41572446900501e-06,
"loss": 0.2682,
"step": 901
},
{
"epoch": 2.7541984732824427,
"grad_norm": 0.6799685955047607,
"learning_rate": 8.412142535364139e-06,
"loss": 0.2705,
"step": 902
},
{
"epoch": 2.7572519083969467,
"grad_norm": 0.5892939567565918,
"learning_rate": 8.408557321209534e-06,
"loss": 0.2901,
"step": 903
},
{
"epoch": 2.76030534351145,
"grad_norm": 0.5419011116027832,
"learning_rate": 8.404968829988102e-06,
"loss": 0.2566,
"step": 904
},
{
"epoch": 2.763358778625954,
"grad_norm": 0.808924674987793,
"learning_rate": 8.401377065149904e-06,
"loss": 0.2914,
"step": 905
},
{
"epoch": 2.766412213740458,
"grad_norm": 0.6272004246711731,
"learning_rate": 8.397782030148147e-06,
"loss": 0.265,
"step": 906
},
{
"epoch": 2.769465648854962,
"grad_norm": 0.5678505301475525,
"learning_rate": 8.39418372843918e-06,
"loss": 0.2408,
"step": 907
},
{
"epoch": 2.7725190839694656,
"grad_norm": 0.6624336838722229,
"learning_rate": 8.390582163482497e-06,
"loss": 0.2388,
"step": 908
},
{
"epoch": 2.7755725190839695,
"grad_norm": 0.541344404220581,
"learning_rate": 8.386977338740724e-06,
"loss": 0.2812,
"step": 909
},
{
"epoch": 2.778625954198473,
"grad_norm": 0.5576301217079163,
"learning_rate": 8.383369257679625e-06,
"loss": 0.3016,
"step": 910
},
{
"epoch": 2.781679389312977,
"grad_norm": 0.6158388257026672,
"learning_rate": 8.379757923768094e-06,
"loss": 0.2325,
"step": 911
},
{
"epoch": 2.784732824427481,
"grad_norm": 0.9496983289718628,
"learning_rate": 8.376143340478153e-06,
"loss": 0.2756,
"step": 912
},
{
"epoch": 2.787786259541985,
"grad_norm": 0.6246280670166016,
"learning_rate": 8.372525511284945e-06,
"loss": 0.2922,
"step": 913
},
{
"epoch": 2.7908396946564884,
"grad_norm": 0.6505559086799622,
"learning_rate": 8.368904439666739e-06,
"loss": 0.2814,
"step": 914
},
{
"epoch": 2.7938931297709924,
"grad_norm": 0.6996026039123535,
"learning_rate": 8.365280129104912e-06,
"loss": 0.262,
"step": 915
},
{
"epoch": 2.7969465648854963,
"grad_norm": 0.7569531798362732,
"learning_rate": 8.361652583083968e-06,
"loss": 0.2558,
"step": 916
},
{
"epoch": 2.8,
"grad_norm": 0.6053115725517273,
"learning_rate": 8.358021805091509e-06,
"loss": 0.2621,
"step": 917
},
{
"epoch": 2.8030534351145038,
"grad_norm": 0.5664728879928589,
"learning_rate": 8.354387798618254e-06,
"loss": 0.287,
"step": 918
},
{
"epoch": 2.8061068702290077,
"grad_norm": 0.6129698753356934,
"learning_rate": 8.35075056715802e-06,
"loss": 0.2594,
"step": 919
},
{
"epoch": 2.8091603053435117,
"grad_norm": 0.5830501914024353,
"learning_rate": 8.347110114207727e-06,
"loss": 0.2599,
"step": 920
},
{
"epoch": 2.812213740458015,
"grad_norm": 0.6474660038948059,
"learning_rate": 8.34346644326739e-06,
"loss": 0.3093,
"step": 921
},
{
"epoch": 2.815267175572519,
"grad_norm": 0.583742082118988,
"learning_rate": 8.339819557840124e-06,
"loss": 0.2668,
"step": 922
},
{
"epoch": 2.8183206106870227,
"grad_norm": 0.8714466094970703,
"learning_rate": 8.336169461432125e-06,
"loss": 0.2425,
"step": 923
},
{
"epoch": 2.8213740458015266,
"grad_norm": 0.6026268005371094,
"learning_rate": 8.332516157552684e-06,
"loss": 0.2377,
"step": 924
},
{
"epoch": 2.8244274809160306,
"grad_norm": 0.7545271515846252,
"learning_rate": 8.328859649714171e-06,
"loss": 0.2532,
"step": 925
},
{
"epoch": 2.8274809160305345,
"grad_norm": 0.5356566905975342,
"learning_rate": 8.32519994143204e-06,
"loss": 0.2442,
"step": 926
},
{
"epoch": 2.830534351145038,
"grad_norm": 0.5675045847892761,
"learning_rate": 8.321537036224822e-06,
"loss": 0.2682,
"step": 927
},
{
"epoch": 2.833587786259542,
"grad_norm": 0.5817980170249939,
"learning_rate": 8.317870937614115e-06,
"loss": 0.275,
"step": 928
},
{
"epoch": 2.836641221374046,
"grad_norm": 0.5928219556808472,
"learning_rate": 8.314201649124595e-06,
"loss": 0.293,
"step": 929
},
{
"epoch": 2.8396946564885495,
"grad_norm": 0.6520822048187256,
"learning_rate": 8.310529174284004e-06,
"loss": 0.2855,
"step": 930
},
{
"epoch": 2.8427480916030534,
"grad_norm": 0.6014900207519531,
"learning_rate": 8.30685351662314e-06,
"loss": 0.2873,
"step": 931
},
{
"epoch": 2.8458015267175574,
"grad_norm": 0.6576688885688782,
"learning_rate": 8.30317467967587e-06,
"loss": 0.2553,
"step": 932
},
{
"epoch": 2.8488549618320613,
"grad_norm": 0.555482029914856,
"learning_rate": 8.299492666979114e-06,
"loss": 0.2322,
"step": 933
},
{
"epoch": 2.851908396946565,
"grad_norm": 0.6716350317001343,
"learning_rate": 8.295807482072842e-06,
"loss": 0.2413,
"step": 934
},
{
"epoch": 2.854961832061069,
"grad_norm": 0.5515463352203369,
"learning_rate": 8.292119128500082e-06,
"loss": 0.2378,
"step": 935
},
{
"epoch": 2.8580152671755723,
"grad_norm": 0.6711946725845337,
"learning_rate": 8.288427609806899e-06,
"loss": 0.2852,
"step": 936
},
{
"epoch": 2.8610687022900763,
"grad_norm": 0.637618899345398,
"learning_rate": 8.28473292954241e-06,
"loss": 0.2409,
"step": 937
},
{
"epoch": 2.86412213740458,
"grad_norm": 0.6410360932350159,
"learning_rate": 8.281035091258762e-06,
"loss": 0.2381,
"step": 938
},
{
"epoch": 2.867175572519084,
"grad_norm": 0.5831419825553894,
"learning_rate": 8.277334098511147e-06,
"loss": 0.2885,
"step": 939
},
{
"epoch": 2.8702290076335877,
"grad_norm": 0.5531440377235413,
"learning_rate": 8.273629954857784e-06,
"loss": 0.2581,
"step": 940
},
{
"epoch": 2.8732824427480916,
"grad_norm": 0.603334367275238,
"learning_rate": 8.269922663859926e-06,
"loss": 0.269,
"step": 941
},
{
"epoch": 2.876335877862595,
"grad_norm": 0.6137884259223938,
"learning_rate": 8.266212229081846e-06,
"loss": 0.2694,
"step": 942
},
{
"epoch": 2.879389312977099,
"grad_norm": 0.6482293009757996,
"learning_rate": 8.262498654090846e-06,
"loss": 0.2622,
"step": 943
},
{
"epoch": 2.882442748091603,
"grad_norm": 0.7027099132537842,
"learning_rate": 8.258781942457244e-06,
"loss": 0.2717,
"step": 944
},
{
"epoch": 2.885496183206107,
"grad_norm": 0.7886291742324829,
"learning_rate": 8.255062097754371e-06,
"loss": 0.2836,
"step": 945
},
{
"epoch": 2.8885496183206105,
"grad_norm": 0.549103319644928,
"learning_rate": 8.251339123558573e-06,
"loss": 0.2542,
"step": 946
},
{
"epoch": 2.8916030534351145,
"grad_norm": 0.5483999252319336,
"learning_rate": 8.247613023449209e-06,
"loss": 0.2866,
"step": 947
},
{
"epoch": 2.8946564885496184,
"grad_norm": 0.6661820411682129,
"learning_rate": 8.243883801008632e-06,
"loss": 0.258,
"step": 948
},
{
"epoch": 2.897709923664122,
"grad_norm": 0.6079604029655457,
"learning_rate": 8.240151459822207e-06,
"loss": 0.2638,
"step": 949
},
{
"epoch": 2.900763358778626,
"grad_norm": 0.7714946866035461,
"learning_rate": 8.236416003478295e-06,
"loss": 0.2776,
"step": 950
},
{
"epoch": 2.90381679389313,
"grad_norm": 0.6866840124130249,
"learning_rate": 8.232677435568252e-06,
"loss": 0.2768,
"step": 951
},
{
"epoch": 2.906870229007634,
"grad_norm": 0.7013977766036987,
"learning_rate": 8.228935759686424e-06,
"loss": 0.2459,
"step": 952
},
{
"epoch": 2.9099236641221373,
"grad_norm": 0.5634531378746033,
"learning_rate": 8.225190979430145e-06,
"loss": 0.265,
"step": 953
},
{
"epoch": 2.9129770992366413,
"grad_norm": 0.7142422199249268,
"learning_rate": 8.221443098399733e-06,
"loss": 0.2876,
"step": 954
},
{
"epoch": 2.916030534351145,
"grad_norm": 0.5215662121772766,
"learning_rate": 8.217692120198492e-06,
"loss": 0.2434,
"step": 955
},
{
"epoch": 2.9190839694656487,
"grad_norm": 0.5616928935050964,
"learning_rate": 8.213938048432697e-06,
"loss": 0.2694,
"step": 956
},
{
"epoch": 2.9221374045801527,
"grad_norm": 0.68199223279953,
"learning_rate": 8.210180886711603e-06,
"loss": 0.2842,
"step": 957
},
{
"epoch": 2.9251908396946567,
"grad_norm": 0.6951640248298645,
"learning_rate": 8.206420638647433e-06,
"loss": 0.2558,
"step": 958
},
{
"epoch": 2.92824427480916,
"grad_norm": 0.678872287273407,
"learning_rate": 8.202657307855376e-06,
"loss": 0.2379,
"step": 959
},
{
"epoch": 2.931297709923664,
"grad_norm": 0.5897531509399414,
"learning_rate": 8.198890897953586e-06,
"loss": 0.2519,
"step": 960
},
{
"epoch": 2.934351145038168,
"grad_norm": 0.5907174944877625,
"learning_rate": 8.19512141256318e-06,
"loss": 0.2526,
"step": 961
},
{
"epoch": 2.9374045801526716,
"grad_norm": 0.5894899964332581,
"learning_rate": 8.191348855308229e-06,
"loss": 0.2515,
"step": 962
},
{
"epoch": 2.9404580152671755,
"grad_norm": 0.7135964632034302,
"learning_rate": 8.187573229815757e-06,
"loss": 0.2702,
"step": 963
},
{
"epoch": 2.9435114503816795,
"grad_norm": 0.5521953701972961,
"learning_rate": 8.18379453971574e-06,
"loss": 0.2346,
"step": 964
},
{
"epoch": 2.9465648854961835,
"grad_norm": 0.610332190990448,
"learning_rate": 8.180012788641097e-06,
"loss": 0.2655,
"step": 965
},
{
"epoch": 2.949618320610687,
"grad_norm": 0.6203378438949585,
"learning_rate": 8.176227980227693e-06,
"loss": 0.2415,
"step": 966
},
{
"epoch": 2.952671755725191,
"grad_norm": 1.4141615629196167,
"learning_rate": 8.172440118114332e-06,
"loss": 0.2516,
"step": 967
},
{
"epoch": 2.9557251908396944,
"grad_norm": 0.5614278316497803,
"learning_rate": 8.168649205942753e-06,
"loss": 0.2697,
"step": 968
},
{
"epoch": 2.9587786259541984,
"grad_norm": 0.6798096299171448,
"learning_rate": 8.164855247357628e-06,
"loss": 0.2516,
"step": 969
},
{
"epoch": 2.9618320610687023,
"grad_norm": 0.7112230658531189,
"learning_rate": 8.161058246006558e-06,
"loss": 0.2801,
"step": 970
},
{
"epoch": 2.9648854961832063,
"grad_norm": 0.5740217566490173,
"learning_rate": 8.157258205540069e-06,
"loss": 0.2829,
"step": 971
},
{
"epoch": 2.96793893129771,
"grad_norm": 0.6302188038825989,
"learning_rate": 8.153455129611605e-06,
"loss": 0.2493,
"step": 972
},
{
"epoch": 2.9709923664122138,
"grad_norm": 0.7453956604003906,
"learning_rate": 8.14964902187754e-06,
"loss": 0.2934,
"step": 973
},
{
"epoch": 2.9740458015267177,
"grad_norm": 0.5962462425231934,
"learning_rate": 8.145839885997146e-06,
"loss": 0.3061,
"step": 974
},
{
"epoch": 2.9770992366412212,
"grad_norm": 0.7513747215270996,
"learning_rate": 8.142027725632622e-06,
"loss": 0.2699,
"step": 975
},
{
"epoch": 2.980152671755725,
"grad_norm": 0.5672812461853027,
"learning_rate": 8.138212544449067e-06,
"loss": 0.2492,
"step": 976
},
{
"epoch": 2.983206106870229,
"grad_norm": 1.0813274383544922,
"learning_rate": 8.134394346114486e-06,
"loss": 0.2632,
"step": 977
},
{
"epoch": 2.986259541984733,
"grad_norm": 0.6245205998420715,
"learning_rate": 8.130573134299782e-06,
"loss": 0.2243,
"step": 978
},
{
"epoch": 2.9893129770992366,
"grad_norm": 0.6279330849647522,
"learning_rate": 8.126748912678757e-06,
"loss": 0.2546,
"step": 979
},
{
"epoch": 2.9923664122137406,
"grad_norm": 0.5319833755493164,
"learning_rate": 8.122921684928111e-06,
"loss": 0.2569,
"step": 980
},
{
"epoch": 2.995419847328244,
"grad_norm": 0.647398829460144,
"learning_rate": 8.119091454727427e-06,
"loss": 0.2931,
"step": 981
},
{
"epoch": 2.998473282442748,
"grad_norm": 0.5240838527679443,
"learning_rate": 8.11525822575918e-06,
"loss": 0.2863,
"step": 982
},
{
"epoch": 3.001526717557252,
"grad_norm": 0.596959114074707,
"learning_rate": 8.111422001708725e-06,
"loss": 0.2086,
"step": 983
},
{
"epoch": 3.004580152671756,
"grad_norm": 0.6433384418487549,
"learning_rate": 8.107582786264299e-06,
"loss": 0.1988,
"step": 984
}
],
"logging_steps": 1.0,
"max_steps": 3270,
"num_input_tokens_seen": 0,
"num_train_epochs": 10,
"save_steps": 164,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 3.217804759470493e+19,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}