TracyTank committed
Commit 47bec1c · verified · 1 Parent(s): a156248

Training in progress, step 440, checkpoint

last-checkpoint/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:13bfd00368c2e0f1b66a0977927d7deefbdedfda4a029db1cfecf7121489316e
+ oid sha256:3eb5c3228d5f169d3e566f341e1de01ec7e78408c8fecbc1639b3c2b3cbec950
 size 4286680
last-checkpoint/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:ea68f69796a8fc7439cb40ea2826101f1f73e514724e27f30cb379db3ccc44a5
+ oid sha256:58c37d2fccdce3632e266132ad2b89189f9a9e1ca4b9fd0df854408688d6ab55
 size 8583659
last-checkpoint/rng_state_0.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:9ff7923d27ecc0fb4a57b135231bb75b243036007e1509b1f5b68b3f00d05b77
+ oid sha256:cd028d3cffde00dd4a7c26cac86475631732cdb6a5cc82b9a5501c0ce419bf57
 size 15024
last-checkpoint/rng_state_1.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:ae3f8f572e58735dae0aa7264e1fb7ae105be606ff415bb5dd2ae3ee3f49df3a
+ oid sha256:5cd4496eeda825da88221f896955530b92634bc738e1e71ab630e2caa9dc3167
 size 15024
last-checkpoint/rng_state_2.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:26c1866c86adc7c136484215b00cd37d53343f5e3ef8267c99194dbd76c70849
+ oid sha256:84224df08fab8840835a04924367865c53711750c088c8d49882edab4dbeea47
 size 15024
last-checkpoint/rng_state_3.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:e44d5c8f79db099fe2d32bdb9956b659df4a75a8602e949459f8f94c58bfc3ec
+ oid sha256:a5a19b7546bf0a06091a07d7a5fd8a6937ce53b062869e943ea4f610c95f3f14
 size 15024
last-checkpoint/scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:e7230e3e2f1529a9e05ef1196810d221e9f653e155d980c6f33f63bdbf58f8cc
+ oid sha256:3aa6d4cd0a1d119d88746df8d17b061da99249879d9cb64d05543ac4d112a2c5
 size 1064
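The files above are Git LFS pointers: each records a spec version, a sha256 oid, and a byte size, and only the oid changes between checkpoints here. As a minimal sketch (the pointer/blob paths are illustrative assumptions, not files tracked in this commit), a pointer can be parsed and a downloaded blob checked against it like this:

# Hedged sketch: parse a Git LFS pointer (version / oid / size lines as shown
# above) and verify a local blob against its recorded sha256 and size.
import hashlib
from pathlib import Path

def parse_lfs_pointer(text: str) -> dict:
    """Split each 'key value' line of an LFS pointer into a dict."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

def verify_blob(pointer_path: Path, blob_path: Path) -> bool:
    """True if blob_path matches the oid and size recorded in the pointer."""
    fields = parse_lfs_pointer(pointer_path.read_text())
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    data = blob_path.read_bytes()
    return (len(data) == expected_size
            and hashlib.sha256(data).hexdigest() == expected_oid)

# Example with hypothetical paths:
# verify_blob(Path("adapter_model.safetensors.pointer"),
#             Path("adapter_model.safetensors"))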
last-checkpoint/trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
 "best_metric": null,
 "best_model_checkpoint": null,
- "epoch": 0.5428733674048836,
+ "epoch": 0.9994321408290744,
 "eval_steps": 500,
- "global_step": 239,
+ "global_step": 440,
 "is_hyper_param_search": false,
 "is_local_process_zero": true,
 "is_world_process_zero": true,
@@ -1680,6 +1680,1413 @@
 "learning_rate": 6.412692147824976e-05,
 "loss": 39.8384,
 "step": 239
+ },
+ {
+ "epoch": 0.545144804088586,
+ "grad_norm": 3.1766092777252197,
+ "learning_rate": 6.368314950360415e-05,
+ "loss": 39.8386,
+ "step": 240
+ },
+ {
+ "epoch": 0.5474162407722885,
+ "grad_norm": 3.190483808517456,
+ "learning_rate": 6.323820930826879e-05,
+ "loss": 39.8017,
+ "step": 241
+ },
+ {
+ "epoch": 0.5496876774559909,
+ "grad_norm": 3.2229549884796143,
+ "learning_rate": 6.279213887972179e-05,
+ "loss": 39.7668,
+ "step": 242
+ },
+ {
+ "epoch": 0.5519591141396933,
+ "grad_norm": 3.0010106563568115,
+ "learning_rate": 6.234497630193666e-05,
+ "loss": 39.9786,
+ "step": 243
+ },
+ {
+ "epoch": 0.5542305508233958,
+ "grad_norm": 3.1917195320129395,
+ "learning_rate": 6.189675975213094e-05,
+ "loss": 39.812,
+ "step": 244
+ },
+ {
+ "epoch": 0.5565019875070982,
+ "grad_norm": 3.4181742668151855,
+ "learning_rate": 6.14475274975067e-05,
+ "loss": 39.8809,
+ "step": 245
+ },
+ {
+ "epoch": 0.5587734241908007,
+ "grad_norm": 3.112577438354492,
+ "learning_rate": 6.099731789198344e-05,
+ "loss": 39.8375,
+ "step": 246
+ },
+ {
+ "epoch": 0.5610448608745031,
+ "grad_norm": 3.208275079727173,
+ "learning_rate": 6.05461693729235e-05,
+ "loss": 39.763,
+ "step": 247
+ },
+ {
+ "epoch": 0.5633162975582056,
+ "grad_norm": 3.286311388015747,
+ "learning_rate": 6.009412045785051e-05,
+ "loss": 39.7137,
+ "step": 248
+ },
+ {
+ "epoch": 0.565587734241908,
+ "grad_norm": 3.2906556129455566,
+ "learning_rate": 5.9641209741160855e-05,
+ "loss": 39.6924,
+ "step": 249
+ },
+ {
+ "epoch": 0.5678591709256104,
+ "grad_norm": 3.3440194129943848,
+ "learning_rate": 5.918747589082853e-05,
+ "loss": 39.8044,
+ "step": 250
+ },
+ {
+ "epoch": 0.5701306076093129,
+ "grad_norm": 3.2966935634613037,
+ "learning_rate": 5.873295764510395e-05,
+ "loss": 39.6652,
+ "step": 251
+ },
+ {
+ "epoch": 0.5724020442930153,
+ "grad_norm": 3.207146644592285,
+ "learning_rate": 5.82776938092065e-05,
+ "loss": 39.7085,
+ "step": 252
+ },
+ {
+ "epoch": 0.5746734809767178,
+ "grad_norm": 3.005434513092041,
+ "learning_rate": 5.782172325201155e-05,
+ "loss": 39.7882,
+ "step": 253
+ },
+ {
+ "epoch": 0.5769449176604202,
+ "grad_norm": 3.200674295425415,
+ "learning_rate": 5.736508490273188e-05,
+ "loss": 39.8991,
+ "step": 254
+ },
+ {
+ "epoch": 0.5792163543441227,
+ "grad_norm": 3.2107930183410645,
+ "learning_rate": 5.6907817747594116e-05,
+ "loss": 39.6958,
+ "step": 255
+ },
+ {
+ "epoch": 0.5814877910278251,
+ "grad_norm": 3.239307165145874,
+ "learning_rate": 5.644996082651017e-05,
+ "loss": 39.6615,
+ "step": 256
+ },
+ {
+ "epoch": 0.5837592277115276,
+ "grad_norm": 3.256614923477173,
+ "learning_rate": 5.599155322974417e-05,
+ "loss": 39.5822,
+ "step": 257
+ },
+ {
+ "epoch": 0.58603066439523,
+ "grad_norm": 3.1306862831115723,
+ "learning_rate": 5.553263409457504e-05,
+ "loss": 39.7317,
+ "step": 258
+ },
+ {
+ "epoch": 0.5883021010789324,
+ "grad_norm": 3.114198923110962,
+ "learning_rate": 5.507324260195515e-05,
+ "loss": 39.654,
+ "step": 259
+ },
+ {
+ "epoch": 0.5905735377626349,
+ "grad_norm": 3.288999319076538,
+ "learning_rate": 5.4613417973165106e-05,
+ "loss": 39.5748,
+ "step": 260
+ },
+ {
+ "epoch": 0.5928449744463373,
+ "grad_norm": 3.4110300540924072,
+ "learning_rate": 5.41531994664652e-05,
+ "loss": 39.4919,
+ "step": 261
+ },
+ {
+ "epoch": 0.5951164111300398,
+ "grad_norm": 3.2709226608276367,
+ "learning_rate": 5.3692626373743706e-05,
+ "loss": 39.5359,
+ "step": 262
+ },
+ {
+ "epoch": 0.5973878478137422,
+ "grad_norm": 3.0505809783935547,
+ "learning_rate": 5.3231738017162214e-05,
+ "loss": 39.6559,
+ "step": 263
+ },
+ {
+ "epoch": 0.5996592844974447,
+ "grad_norm": 3.287243604660034,
+ "learning_rate": 5.27705737457985e-05,
+ "loss": 39.5388,
+ "step": 264
+ },
+ {
+ "epoch": 0.6019307211811471,
+ "grad_norm": 3.1598408222198486,
+ "learning_rate": 5.230917293228699e-05,
+ "loss": 39.5886,
+ "step": 265
+ },
+ {
+ "epoch": 0.6042021578648495,
+ "grad_norm": 3.160259962081909,
+ "learning_rate": 5.184757496945726e-05,
+ "loss": 39.6173,
+ "step": 266
+ },
+ {
+ "epoch": 0.606473594548552,
+ "grad_norm": 3.2377989292144775,
+ "learning_rate": 5.138581926697082e-05,
+ "loss": 39.5624,
+ "step": 267
+ },
+ {
+ "epoch": 0.6087450312322544,
+ "grad_norm": 3.2241992950439453,
+ "learning_rate": 5.092394524795649e-05,
+ "loss": 39.5781,
+ "step": 268
+ },
+ {
+ "epoch": 0.6110164679159569,
+ "grad_norm": 3.2245700359344482,
+ "learning_rate": 5.046199234564455e-05,
+ "loss": 39.544,
+ "step": 269
+ },
+ {
+ "epoch": 0.6132879045996593,
+ "grad_norm": 3.128326177597046,
+ "learning_rate": 5e-05,
+ "loss": 39.6007,
+ "step": 270
+ },
+ {
+ "epoch": 0.6155593412833618,
+ "grad_norm": 3.128420829772949,
+ "learning_rate": 4.953800765435547e-05,
+ "loss": 39.612,
+ "step": 271
+ },
+ {
+ "epoch": 0.6178307779670642,
+ "grad_norm": 3.2072839736938477,
+ "learning_rate": 4.907605475204352e-05,
+ "loss": 39.5239,
+ "step": 272
+ },
+ {
+ "epoch": 0.6201022146507666,
+ "grad_norm": 3.284583568572998,
+ "learning_rate": 4.861418073302919e-05,
+ "loss": 39.4844,
+ "step": 273
+ },
+ {
+ "epoch": 0.6223736513344691,
+ "grad_norm": 3.2405998706817627,
+ "learning_rate": 4.8152425030542766e-05,
+ "loss": 39.5103,
+ "step": 274
+ },
+ {
+ "epoch": 0.6246450880181715,
+ "grad_norm": 3.397861957550049,
+ "learning_rate": 4.7690827067713035e-05,
+ "loss": 39.6217,
+ "step": 275
+ },
+ {
+ "epoch": 0.626916524701874,
+ "grad_norm": 3.4319255352020264,
+ "learning_rate": 4.72294262542015e-05,
+ "loss": 39.4299,
+ "step": 276
+ },
+ {
+ "epoch": 0.6291879613855764,
+ "grad_norm": 3.159431219100952,
+ "learning_rate": 4.676826198283779e-05,
+ "loss": 39.534,
+ "step": 277
+ },
+ {
+ "epoch": 0.6314593980692789,
+ "grad_norm": 3.191432237625122,
+ "learning_rate": 4.6307373626256306e-05,
+ "loss": 39.5237,
+ "step": 278
+ },
+ {
+ "epoch": 0.6337308347529813,
+ "grad_norm": 3.1432559490203857,
+ "learning_rate": 4.5846800533534815e-05,
+ "loss": 39.5283,
+ "step": 279
+ },
+ {
+ "epoch": 0.6360022714366838,
+ "grad_norm": 3.033679723739624,
+ "learning_rate": 4.5386582026834906e-05,
+ "loss": 39.61,
+ "step": 280
+ },
+ {
+ "epoch": 0.6382737081203862,
+ "grad_norm": 3.223874568939209,
+ "learning_rate": 4.492675739804486e-05,
+ "loss": 39.5168,
+ "step": 281
+ },
+ {
+ "epoch": 0.6405451448040886,
+ "grad_norm": 3.3325939178466797,
+ "learning_rate": 4.446736590542497e-05,
+ "loss": 39.4294,
+ "step": 282
+ },
+ {
+ "epoch": 0.6428165814877911,
+ "grad_norm": 3.082111120223999,
+ "learning_rate": 4.400844677025585e-05,
+ "loss": 39.5168,
+ "step": 283
+ },
+ {
+ "epoch": 0.6450880181714934,
+ "grad_norm": 3.315622091293335,
+ "learning_rate": 4.3550039173489845e-05,
+ "loss": 39.4095,
+ "step": 284
+ },
+ {
+ "epoch": 0.6473594548551959,
+ "grad_norm": 3.269645929336548,
+ "learning_rate": 4.30921822524059e-05,
+ "loss": 39.4171,
+ "step": 285
+ },
+ {
+ "epoch": 0.6496308915388983,
+ "grad_norm": 3.3954954147338867,
+ "learning_rate": 4.2634915097268115e-05,
+ "loss": 39.3404,
+ "step": 286
+ },
+ {
+ "epoch": 0.6519023282226007,
+ "grad_norm": 3.350980520248413,
+ "learning_rate": 4.2178276747988446e-05,
+ "loss": 39.3607,
+ "step": 287
+ },
+ {
+ "epoch": 0.6541737649063032,
+ "grad_norm": 3.196150302886963,
+ "learning_rate": 4.1722306190793495e-05,
+ "loss": 39.3995,
+ "step": 288
+ },
+ {
+ "epoch": 0.6564452015900056,
+ "grad_norm": 3.216571807861328,
+ "learning_rate": 4.1267042354896056e-05,
+ "loss": 39.4511,
+ "step": 289
+ },
+ {
+ "epoch": 0.6587166382737081,
+ "grad_norm": 3.1120824813842773,
+ "learning_rate": 4.0812524109171476e-05,
+ "loss": 39.5224,
+ "step": 290
+ },
+ {
+ "epoch": 0.6609880749574105,
+ "grad_norm": 3.1584837436676025,
+ "learning_rate": 4.0358790258839164e-05,
+ "loss": 39.4652,
+ "step": 291
+ },
+ {
+ "epoch": 0.663259511641113,
+ "grad_norm": 3.1293344497680664,
+ "learning_rate": 3.99058795421495e-05,
+ "loss": 39.4972,
+ "step": 292
+ },
+ {
+ "epoch": 0.6655309483248154,
+ "grad_norm": 3.218752145767212,
+ "learning_rate": 3.9453830627076516e-05,
+ "loss": 39.4812,
+ "step": 293
+ },
+ {
+ "epoch": 0.6678023850085179,
+ "grad_norm": 3.051319122314453,
+ "learning_rate": 3.9002682108016585e-05,
+ "loss": 39.533,
+ "step": 294
+ },
+ {
+ "epoch": 0.6700738216922203,
+ "grad_norm": 3.1116693019866943,
+ "learning_rate": 3.855247250249331e-05,
+ "loss": 39.4644,
+ "step": 295
+ },
+ {
+ "epoch": 0.6723452583759227,
+ "grad_norm": 3.175712823867798,
+ "learning_rate": 3.8103240247869075e-05,
+ "loss": 39.4329,
+ "step": 296
+ },
+ {
+ "epoch": 0.6746166950596252,
+ "grad_norm": 3.2702810764312744,
+ "learning_rate": 3.765502369806334e-05,
+ "loss": 39.3459,
+ "step": 297
+ },
+ {
+ "epoch": 0.6768881317433276,
+ "grad_norm": 3.3634626865386963,
+ "learning_rate": 3.720786112027822e-05,
+ "loss": 39.3088,
+ "step": 298
+ },
+ {
+ "epoch": 0.6791595684270301,
+ "grad_norm": 3.2262613773345947,
+ "learning_rate": 3.676179069173121e-05,
+ "loss": 39.3954,
+ "step": 299
+ },
+ {
+ "epoch": 0.6814310051107325,
+ "grad_norm": 3.2690017223358154,
+ "learning_rate": 3.631685049639586e-05,
+ "loss": 39.3951,
+ "step": 300
+ },
+ {
+ "epoch": 0.683702441794435,
+ "grad_norm": 3.257148265838623,
+ "learning_rate": 3.5873078521750246e-05,
+ "loss": 39.4106,
+ "step": 301
+ },
+ {
+ "epoch": 0.6859738784781374,
+ "grad_norm": 3.2068793773651123,
+ "learning_rate": 3.543051265553377e-05,
+ "loss": 39.3841,
+ "step": 302
+ },
+ {
+ "epoch": 0.6882453151618398,
+ "grad_norm": 3.2364015579223633,
+ "learning_rate": 3.498919068251237e-05,
+ "loss": 39.3645,
+ "step": 303
+ },
+ {
+ "epoch": 0.6905167518455423,
+ "grad_norm": 3.2378106117248535,
+ "learning_rate": 3.4549150281252636e-05,
+ "loss": 39.3788,
+ "step": 304
+ },
+ {
+ "epoch": 0.6927881885292447,
+ "grad_norm": 3.2840211391448975,
+ "learning_rate": 3.411042902090492e-05,
+ "loss": 39.3334,
+ "step": 305
+ },
+ {
+ "epoch": 0.6950596252129472,
+ "grad_norm": 3.144357442855835,
+ "learning_rate": 3.367306435799584e-05,
+ "loss": 39.3654,
+ "step": 306
+ },
+ {
+ "epoch": 0.6973310618966496,
+ "grad_norm": 3.175121545791626,
+ "learning_rate": 3.323709363323032e-05,
+ "loss": 39.4134,
+ "step": 307
+ },
+ {
+ "epoch": 0.6996024985803521,
+ "grad_norm": 3.283843517303467,
+ "learning_rate": 3.2802554068303596e-05,
+ "loss": 39.3028,
+ "step": 308
+ },
+ {
+ "epoch": 0.7018739352640545,
+ "grad_norm": 3.300076961517334,
+ "learning_rate": 3.236948276272337e-05,
+ "loss": 39.3181,
+ "step": 309
+ },
+ {
+ "epoch": 0.704145371947757,
+ "grad_norm": 3.330923318862915,
+ "learning_rate": 3.1937916690642356e-05,
+ "loss": 39.2996,
+ "step": 310
+ },
+ {
+ "epoch": 0.7064168086314594,
+ "grad_norm": 3.320736885070801,
+ "learning_rate": 3.150789269770155e-05,
+ "loss": 39.3073,
+ "step": 311
+ },
+ {
+ "epoch": 0.7086882453151618,
+ "grad_norm": 3.3675241470336914,
+ "learning_rate": 3.107944749788449e-05,
+ "loss": 39.298,
+ "step": 312
+ },
+ {
+ "epoch": 0.7109596819988643,
+ "grad_norm": 3.191697597503662,
+ "learning_rate": 3.065261767038275e-05,
+ "loss": 39.3497,
+ "step": 313
+ },
+ {
+ "epoch": 0.7132311186825667,
+ "grad_norm": 3.270242214202881,
+ "learning_rate": 3.0227439656472877e-05,
+ "loss": 39.3073,
+ "step": 314
+ },
+ {
+ "epoch": 0.7155025553662692,
+ "grad_norm": 3.160560131072998,
+ "learning_rate": 2.980394975640526e-05,
+ "loss": 39.4016,
+ "step": 315
+ },
+ {
+ "epoch": 0.7177739920499716,
+ "grad_norm": 3.158921003341675,
+ "learning_rate": 2.9382184126304834e-05,
+ "loss": 39.3934,
+ "step": 316
+ },
+ {
+ "epoch": 0.720045428733674,
+ "grad_norm": 3.112635374069214,
+ "learning_rate": 2.8962178775084263e-05,
+ "loss": 39.4403,
+ "step": 317
+ },
+ {
+ "epoch": 0.7223168654173765,
+ "grad_norm": 3.1762988567352295,
+ "learning_rate": 2.8543969561369556e-05,
+ "loss": 39.3596,
+ "step": 318
+ },
+ {
+ "epoch": 0.7245883021010789,
+ "grad_norm": 3.0977957248687744,
+ "learning_rate": 2.8127592190438688e-05,
+ "loss": 39.4288,
+ "step": 319
+ },
+ {
+ "epoch": 0.7268597387847814,
+ "grad_norm": 3.2995924949645996,
+ "learning_rate": 2.771308221117309e-05,
+ "loss": 39.3122,
+ "step": 320
+ },
+ {
+ "epoch": 0.7291311754684838,
+ "grad_norm": 3.220078229904175,
+ "learning_rate": 2.7300475013022663e-05,
+ "loss": 39.3328,
+ "step": 321
+ },
+ {
+ "epoch": 0.7314026121521863,
+ "grad_norm": 3.221082925796509,
+ "learning_rate": 2.688980582298435e-05,
+ "loss": 39.336,
+ "step": 322
+ },
+ {
+ "epoch": 0.7336740488358887,
+ "grad_norm": 3.362293243408203,
+ "learning_rate": 2.6481109702594542e-05,
+ "loss": 39.2548,
+ "step": 323
+ },
+ {
+ "epoch": 0.7359454855195912,
+ "grad_norm": 3.3790478706359863,
+ "learning_rate": 2.607442154493568e-05,
+ "loss": 39.2768,
+ "step": 324
+ },
+ {
+ "epoch": 0.7382169222032936,
+ "grad_norm": 3.2779300212860107,
+ "learning_rate": 2.5669776071657192e-05,
+ "loss": 39.4005,
+ "step": 325
+ },
+ {
+ "epoch": 0.740488358886996,
+ "grad_norm": 3.5260651111602783,
+ "learning_rate": 2.5267207830011068e-05,
+ "loss": 39.213,
+ "step": 326
+ },
+ {
+ "epoch": 0.7427597955706985,
+ "grad_norm": 3.3797895908355713,
+ "learning_rate": 2.4866751189902334e-05,
+ "loss": 39.2461,
+ "step": 327
+ },
+ {
+ "epoch": 0.7450312322544009,
+ "grad_norm": 3.299431085586548,
+ "learning_rate": 2.446844034095466e-05,
+ "loss": 39.2607,
+ "step": 328
+ },
+ {
+ "epoch": 0.7473026689381034,
+ "grad_norm": 3.1269869804382324,
+ "learning_rate": 2.4072309289591394e-05,
+ "loss": 39.4012,
+ "step": 329
+ },
+ {
+ "epoch": 0.7495741056218058,
+ "grad_norm": 3.103790521621704,
+ "learning_rate": 2.3678391856132204e-05,
+ "loss": 39.4544,
+ "step": 330
+ },
+ {
+ "epoch": 0.7518455423055083,
+ "grad_norm": 3.0655603408813477,
+ "learning_rate": 2.328672167190558e-05,
+ "loss": 39.3722,
+ "step": 331
+ },
+ {
+ "epoch": 0.7541169789892107,
+ "grad_norm": 3.3145480155944824,
+ "learning_rate": 2.2897332176377528e-05,
+ "loss": 39.2439,
+ "step": 332
+ },
+ {
+ "epoch": 0.7563884156729132,
+ "grad_norm": 3.251753091812134,
+ "learning_rate": 2.251025661429664e-05,
+ "loss": 39.2775,
+ "step": 333
+ },
+ {
+ "epoch": 0.7586598523566156,
+ "grad_norm": 3.268765687942505,
+ "learning_rate": 2.2125528032855724e-05,
+ "loss": 39.302,
+ "step": 334
+ },
+ {
+ "epoch": 0.760931289040318,
+ "grad_norm": 3.238736867904663,
+ "learning_rate": 2.1743179278870407e-05,
+ "loss": 39.2998,
+ "step": 335
+ },
+ {
+ "epoch": 0.7632027257240205,
+ "grad_norm": 3.285243272781372,
+ "learning_rate": 2.136324299597474e-05,
+ "loss": 39.2785,
+ "step": 336
+ },
+ {
+ "epoch": 0.7654741624077229,
+ "grad_norm": 3.3332467079162598,
+ "learning_rate": 2.098575162183422e-05,
+ "loss": 39.2638,
+ "step": 337
+ },
+ {
+ "epoch": 0.7677455990914254,
+ "grad_norm": 3.088116407394409,
+ "learning_rate": 2.061073738537635e-05,
+ "loss": 39.3631,
+ "step": 338
+ },
+ {
+ "epoch": 0.7700170357751278,
+ "grad_norm": 3.2848522663116455,
+ "learning_rate": 2.0238232304039068e-05,
+ "loss": 39.2917,
+ "step": 339
+ },
+ {
+ "epoch": 0.7722884724588303,
+ "grad_norm": 3.111776113510132,
+ "learning_rate": 1.9868268181037185e-05,
+ "loss": 39.3767,
+ "step": 340
+ },
+ {
+ "epoch": 0.7745599091425327,
+ "grad_norm": 3.173539161682129,
+ "learning_rate": 1.9500876602647167e-05,
+ "loss": 39.3018,
+ "step": 341
+ },
+ {
+ "epoch": 0.7768313458262351,
+ "grad_norm": 2.8784914016723633,
+ "learning_rate": 1.9136088935510362e-05,
+ "loss": 39.5558,
+ "step": 342
+ },
+ {
+ "epoch": 0.7791027825099376,
+ "grad_norm": 3.044440507888794,
+ "learning_rate": 1.8773936323955054e-05,
+ "loss": 39.4651,
+ "step": 343
+ },
+ {
+ "epoch": 0.78137421919364,
+ "grad_norm": 3.2132656574249268,
+ "learning_rate": 1.8414449687337464e-05,
+ "loss": 39.3461,
+ "step": 344
+ },
+ {
+ "epoch": 0.7836456558773425,
+ "grad_norm": 3.2220561504364014,
+ "learning_rate": 1.8057659717401947e-05,
+ "loss": 39.3125,
+ "step": 345
+ },
+ {
+ "epoch": 0.7859170925610448,
+ "grad_norm": 3.1892623901367188,
+ "learning_rate": 1.7703596875660645e-05,
+ "loss": 39.3226,
+ "step": 346
+ },
+ {
+ "epoch": 0.7881885292447472,
+ "grad_norm": 3.2378811836242676,
+ "learning_rate": 1.7352291390792798e-05,
+ "loss": 39.3087,
+ "step": 347
+ },
+ {
+ "epoch": 0.7904599659284497,
+ "grad_norm": 3.3155946731567383,
+ "learning_rate": 1.700377325606388e-05,
+ "loss": 39.2231,
+ "step": 348
+ },
+ {
+ "epoch": 0.7927314026121521,
+ "grad_norm": 3.3325536251068115,
+ "learning_rate": 1.665807222676495e-05,
+ "loss": 39.2634,
+ "step": 349
+ },
+ {
+ "epoch": 0.7950028392958546,
+ "grad_norm": 3.0400354862213135,
+ "learning_rate": 1.631521781767214e-05,
+ "loss": 39.4798,
+ "step": 350
+ },
+ {
+ "epoch": 0.797274275979557,
+ "grad_norm": 3.4280900955200195,
+ "learning_rate": 1.5975239300526923e-05,
+ "loss": 39.2302,
+ "step": 351
+ },
+ {
+ "epoch": 0.7995457126632595,
+ "grad_norm": 3.222968339920044,
+ "learning_rate": 1.5638165701536868e-05,
+ "loss": 39.2894,
+ "step": 352
+ },
+ {
+ "epoch": 0.8018171493469619,
+ "grad_norm": 3.081446886062622,
+ "learning_rate": 1.530402579889752e-05,
+ "loss": 39.4086,
+ "step": 353
+ },
+ {
+ "epoch": 0.8040885860306644,
+ "grad_norm": 3.1126420497894287,
+ "learning_rate": 1.4972848120335453e-05,
+ "loss": 39.3398,
+ "step": 354
+ },
+ {
+ "epoch": 0.8063600227143668,
+ "grad_norm": 3.0955779552459717,
+ "learning_rate": 1.4644660940672627e-05,
+ "loss": 39.4006,
+ "step": 355
+ },
+ {
+ "epoch": 0.8086314593980692,
+ "grad_norm": 3.705615758895874,
+ "learning_rate": 1.4319492279412388e-05,
+ "loss": 39.4957,
+ "step": 356
+ },
+ {
+ "epoch": 0.8109028960817717,
+ "grad_norm": 3.2060508728027344,
+ "learning_rate": 1.3997369898347278e-05,
+ "loss": 39.3245,
+ "step": 357
+ },
+ {
+ "epoch": 0.8131743327654741,
+ "grad_norm": 3.3021256923675537,
+ "learning_rate": 1.3678321299188801e-05,
+ "loss": 39.285,
+ "step": 358
+ },
+ {
+ "epoch": 0.8154457694491766,
+ "grad_norm": 3.4100747108459473,
+ "learning_rate": 1.336237372121944e-05,
+ "loss": 39.213,
+ "step": 359
+ },
+ {
+ "epoch": 0.817717206132879,
+ "grad_norm": 3.3174431324005127,
+ "learning_rate": 1.3049554138967051e-05,
+ "loss": 39.2722,
+ "step": 360
+ },
+ {
+ "epoch": 0.8199886428165815,
+ "grad_norm": 3.4100990295410156,
+ "learning_rate": 1.2739889259901866e-05,
+ "loss": 39.1835,
+ "step": 361
+ },
+ {
+ "epoch": 0.8222600795002839,
+ "grad_norm": 3.3656208515167236,
+ "learning_rate": 1.2433405522156332e-05,
+ "loss": 39.2537,
+ "step": 362
+ },
+ {
+ "epoch": 0.8245315161839863,
+ "grad_norm": 3.10040545463562,
+ "learning_rate": 1.2130129092267862e-05,
+ "loss": 39.323,
+ "step": 363
+ },
+ {
+ "epoch": 0.8268029528676888,
+ "grad_norm": 3.2904765605926514,
+ "learning_rate": 1.183008586294485e-05,
+ "loss": 39.292,
+ "step": 364
+ },
+ {
+ "epoch": 0.8290743895513912,
+ "grad_norm": 3.159372568130493,
+ "learning_rate": 1.1533301450856054e-05,
+ "loss": 39.3521,
+ "step": 365
+ },
+ {
+ "epoch": 0.8313458262350937,
+ "grad_norm": 3.253182888031006,
+ "learning_rate": 1.1239801194443506e-05,
+ "loss": 39.3166,
+ "step": 366
+ },
+ {
+ "epoch": 0.8336172629187961,
+ "grad_norm": 3.0811924934387207,
+ "learning_rate": 1.0949610151759232e-05,
+ "loss": 39.378,
+ "step": 367
+ },
+ {
+ "epoch": 0.8358886996024986,
+ "grad_norm": 3.2115979194641113,
+ "learning_rate": 1.066275309832584e-05,
+ "loss": 39.3243,
+ "step": 368
+ },
+ {
+ "epoch": 0.838160136286201,
+ "grad_norm": 3.2386271953582764,
+ "learning_rate": 1.0379254525021309e-05,
+ "loss": 39.2904,
+ "step": 369
+ },
+ {
+ "epoch": 0.8404315729699035,
+ "grad_norm": 3.2060539722442627,
+ "learning_rate": 1.0099138635988026e-05,
+ "loss": 39.3027,
+ "step": 370
+ },
+ {
+ "epoch": 0.8427030096536059,
+ "grad_norm": 3.284043312072754,
+ "learning_rate": 9.822429346566314e-06,
+ "loss": 39.2547,
+ "step": 371
+ },
+ {
+ "epoch": 0.8449744463373083,
+ "grad_norm": 3.284778594970703,
+ "learning_rate": 9.549150281252633e-06,
+ "loss": 39.2768,
+ "step": 372
+ },
+ {
+ "epoch": 0.8472458830210108,
+ "grad_norm": 3.3626978397369385,
+ "learning_rate": 9.279324771682585e-06,
+ "loss": 39.2264,
+ "step": 373
+ },
+ {
+ "epoch": 0.8495173197047132,
+ "grad_norm": 3.3178279399871826,
+ "learning_rate": 9.012975854638949e-06,
+ "loss": 39.2468,
+ "step": 374
+ },
+ {
+ "epoch": 0.8517887563884157,
+ "grad_norm": 3.1009223461151123,
+ "learning_rate": 8.75012627008489e-06,
+ "loss": 39.3837,
+ "step": 375
+ },
+ {
+ "epoch": 0.8540601930721181,
+ "grad_norm": 3.5075793266296387,
+ "learning_rate": 8.490798459222476e-06,
+ "loss": 39.1917,
+ "step": 376
+ },
+ {
+ "epoch": 0.8563316297558206,
+ "grad_norm": 3.315697193145752,
+ "learning_rate": 8.235014562576732e-06,
+ "loss": 39.2783,
+ "step": 377
+ },
+ {
+ "epoch": 0.858603066439523,
+ "grad_norm": 3.2278759479522705,
+ "learning_rate": 7.982796418105371e-06,
+ "loss": 39.334,
+ "step": 378
+ },
+ {
+ "epoch": 0.8608745031232254,
+ "grad_norm": 3.0192768573760986,
+ "learning_rate": 7.734165559334328e-06,
+ "loss": 39.4135,
+ "step": 379
+ },
+ {
+ "epoch": 0.8631459398069279,
+ "grad_norm": 3.283160924911499,
+ "learning_rate": 7.489143213519301e-06,
+ "loss": 39.2609,
+ "step": 380
+ },
+ {
+ "epoch": 0.8654173764906303,
+ "grad_norm": 3.1758711338043213,
+ "learning_rate": 7.2477502998334505e-06,
+ "loss": 39.2964,
+ "step": 381
+ },
+ {
+ "epoch": 0.8676888131743328,
+ "grad_norm": 3.1060142517089844,
+ "learning_rate": 7.010007427581378e-06,
+ "loss": 39.3985,
+ "step": 382
+ },
+ {
+ "epoch": 0.8699602498580352,
+ "grad_norm": 3.13398814201355,
+ "learning_rate": 6.775934894439606e-06,
+ "loss": 39.4141,
+ "step": 383
+ },
+ {
+ "epoch": 0.8722316865417377,
+ "grad_norm": 3.2694761753082275,
+ "learning_rate": 6.5455526847235825e-06,
+ "loss": 39.2613,
+ "step": 384
+ },
+ {
+ "epoch": 0.8745031232254401,
+ "grad_norm": 3.3462014198303223,
+ "learning_rate": 6.318880467681526e-06,
+ "loss": 39.2397,
+ "step": 385
+ },
+ {
+ "epoch": 0.8767745599091425,
+ "grad_norm": 3.345944881439209,
+ "learning_rate": 6.0959375958151045e-06,
+ "loss": 39.2334,
+ "step": 386
+ },
+ {
+ "epoch": 0.879045996592845,
+ "grad_norm": 3.225269079208374,
+ "learning_rate": 5.876743103227217e-06,
+ "loss": 39.2799,
+ "step": 387
+ },
+ {
+ "epoch": 0.8813174332765474,
+ "grad_norm": 3.058452844619751,
+ "learning_rate": 5.6613157039969055e-06,
+ "loss": 39.4356,
+ "step": 388
+ },
+ {
+ "epoch": 0.8835888699602499,
+ "grad_norm": 3.2678914070129395,
+ "learning_rate": 5.449673790581611e-06,
+ "loss": 39.2977,
+ "step": 389
+ },
+ {
+ "epoch": 0.8858603066439523,
+ "grad_norm": 3.111703395843506,
+ "learning_rate": 5.241835432246889e-06,
+ "loss": 39.3645,
+ "step": 390
+ },
+ {
+ "epoch": 0.8881317433276548,
+ "grad_norm": 3.189887046813965,
+ "learning_rate": 5.037818373523723e-06,
+ "loss": 39.3412,
+ "step": 391
+ },
+ {
+ "epoch": 0.8904031800113572,
+ "grad_norm": 3.096855640411377,
+ "learning_rate": 4.837640032693558e-06,
+ "loss": 39.4025,
+ "step": 392
+ },
+ {
+ "epoch": 0.8926746166950597,
+ "grad_norm": 2.981090784072876,
+ "learning_rate": 4.641317500301173e-06,
+ "loss": 39.5613,
+ "step": 393
+ },
+ {
+ "epoch": 0.8949460533787621,
+ "grad_norm": 3.221526861190796,
+ "learning_rate": 4.448867537695578e-06,
+ "loss": 39.298,
+ "step": 394
+ },
+ {
+ "epoch": 0.8972174900624645,
+ "grad_norm": 3.0834949016571045,
+ "learning_rate": 4.260306575598949e-06,
+ "loss": 39.4121,
+ "step": 395
+ },
+ {
+ "epoch": 0.899488926746167,
+ "grad_norm": 3.3117966651916504,
+ "learning_rate": 4.075650712703849e-06,
+ "loss": 39.3448,
+ "step": 396
+ },
+ {
+ "epoch": 0.9017603634298694,
+ "grad_norm": 3.3456838130950928,
+ "learning_rate": 3.8949157142987746e-06,
+ "loss": 39.2115,
+ "step": 397
+ },
+ {
+ "epoch": 0.9040318001135719,
+ "grad_norm": 3.269028902053833,
+ "learning_rate": 3.71811701092219e-06,
+ "loss": 39.2706,
+ "step": 398
+ },
+ {
+ "epoch": 0.9063032367972743,
+ "grad_norm": 3.4121360778808594,
+ "learning_rate": 3.545269697045067e-06,
+ "loss": 39.2025,
+ "step": 399
+ },
+ {
+ "epoch": 0.9085746734809768,
+ "grad_norm": 3.294390916824341,
+ "learning_rate": 3.376388529782215e-06,
+ "loss": 39.3322,
+ "step": 400
+ },
+ {
+ "epoch": 0.9108461101646792,
+ "grad_norm": 3.429776668548584,
+ "learning_rate": 3.2114879276323783e-06,
+ "loss": 39.2353,
+ "step": 401
+ },
+ {
+ "epoch": 0.9131175468483816,
+ "grad_norm": 3.1151254177093506,
+ "learning_rate": 3.0505819692471792e-06,
+ "loss": 39.3944,
+ "step": 402
+ },
+ {
+ "epoch": 0.9153889835320841,
+ "grad_norm": 3.2361867427825928,
+ "learning_rate": 2.8936843922291847e-06,
+ "loss": 39.2727,
+ "step": 403
+ },
+ {
+ "epoch": 0.9176604202157865,
+ "grad_norm": 3.2054972648620605,
+ "learning_rate": 2.7408085919590264e-06,
+ "loss": 39.316,
+ "step": 404
+ },
+ {
+ "epoch": 0.919931856899489,
+ "grad_norm": 3.019122362136841,
+ "learning_rate": 2.591967620451707e-06,
+ "loss": 39.4442,
+ "step": 405
+ },
+ {
+ "epoch": 0.9222032935831914,
+ "grad_norm": 3.0654804706573486,
+ "learning_rate": 2.4471741852423237e-06,
+ "loss": 39.4069,
+ "step": 406
+ },
+ {
+ "epoch": 0.9244747302668939,
+ "grad_norm": 3.1281793117523193,
+ "learning_rate": 2.306440648301095e-06,
+ "loss": 39.3489,
+ "step": 407
+ },
+ {
+ "epoch": 0.9267461669505963,
+ "grad_norm": 3.2688112258911133,
+ "learning_rate": 2.1697790249779636e-06,
+ "loss": 39.3093,
+ "step": 408
+ },
+ {
+ "epoch": 0.9290176036342986,
+ "grad_norm": 3.3317718505859375,
+ "learning_rate": 2.0372009829767557e-06,
+ "loss": 39.2383,
+ "step": 409
+ },
+ {
+ "epoch": 0.9312890403180011,
+ "grad_norm": 3.1908419132232666,
+ "learning_rate": 1.908717841359048e-06,
+ "loss": 39.3652,
+ "step": 410
+ },
+ {
+ "epoch": 0.9335604770017035,
+ "grad_norm": 3.279597043991089,
+ "learning_rate": 1.784340569577758e-06,
+ "loss": 39.2765,
+ "step": 411
+ },
+ {
+ "epoch": 0.935831913685406,
+ "grad_norm": 3.275874614715576,
+ "learning_rate": 1.6640797865406288e-06,
+ "loss": 39.2798,
+ "step": 412
+ },
+ {
+ "epoch": 0.9381033503691084,
+ "grad_norm": 3.114420175552368,
+ "learning_rate": 1.547945759703623e-06,
+ "loss": 39.3152,
+ "step": 413
+ },
+ {
+ "epoch": 0.9403747870528109,
+ "grad_norm": 3.284017324447632,
+ "learning_rate": 1.4359484041943038e-06,
+ "loss": 39.3038,
+ "step": 414
+ },
+ {
+ "epoch": 0.9426462237365133,
+ "grad_norm": 3.1268672943115234,
+ "learning_rate": 1.328097281965357e-06,
+ "loss": 39.361,
+ "step": 415
+ },
+ {
+ "epoch": 0.9449176604202157,
+ "grad_norm": 3.221214532852173,
+ "learning_rate": 1.2244016009781701e-06,
+ "loss": 39.2966,
+ "step": 416
+ },
+ {
+ "epoch": 0.9471890971039182,
+ "grad_norm": 3.1034584045410156,
+ "learning_rate": 1.1248702144167122e-06,
+ "loss": 39.4244,
+ "step": 417
+ },
+ {
+ "epoch": 0.9494605337876206,
+ "grad_norm": 3.3153748512268066,
+ "learning_rate": 1.0295116199317057e-06,
+ "loss": 39.2435,
+ "step": 418
+ },
+ {
+ "epoch": 0.9517319704713231,
+ "grad_norm": 3.1586427688598633,
+ "learning_rate": 9.383339589150775e-07,
+ "loss": 39.3637,
+ "step": 419
+ },
+ {
+ "epoch": 0.9540034071550255,
+ "grad_norm": 3.3240771293640137,
+ "learning_rate": 8.513450158049108e-07,
+ "loss": 39.4772,
+ "step": 420
+ },
+ {
+ "epoch": 0.956274843838728,
+ "grad_norm": 3.2368547916412354,
+ "learning_rate": 7.685522174208204e-07,
+ "loss": 39.3071,
+ "step": 421
+ },
+ {
+ "epoch": 0.9585462805224304,
+ "grad_norm": 3.236651659011841,
+ "learning_rate": 6.899626323298713e-07,
+ "loss": 39.3127,
+ "step": 422
+ },
+ {
+ "epoch": 0.9608177172061328,
+ "grad_norm": 3.3495962619781494,
+ "learning_rate": 6.15582970243117e-07,
+ "loss": 39.2375,
+ "step": 423
+ },
+ {
+ "epoch": 0.9630891538898353,
+ "grad_norm": 3.3962039947509766,
+ "learning_rate": 5.454195814427021e-07,
+ "loss": 39.2121,
+ "step": 424
+ },
+ {
+ "epoch": 0.9653605905735377,
+ "grad_norm": 3.2395989894866943,
+ "learning_rate": 4.794784562397458e-07,
+ "loss": 39.3095,
+ "step": 425
+ },
+ {
+ "epoch": 0.9676320272572402,
+ "grad_norm": 3.381061315536499,
+ "learning_rate": 4.177652244628627e-07,
+ "loss": 39.2333,
+ "step": 426
+ },
+ {
+ "epoch": 0.9699034639409426,
+ "grad_norm": 3.2255821228027344,
+ "learning_rate": 3.602851549775521e-07,
+ "loss": 39.429,
+ "step": 427
+ },
+ {
+ "epoch": 0.9721749006246451,
+ "grad_norm": 3.1304969787597656,
+ "learning_rate": 3.0704315523631953e-07,
+ "loss": 39.4258,
+ "step": 428
+ },
+ {
+ "epoch": 0.9744463373083475,
+ "grad_norm": 3.189854145050049,
+ "learning_rate": 2.5804377085972274e-07,
+ "loss": 39.324,
+ "step": 429
+ },
+ {
+ "epoch": 0.97671777399205,
+ "grad_norm": 3.220968246459961,
+ "learning_rate": 2.1329118524827662e-07,
+ "loss": 39.285,
+ "step": 430
+ },
+ {
+ "epoch": 0.9789892106757524,
+ "grad_norm": 3.2234549522399902,
+ "learning_rate": 1.7278921922527225e-07,
+ "loss": 39.3381,
+ "step": 431
+ },
+ {
+ "epoch": 0.9812606473594548,
+ "grad_norm": 3.1990137100219727,
+ "learning_rate": 1.3654133071059893e-07,
+ "loss": 39.3761,
+ "step": 432
+ },
+ {
+ "epoch": 0.9835320840431573,
+ "grad_norm": 3.230268716812134,
+ "learning_rate": 1.0455061442548597e-07,
+ "loss": 39.3554,
+ "step": 433
+ },
+ {
+ "epoch": 0.9858035207268597,
+ "grad_norm": 3.22092866897583,
+ "learning_rate": 7.681980162830282e-08,
+ "loss": 39.281,
+ "step": 434
+ },
+ {
+ "epoch": 0.9880749574105622,
+ "grad_norm": 3.2216899394989014,
+ "learning_rate": 5.3351259881379014e-08,
+ "loss": 39.3009,
+ "step": 435
+ },
+ {
+ "epoch": 0.9903463940942646,
+ "grad_norm": 3.283959150314331,
+ "learning_rate": 3.4146992848854695e-08,
+ "loss": 39.2686,
+ "step": 436
+ },
+ {
+ "epoch": 0.9926178307779671,
+ "grad_norm": 3.333789825439453,
+ "learning_rate": 1.920864012562862e-08,
+ "loss": 39.2601,
+ "step": 437
+ },
+ {
+ "epoch": 0.9948892674616695,
+ "grad_norm": 3.285109281539917,
+ "learning_rate": 8.537477097364522e-09,
+ "loss": 39.2603,
+ "step": 438
+ },
+ {
+ "epoch": 0.997160704145372,
+ "grad_norm": 3.0986461639404297,
+ "learning_rate": 2.1344148316060354e-09,
+ "loss": 39.4098,
+ "step": 439
+ },
+ {
+ "epoch": 0.9994321408290744,
+ "grad_norm": 3.2520036697387695,
+ "learning_rate": 0.0,
+ "loss": 39.3073,
+ "step": 440
 }
 ],
 "logging_steps": 1,
@@ -1694,12 +3101,12 @@
 "should_evaluate": false,
 "should_log": false,
 "should_save": true,
- "should_training_stop": false
+ "should_training_stop": true
 },
 "attributes": {}
 }
 },
- "total_flos": 442470695436288.0,
+ "total_flos": 811196274966528.0,
 "train_batch_size": 4,
 "trial_name": null,
 "trial_params": null