zitongyang committed on
Commit d323924 · verified · 1 Parent(s): 049b2c2

Upload folder using huggingface_hub

Files changed (34)
  1. checkpoint-488/config.json +29 -0
  2. checkpoint-488/generation_config.json +14 -0
  3. checkpoint-488/model-00001-of-00029.safetensors +3 -0
  4. checkpoint-488/model-00002-of-00029.safetensors +3 -0
  5. checkpoint-488/model-00003-of-00029.safetensors +3 -0
  6. checkpoint-488/model-00004-of-00029.safetensors +3 -0
  7. checkpoint-488/model-00005-of-00029.safetensors +3 -0
  8. checkpoint-488/model-00006-of-00029.safetensors +3 -0
  9. checkpoint-488/model-00007-of-00029.safetensors +3 -0
  10. checkpoint-488/model-00008-of-00029.safetensors +3 -0
  11. checkpoint-488/model-00009-of-00029.safetensors +3 -0
  12. checkpoint-488/model-00010-of-00029.safetensors +3 -0
  13. checkpoint-488/model-00011-of-00029.safetensors +3 -0
  14. checkpoint-488/model-00012-of-00029.safetensors +3 -0
  15. checkpoint-488/model-00013-of-00029.safetensors +3 -0
  16. checkpoint-488/model-00014-of-00029.safetensors +3 -0
  17. checkpoint-488/model-00015-of-00029.safetensors +3 -0
  18. checkpoint-488/model-00016-of-00029.safetensors +3 -0
  19. checkpoint-488/model-00017-of-00029.safetensors +3 -0
  20. checkpoint-488/model-00018-of-00029.safetensors +3 -0
  21. checkpoint-488/model-00019-of-00029.safetensors +3 -0
  22. checkpoint-488/model-00020-of-00029.safetensors +3 -0
  23. checkpoint-488/model-00021-of-00029.safetensors +3 -0
  24. checkpoint-488/model-00022-of-00029.safetensors +3 -0
  25. checkpoint-488/model-00023-of-00029.safetensors +3 -0
  26. checkpoint-488/model-00024-of-00029.safetensors +3 -0
  27. checkpoint-488/model-00025-of-00029.safetensors +3 -0
  28. checkpoint-488/model-00026-of-00029.safetensors +3 -0
  29. checkpoint-488/model-00027-of-00029.safetensors +3 -0
  30. checkpoint-488/model-00028-of-00029.safetensors +3 -0
  31. checkpoint-488/model-00029-of-00029.safetensors +3 -0
  32. checkpoint-488/model.safetensors.index.json +778 -0
  33. checkpoint-488/trainer_state.json +3449 -0
  34. checkpoint-488/training_args.bin +3 -0
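
The commit message above says the folder was uploaded with huggingface_hub; the symmetric way to fetch just this checkpoint is a filtered `snapshot_download`. A minimal sketch; the `repo_id` is a placeholder, since the commit page does not show it:

```python
# Minimal sketch: fetching this checkpoint folder with huggingface_hub.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="<user>/<repo>",              # placeholder, not shown on this page
    allow_patterns=["checkpoint-488/*"],  # fetch only this checkpoint's files
)
print(local_dir)
```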
checkpoint-488/config.json ADDED
@@ -0,0 +1,29 @@
+ {
+ "_name_or_path": "Qwen/Qwen2.5-32B-Instruct",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "intermediate_size": 27648,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 70,
+ "model_type": "qwen2",
+ "num_attention_heads": 40,
+ "num_hidden_layers": 64,
+ "num_key_value_heads": 8,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "float32",
+ "transformers_version": "4.46.1",
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
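
The config identifies this checkpoint as a full fine-tune of Qwen/Qwen2.5-32B-Instruct (64 layers, hidden size 5120, GQA with 40 query and 8 key/value heads), stored in float32. A minimal loading sketch, assuming the checkpoint-488 folder above is available locally; casting to bfloat16 at load time is an optional step to roughly halve the ~131 GB float32 footprint:

```python
# Minimal sketch: loading this checkpoint with transformers, assuming the
# checkpoint-488/ folder is available locally.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("checkpoint-488")
print(config.model_type, config.num_hidden_layers, config.hidden_size)
# qwen2 64 5120

# The shards are float32 (~131 GB total); loading in bfloat16 halves that.
# device_map="auto" shards across available GPUs and requires accelerate.
model = AutoModelForCausalLM.from_pretrained(
    "checkpoint-488",
    torch_dtype=torch.bfloat16,
    device_map="auto",
)
```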
checkpoint-488/generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "pad_token_id": 151643,
+ "repetition_penalty": 1.05,
+ "temperature": 0.7,
+ "top_k": 20,
+ "top_p": 0.8,
+ "transformers_version": "4.46.1"
+ }
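
These defaults (sampling at temperature 0.7 with top_p 0.8 and top_k 20, plus a 1.05 repetition penalty) are picked up automatically by `model.generate()`. A sketch with the equivalent explicit arguments, reusing `model` from the loading sketch above; taking the tokenizer from the base model named in config.json is an assumption, since this folder ships no tokenizer files:

```python
# Sketch: explicit arguments equivalent to generation_config.json, reusing
# `model` from the loading sketch above. Tokenizer from the base model is an
# assumption; this checkpoint folder contains no tokenizer files.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-32B-Instruct")
prompt = tokenizer.apply_chat_template(
    [{"role": "user", "content": "Hello!"}],
    add_generation_prompt=True,
    tokenize=False,
)
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
out = model.generate(
    **inputs,
    do_sample=True,
    temperature=0.7,
    top_p=0.8,
    top_k=20,
    repetition_penalty=1.05,
    max_new_tokens=256,
)
print(tokenizer.decode(out[0][inputs["input_ids"].shape[1]:],
                       skip_special_tokens=True))
```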
checkpoint-488/model-00001-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3925686029c5d68f2d1b127f111b6d4c86408a56e2b799e123d2d62163eb038f
+ size 4498420872
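
Each .safetensors entry in this diff is a Git LFS pointer rather than the weights themselves: the three lines record the pointer spec version, the SHA-256 of the real file, and its byte size, while the blob lives in LFS storage. A small sketch for verifying a downloaded shard against its pointer:

```python
# Sketch: verify a downloaded shard against the LFS pointer's oid and size.
import hashlib
from pathlib import Path

def verify_shard(path: str, expected_oid: str, expected_size: int) -> bool:
    p = Path(path)
    if p.stat().st_size != expected_size:
        return False
    h = hashlib.sha256()
    with p.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream ~4.5 GB files
            h.update(chunk)
    return h.hexdigest() == expected_oid

print(verify_shard(
    "checkpoint-488/model-00001-of-00029.safetensors",
    "3925686029c5d68f2d1b127f111b6d4c86408a56e2b799e123d2d62163eb038f",
    4498420872,
))
```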
checkpoint-488/model-00002-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc21d957d2ba08c653cb9d700fda1892e2c299af1f0aaf96b951f89591352d91
+ size 4718804768
checkpoint-488/model-00003-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:11312f0e892e58511850e04d173503ae9072bb10f1ab1e8001e920724aaccb12
+ size 4467075880
checkpoint-488/model-00004-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:144e44afde7d7b2c4310f9e048eb68247bb12dde481d61303eedb9d52e0929dc
+ size 4467075880
checkpoint-488/model-00005-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:172f1542703f7b4a844a1837410270839a062c66ce1ccad4cf75b38d4e366344
+ size 4718804760
checkpoint-488/model-00006-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:622580e9769c07789ded808118ceef8fdedc84f4b13ddfc643e979c42ab58d3d
+ size 4467075904
checkpoint-488/model-00007-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:410dd6e20d83e0e5458d16f95ba94ce969eea5b9c80a78f449e56e3479e7382f
+ size 4467075904
checkpoint-488/model-00008-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0ae1603fae21f7f3c615fb6a0f9670f68caed49d6a0db540c7399978ea08f7a4
+ size 4718804800
checkpoint-488/model-00009-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ae2bcdfadc16fcb33161a161410e2c746da0995303bbfd69caf0803995841873
+ size 4467075904
checkpoint-488/model-00010-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a4e3a72135703d600c4679d9da1859398cd52517f937190503c9e6ab3ebd0ef3
+ size 4467075904
checkpoint-488/model-00011-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a2baee1f6409b0b88613043924c4a8c4a883e61794c6a5cbbf9e1b6b704f598
+ size 4718804800
checkpoint-488/model-00012-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7792a83f806800fd9eca9c30b3d85476ff3810fef24825768b957203cf09b59e
+ size 4467075904
checkpoint-488/model-00013-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8d0b733cdc5273beda1948b56863de9a9fdbc688ce8a5200e1c43bbf4a7ac3fe
+ size 4467075904
checkpoint-488/model-00014-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:69bc5dc84b0ea161e5dc22c7194c1bf35971d7b0d7796e999c69cbe1fa2eed93
+ size 4718804800
checkpoint-488/model-00015-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce19764f387e71683bbae4573b8887ce993e56bc7a77b0a0c84b62efe397dbd4
+ size 4467075904
checkpoint-488/model-00016-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:426b134a84cae9c9fdbd49b73da4bcdd6e439186724d18d526eda758f0134d25
+ size 4467075904
checkpoint-488/model-00017-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eb3a1b8c1e43dd613d6357989de03b5b49324d718a860912ec2fb6446b4a0bbe
+ size 4718804800
checkpoint-488/model-00018-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2018b61b60747f3c4ec8ac8d77273757abad2544f0bf394c97c8349c933fd9a7
+ size 4467075904
checkpoint-488/model-00019-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a6bba325e2b8c5203e3034204209ee16893a9b8440dc2dc2a850b5d83dd5a2de
+ size 4467075904
checkpoint-488/model-00020-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a54e0dfd050eb03ba5d546cb9b76254aba70625dcbffd64def908ab0fc66497
+ size 4718804800
checkpoint-488/model-00021-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ccc2602b69befd91b280176df98b369fe2cde0aee2650617f1a8009a507b9b9c
+ size 4467075904
checkpoint-488/model-00022-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:418c77430d1daf488d466f872456b53e25040ab3d79a9e5ad87d3f203bc94303
+ size 4467075904
checkpoint-488/model-00023-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d33779735bb220cb23551b6b74454c7c3fbb4a5657d62124d67ec32816f32e2f
+ size 4718804800
checkpoint-488/model-00024-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:efeefa8c6d25087f63fcf890137494393b4e65a3b76fd57560c6b13abd959f53
+ size 4467075904
checkpoint-488/model-00025-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:580ac3b0a596e5a2ae8c12d73d256be2a2f9ffaeb42984504dccf25ab61ff65c
+ size 4467075904
checkpoint-488/model-00026-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f99131a1d542e95587aa7c3a79611ffe21387e3ea921a6eec220e2a61cb16386
+ size 4718804800
checkpoint-488/model-00027-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e52396fc8995a006fa3c7ecc12186d1217344bfd8c9ae3a1bc29b9ef76bbba18
+ size 4467075904
checkpoint-488/model-00028-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:071309069403e6aa797ab7355d9138ff3ac9f70d1dfc0cad6a5fe0ff51f5cb1e
+ size 4467075904
checkpoint-488/model-00029-of-00029.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:81a3f62120924ac5b3d07a05b0cd770cb8996dc30aade1a40992031f0b37ee4f
+ size 3680563768
checkpoint-488/model.safetensors.index.json ADDED
@@ -0,0 +1,778 @@
+ {
+ "metadata": {
+ "total_size": 131055505408
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00029-of-00029.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00029.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00002-of-00029.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00029.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00029.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00002-of-00029.safetensors",
+ "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00029.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00029.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00029.safetensors",
+ "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00029.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00029.safetensors",
+ "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00029.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00029.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00002-of-00029.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00029.safetensors",
+ "model.layers.1.self_attn.k_proj.bias": "model-00002-of-00029.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.1.self_attn.q_proj.bias": "model-00002-of-00029.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.1.self_attn.v_proj.bias": "model-00002-of-00029.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00006-of-00029.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00006-of-00029.safetensors",
+ "model.layers.10.self_attn.k_proj.bias": "model-00005-of-00029.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00005-of-00029.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00005-of-00029.safetensors",
+ "model.layers.10.self_attn.q_proj.bias": "model-00005-of-00029.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00005-of-00029.safetensors",
+ "model.layers.10.self_attn.v_proj.bias": "model-00005-of-00029.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00005-of-00029.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00006-of-00029.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00006-of-00029.safetensors",
+ "model.layers.11.self_attn.k_proj.bias": "model-00006-of-00029.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.11.self_attn.q_proj.bias": "model-00006-of-00029.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.11.self_attn.v_proj.bias": "model-00006-of-00029.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00007-of-00029.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00007-of-00029.safetensors",
+ "model.layers.12.self_attn.k_proj.bias": "model-00006-of-00029.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.12.self_attn.q_proj.bias": "model-00006-of-00029.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.12.self_attn.v_proj.bias": "model-00006-of-00029.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00006-of-00029.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00007-of-00029.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00007-of-00029.safetensors",
+ "model.layers.13.self_attn.k_proj.bias": "model-00007-of-00029.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.13.self_attn.q_proj.bias": "model-00007-of-00029.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.13.self_attn.v_proj.bias": "model-00007-of-00029.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00008-of-00029.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00008-of-00029.safetensors",
+ "model.layers.14.self_attn.k_proj.bias": "model-00007-of-00029.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.14.self_attn.q_proj.bias": "model-00007-of-00029.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.14.self_attn.v_proj.bias": "model-00007-of-00029.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00007-of-00029.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00008-of-00029.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00008-of-00029.safetensors",
+ "model.layers.15.self_attn.k_proj.bias": "model-00008-of-00029.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.15.self_attn.q_proj.bias": "model-00008-of-00029.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.15.self_attn.v_proj.bias": "model-00008-of-00029.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00008-of-00029.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00008-of-00029.safetensors",
+ "model.layers.16.self_attn.k_proj.bias": "model-00008-of-00029.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.16.self_attn.q_proj.bias": "model-00008-of-00029.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.16.self_attn.v_proj.bias": "model-00008-of-00029.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00009-of-00029.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00009-of-00029.safetensors",
+ "model.layers.17.self_attn.k_proj.bias": "model-00008-of-00029.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.17.self_attn.q_proj.bias": "model-00008-of-00029.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.17.self_attn.v_proj.bias": "model-00008-of-00029.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00008-of-00029.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00009-of-00029.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00009-of-00029.safetensors",
+ "model.layers.18.self_attn.k_proj.bias": "model-00009-of-00029.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.18.self_attn.q_proj.bias": "model-00009-of-00029.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.18.self_attn.v_proj.bias": "model-00009-of-00029.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00010-of-00029.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00010-of-00029.safetensors",
+ "model.layers.19.self_attn.k_proj.bias": "model-00009-of-00029.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.19.self_attn.q_proj.bias": "model-00009-of-00029.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.19.self_attn.v_proj.bias": "model-00009-of-00029.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00009-of-00029.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00002-of-00029.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00002-of-00029.safetensors",
+ "model.layers.2.self_attn.k_proj.bias": "model-00002-of-00029.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.2.self_attn.q_proj.bias": "model-00002-of-00029.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.2.self_attn.v_proj.bias": "model-00002-of-00029.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00010-of-00029.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00010-of-00029.safetensors",
+ "model.layers.20.self_attn.k_proj.bias": "model-00010-of-00029.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.20.self_attn.q_proj.bias": "model-00010-of-00029.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.20.self_attn.v_proj.bias": "model-00010-of-00029.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00011-of-00029.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00011-of-00029.safetensors",
+ "model.layers.21.self_attn.k_proj.bias": "model-00010-of-00029.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.21.self_attn.q_proj.bias": "model-00010-of-00029.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.21.self_attn.v_proj.bias": "model-00010-of-00029.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00010-of-00029.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00011-of-00029.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00011-of-00029.safetensors",
+ "model.layers.22.self_attn.k_proj.bias": "model-00011-of-00029.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.22.self_attn.q_proj.bias": "model-00011-of-00029.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.22.self_attn.v_proj.bias": "model-00011-of-00029.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00011-of-00029.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00011-of-00029.safetensors",
+ "model.layers.23.self_attn.k_proj.bias": "model-00011-of-00029.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.23.self_attn.q_proj.bias": "model-00011-of-00029.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.23.self_attn.v_proj.bias": "model-00011-of-00029.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00012-of-00029.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00012-of-00029.safetensors",
+ "model.layers.24.self_attn.k_proj.bias": "model-00011-of-00029.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.24.self_attn.q_proj.bias": "model-00011-of-00029.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.24.self_attn.v_proj.bias": "model-00011-of-00029.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00011-of-00029.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00012-of-00029.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00012-of-00029.safetensors",
+ "model.layers.25.self_attn.k_proj.bias": "model-00012-of-00029.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.25.self_attn.q_proj.bias": "model-00012-of-00029.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.25.self_attn.v_proj.bias": "model-00012-of-00029.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00013-of-00029.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00013-of-00029.safetensors",
+ "model.layers.26.self_attn.k_proj.bias": "model-00012-of-00029.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.26.self_attn.q_proj.bias": "model-00012-of-00029.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.26.self_attn.v_proj.bias": "model-00012-of-00029.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00012-of-00029.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00013-of-00029.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00013-of-00029.safetensors",
+ "model.layers.27.self_attn.k_proj.bias": "model-00013-of-00029.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.27.self_attn.q_proj.bias": "model-00013-of-00029.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.27.self_attn.v_proj.bias": "model-00013-of-00029.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00014-of-00029.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00014-of-00029.safetensors",
+ "model.layers.28.self_attn.k_proj.bias": "model-00013-of-00029.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.28.self_attn.q_proj.bias": "model-00013-of-00029.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.28.self_attn.v_proj.bias": "model-00013-of-00029.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00013-of-00029.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00014-of-00029.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00014-of-00029.safetensors",
+ "model.layers.29.self_attn.k_proj.bias": "model-00014-of-00029.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.29.self_attn.q_proj.bias": "model-00014-of-00029.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.29.self_attn.v_proj.bias": "model-00014-of-00029.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00003-of-00029.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00003-of-00029.safetensors",
+ "model.layers.3.self_attn.k_proj.bias": "model-00002-of-00029.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.3.self_attn.q_proj.bias": "model-00002-of-00029.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.3.self_attn.v_proj.bias": "model-00002-of-00029.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00002-of-00029.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00014-of-00029.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00014-of-00029.safetensors",
+ "model.layers.30.self_attn.k_proj.bias": "model-00014-of-00029.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.30.self_attn.q_proj.bias": "model-00014-of-00029.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.30.self_attn.v_proj.bias": "model-00014-of-00029.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00015-of-00029.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00015-of-00029.safetensors",
+ "model.layers.31.self_attn.k_proj.bias": "model-00014-of-00029.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.31.self_attn.q_proj.bias": "model-00014-of-00029.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.31.self_attn.v_proj.bias": "model-00014-of-00029.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00014-of-00029.safetensors",
+ "model.layers.32.input_layernorm.weight": "model-00015-of-00029.safetensors",
+ "model.layers.32.mlp.down_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.32.mlp.gate_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.32.mlp.up_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.32.post_attention_layernorm.weight": "model-00015-of-00029.safetensors",
+ "model.layers.32.self_attn.k_proj.bias": "model-00015-of-00029.safetensors",
+ "model.layers.32.self_attn.k_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.32.self_attn.o_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.32.self_attn.q_proj.bias": "model-00015-of-00029.safetensors",
+ "model.layers.32.self_attn.q_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.32.self_attn.v_proj.bias": "model-00015-of-00029.safetensors",
+ "model.layers.32.self_attn.v_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.33.input_layernorm.weight": "model-00016-of-00029.safetensors",
+ "model.layers.33.mlp.down_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.33.mlp.gate_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.33.mlp.up_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.33.post_attention_layernorm.weight": "model-00016-of-00029.safetensors",
+ "model.layers.33.self_attn.k_proj.bias": "model-00015-of-00029.safetensors",
+ "model.layers.33.self_attn.k_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.33.self_attn.o_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.33.self_attn.q_proj.bias": "model-00015-of-00029.safetensors",
+ "model.layers.33.self_attn.q_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.33.self_attn.v_proj.bias": "model-00015-of-00029.safetensors",
+ "model.layers.33.self_attn.v_proj.weight": "model-00015-of-00029.safetensors",
+ "model.layers.34.input_layernorm.weight": "model-00016-of-00029.safetensors",
+ "model.layers.34.mlp.down_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.34.mlp.gate_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.34.mlp.up_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.34.post_attention_layernorm.weight": "model-00016-of-00029.safetensors",
+ "model.layers.34.self_attn.k_proj.bias": "model-00016-of-00029.safetensors",
+ "model.layers.34.self_attn.k_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.34.self_attn.o_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.34.self_attn.q_proj.bias": "model-00016-of-00029.safetensors",
+ "model.layers.34.self_attn.q_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.34.self_attn.v_proj.bias": "model-00016-of-00029.safetensors",
+ "model.layers.34.self_attn.v_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.35.input_layernorm.weight": "model-00017-of-00029.safetensors",
+ "model.layers.35.mlp.down_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.35.mlp.gate_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.35.mlp.up_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.35.post_attention_layernorm.weight": "model-00017-of-00029.safetensors",
+ "model.layers.35.self_attn.k_proj.bias": "model-00016-of-00029.safetensors",
+ "model.layers.35.self_attn.k_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.35.self_attn.o_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.35.self_attn.q_proj.bias": "model-00016-of-00029.safetensors",
+ "model.layers.35.self_attn.q_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.35.self_attn.v_proj.bias": "model-00016-of-00029.safetensors",
+ "model.layers.35.self_attn.v_proj.weight": "model-00016-of-00029.safetensors",
+ "model.layers.36.input_layernorm.weight": "model-00017-of-00029.safetensors",
+ "model.layers.36.mlp.down_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.36.mlp.gate_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.36.mlp.up_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.36.post_attention_layernorm.weight": "model-00017-of-00029.safetensors",
+ "model.layers.36.self_attn.k_proj.bias": "model-00017-of-00029.safetensors",
+ "model.layers.36.self_attn.k_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.36.self_attn.o_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.36.self_attn.q_proj.bias": "model-00017-of-00029.safetensors",
+ "model.layers.36.self_attn.q_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.36.self_attn.v_proj.bias": "model-00017-of-00029.safetensors",
+ "model.layers.36.self_attn.v_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.37.input_layernorm.weight": "model-00017-of-00029.safetensors",
+ "model.layers.37.mlp.down_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.37.mlp.gate_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.37.mlp.up_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.37.post_attention_layernorm.weight": "model-00017-of-00029.safetensors",
+ "model.layers.37.self_attn.k_proj.bias": "model-00017-of-00029.safetensors",
+ "model.layers.37.self_attn.k_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.37.self_attn.o_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.37.self_attn.q_proj.bias": "model-00017-of-00029.safetensors",
+ "model.layers.37.self_attn.q_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.37.self_attn.v_proj.bias": "model-00017-of-00029.safetensors",
+ "model.layers.37.self_attn.v_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.38.input_layernorm.weight": "model-00018-of-00029.safetensors",
+ "model.layers.38.mlp.down_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.38.mlp.gate_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.38.mlp.up_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.38.post_attention_layernorm.weight": "model-00018-of-00029.safetensors",
+ "model.layers.38.self_attn.k_proj.bias": "model-00017-of-00029.safetensors",
+ "model.layers.38.self_attn.k_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.38.self_attn.o_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.38.self_attn.q_proj.bias": "model-00017-of-00029.safetensors",
+ "model.layers.38.self_attn.q_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.38.self_attn.v_proj.bias": "model-00017-of-00029.safetensors",
+ "model.layers.38.self_attn.v_proj.weight": "model-00017-of-00029.safetensors",
+ "model.layers.39.input_layernorm.weight": "model-00018-of-00029.safetensors",
+ "model.layers.39.mlp.down_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.39.mlp.gate_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.39.mlp.up_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.39.post_attention_layernorm.weight": "model-00018-of-00029.safetensors",
+ "model.layers.39.self_attn.k_proj.bias": "model-00018-of-00029.safetensors",
+ "model.layers.39.self_attn.k_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.39.self_attn.o_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.39.self_attn.q_proj.bias": "model-00018-of-00029.safetensors",
+ "model.layers.39.self_attn.q_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.39.self_attn.v_proj.bias": "model-00018-of-00029.safetensors",
+ "model.layers.39.self_attn.v_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00003-of-00029.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00003-of-00029.safetensors",
+ "model.layers.4.self_attn.k_proj.bias": "model-00003-of-00029.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.4.self_attn.q_proj.bias": "model-00003-of-00029.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.4.self_attn.v_proj.bias": "model-00003-of-00029.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.40.input_layernorm.weight": "model-00019-of-00029.safetensors",
+ "model.layers.40.mlp.down_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.40.mlp.gate_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.40.mlp.up_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.40.post_attention_layernorm.weight": "model-00019-of-00029.safetensors",
+ "model.layers.40.self_attn.k_proj.bias": "model-00018-of-00029.safetensors",
+ "model.layers.40.self_attn.k_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.40.self_attn.o_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.40.self_attn.q_proj.bias": "model-00018-of-00029.safetensors",
+ "model.layers.40.self_attn.q_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.40.self_attn.v_proj.bias": "model-00018-of-00029.safetensors",
+ "model.layers.40.self_attn.v_proj.weight": "model-00018-of-00029.safetensors",
+ "model.layers.41.input_layernorm.weight": "model-00019-of-00029.safetensors",
+ "model.layers.41.mlp.down_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.41.mlp.gate_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.41.mlp.up_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.41.post_attention_layernorm.weight": "model-00019-of-00029.safetensors",
+ "model.layers.41.self_attn.k_proj.bias": "model-00019-of-00029.safetensors",
+ "model.layers.41.self_attn.k_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.41.self_attn.o_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.41.self_attn.q_proj.bias": "model-00019-of-00029.safetensors",
+ "model.layers.41.self_attn.q_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.41.self_attn.v_proj.bias": "model-00019-of-00029.safetensors",
+ "model.layers.41.self_attn.v_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.42.input_layernorm.weight": "model-00020-of-00029.safetensors",
+ "model.layers.42.mlp.down_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.42.mlp.gate_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.42.mlp.up_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.42.post_attention_layernorm.weight": "model-00020-of-00029.safetensors",
+ "model.layers.42.self_attn.k_proj.bias": "model-00019-of-00029.safetensors",
+ "model.layers.42.self_attn.k_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.42.self_attn.o_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.42.self_attn.q_proj.bias": "model-00019-of-00029.safetensors",
+ "model.layers.42.self_attn.q_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.42.self_attn.v_proj.bias": "model-00019-of-00029.safetensors",
+ "model.layers.42.self_attn.v_proj.weight": "model-00019-of-00029.safetensors",
+ "model.layers.43.input_layernorm.weight": "model-00020-of-00029.safetensors",
+ "model.layers.43.mlp.down_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.43.mlp.gate_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.43.mlp.up_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.43.post_attention_layernorm.weight": "model-00020-of-00029.safetensors",
+ "model.layers.43.self_attn.k_proj.bias": "model-00020-of-00029.safetensors",
+ "model.layers.43.self_attn.k_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.43.self_attn.o_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.43.self_attn.q_proj.bias": "model-00020-of-00029.safetensors",
+ "model.layers.43.self_attn.q_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.43.self_attn.v_proj.bias": "model-00020-of-00029.safetensors",
+ "model.layers.43.self_attn.v_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.44.input_layernorm.weight": "model-00020-of-00029.safetensors",
+ "model.layers.44.mlp.down_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.44.mlp.gate_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.44.mlp.up_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.44.post_attention_layernorm.weight": "model-00020-of-00029.safetensors",
+ "model.layers.44.self_attn.k_proj.bias": "model-00020-of-00029.safetensors",
+ "model.layers.44.self_attn.k_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.44.self_attn.o_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.44.self_attn.q_proj.bias": "model-00020-of-00029.safetensors",
+ "model.layers.44.self_attn.q_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.44.self_attn.v_proj.bias": "model-00020-of-00029.safetensors",
+ "model.layers.44.self_attn.v_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.45.input_layernorm.weight": "model-00021-of-00029.safetensors",
+ "model.layers.45.mlp.down_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.45.mlp.gate_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.45.mlp.up_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.45.post_attention_layernorm.weight": "model-00021-of-00029.safetensors",
+ "model.layers.45.self_attn.k_proj.bias": "model-00020-of-00029.safetensors",
+ "model.layers.45.self_attn.k_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.45.self_attn.o_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.45.self_attn.q_proj.bias": "model-00020-of-00029.safetensors",
+ "model.layers.45.self_attn.q_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.45.self_attn.v_proj.bias": "model-00020-of-00029.safetensors",
+ "model.layers.45.self_attn.v_proj.weight": "model-00020-of-00029.safetensors",
+ "model.layers.46.input_layernorm.weight": "model-00021-of-00029.safetensors",
+ "model.layers.46.mlp.down_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.46.mlp.gate_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.46.mlp.up_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.46.post_attention_layernorm.weight": "model-00021-of-00029.safetensors",
+ "model.layers.46.self_attn.k_proj.bias": "model-00021-of-00029.safetensors",
+ "model.layers.46.self_attn.k_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.46.self_attn.o_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.46.self_attn.q_proj.bias": "model-00021-of-00029.safetensors",
+ "model.layers.46.self_attn.q_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.46.self_attn.v_proj.bias": "model-00021-of-00029.safetensors",
+ "model.layers.46.self_attn.v_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.47.input_layernorm.weight": "model-00022-of-00029.safetensors",
+ "model.layers.47.mlp.down_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.47.mlp.gate_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.47.mlp.up_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.47.post_attention_layernorm.weight": "model-00022-of-00029.safetensors",
+ "model.layers.47.self_attn.k_proj.bias": "model-00021-of-00029.safetensors",
+ "model.layers.47.self_attn.k_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.47.self_attn.o_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.47.self_attn.q_proj.bias": "model-00021-of-00029.safetensors",
+ "model.layers.47.self_attn.q_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.47.self_attn.v_proj.bias": "model-00021-of-00029.safetensors",
+ "model.layers.47.self_attn.v_proj.weight": "model-00021-of-00029.safetensors",
+ "model.layers.48.input_layernorm.weight": "model-00022-of-00029.safetensors",
+ "model.layers.48.mlp.down_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.48.mlp.gate_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.48.mlp.up_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.48.post_attention_layernorm.weight": "model-00022-of-00029.safetensors",
+ "model.layers.48.self_attn.k_proj.bias": "model-00022-of-00029.safetensors",
+ "model.layers.48.self_attn.k_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.48.self_attn.o_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.48.self_attn.q_proj.bias": "model-00022-of-00029.safetensors",
+ "model.layers.48.self_attn.q_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.48.self_attn.v_proj.bias": "model-00022-of-00029.safetensors",
+ "model.layers.48.self_attn.v_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.49.input_layernorm.weight": "model-00023-of-00029.safetensors",
+ "model.layers.49.mlp.down_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.49.mlp.gate_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.49.mlp.up_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.49.post_attention_layernorm.weight": "model-00023-of-00029.safetensors",
+ "model.layers.49.self_attn.k_proj.bias": "model-00022-of-00029.safetensors",
+ "model.layers.49.self_attn.k_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.49.self_attn.o_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.49.self_attn.q_proj.bias": "model-00022-of-00029.safetensors",
+ "model.layers.49.self_attn.q_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.49.self_attn.v_proj.bias": "model-00022-of-00029.safetensors",
+ "model.layers.49.self_attn.v_proj.weight": "model-00022-of-00029.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00004-of-00029.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00004-of-00029.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00004-of-00029.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00004-of-00029.safetensors",
+ "model.layers.5.self_attn.k_proj.bias": "model-00003-of-00029.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.5.self_attn.q_proj.bias": "model-00003-of-00029.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.5.self_attn.v_proj.bias": "model-00003-of-00029.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00003-of-00029.safetensors",
+ "model.layers.50.input_layernorm.weight": "model-00023-of-00029.safetensors",
+ "model.layers.50.mlp.down_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.50.mlp.gate_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.50.mlp.up_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.50.post_attention_layernorm.weight": "model-00023-of-00029.safetensors",
+ "model.layers.50.self_attn.k_proj.bias": "model-00023-of-00029.safetensors",
+ "model.layers.50.self_attn.k_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.50.self_attn.o_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.50.self_attn.q_proj.bias": "model-00023-of-00029.safetensors",
+ "model.layers.50.self_attn.q_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.50.self_attn.v_proj.bias": "model-00023-of-00029.safetensors",
+ "model.layers.50.self_attn.v_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.51.input_layernorm.weight": "model-00023-of-00029.safetensors",
+ "model.layers.51.mlp.down_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.51.mlp.gate_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.51.mlp.up_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.51.post_attention_layernorm.weight": "model-00023-of-00029.safetensors",
+ "model.layers.51.self_attn.k_proj.bias": "model-00023-of-00029.safetensors",
+ "model.layers.51.self_attn.k_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.51.self_attn.o_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.51.self_attn.q_proj.bias": "model-00023-of-00029.safetensors",
+ "model.layers.51.self_attn.q_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.51.self_attn.v_proj.bias": "model-00023-of-00029.safetensors",
+ "model.layers.51.self_attn.v_proj.weight": "model-00023-of-00029.safetensors",
+ "model.layers.52.input_layernorm.weight": "model-00024-of-00029.safetensors",
+ "model.layers.52.mlp.down_proj.weight": "model-00024-of-00029.safetensors",
+ "model.layers.52.mlp.gate_proj.weight": "model-00024-of-00029.safetensors",
+ "model.layers.52.mlp.up_proj.weight": "model-00024-of-00029.safetensors",
+ "model.layers.52.post_attention_layernorm.weight": "model-00024-of-00029.safetensors",
589
+ "model.layers.52.self_attn.k_proj.bias": "model-00023-of-00029.safetensors",
590
+ "model.layers.52.self_attn.k_proj.weight": "model-00023-of-00029.safetensors",
591
+ "model.layers.52.self_attn.o_proj.weight": "model-00023-of-00029.safetensors",
592
+ "model.layers.52.self_attn.q_proj.bias": "model-00023-of-00029.safetensors",
593
+ "model.layers.52.self_attn.q_proj.weight": "model-00023-of-00029.safetensors",
594
+ "model.layers.52.self_attn.v_proj.bias": "model-00023-of-00029.safetensors",
595
+ "model.layers.52.self_attn.v_proj.weight": "model-00023-of-00029.safetensors",
596
+ "model.layers.53.input_layernorm.weight": "model-00024-of-00029.safetensors",
597
+ "model.layers.53.mlp.down_proj.weight": "model-00024-of-00029.safetensors",
598
+ "model.layers.53.mlp.gate_proj.weight": "model-00024-of-00029.safetensors",
599
+ "model.layers.53.mlp.up_proj.weight": "model-00024-of-00029.safetensors",
600
+ "model.layers.53.post_attention_layernorm.weight": "model-00024-of-00029.safetensors",
601
+ "model.layers.53.self_attn.k_proj.bias": "model-00024-of-00029.safetensors",
602
+ "model.layers.53.self_attn.k_proj.weight": "model-00024-of-00029.safetensors",
603
+ "model.layers.53.self_attn.o_proj.weight": "model-00024-of-00029.safetensors",
604
+ "model.layers.53.self_attn.q_proj.bias": "model-00024-of-00029.safetensors",
605
+ "model.layers.53.self_attn.q_proj.weight": "model-00024-of-00029.safetensors",
606
+ "model.layers.53.self_attn.v_proj.bias": "model-00024-of-00029.safetensors",
607
+ "model.layers.53.self_attn.v_proj.weight": "model-00024-of-00029.safetensors",
608
+ "model.layers.54.input_layernorm.weight": "model-00025-of-00029.safetensors",
609
+ "model.layers.54.mlp.down_proj.weight": "model-00025-of-00029.safetensors",
610
+ "model.layers.54.mlp.gate_proj.weight": "model-00024-of-00029.safetensors",
611
+ "model.layers.54.mlp.up_proj.weight": "model-00025-of-00029.safetensors",
612
+ "model.layers.54.post_attention_layernorm.weight": "model-00025-of-00029.safetensors",
613
+ "model.layers.54.self_attn.k_proj.bias": "model-00024-of-00029.safetensors",
614
+ "model.layers.54.self_attn.k_proj.weight": "model-00024-of-00029.safetensors",
615
+ "model.layers.54.self_attn.o_proj.weight": "model-00024-of-00029.safetensors",
616
+ "model.layers.54.self_attn.q_proj.bias": "model-00024-of-00029.safetensors",
617
+ "model.layers.54.self_attn.q_proj.weight": "model-00024-of-00029.safetensors",
618
+ "model.layers.54.self_attn.v_proj.bias": "model-00024-of-00029.safetensors",
619
+ "model.layers.54.self_attn.v_proj.weight": "model-00024-of-00029.safetensors",
620
+ "model.layers.55.input_layernorm.weight": "model-00025-of-00029.safetensors",
621
+ "model.layers.55.mlp.down_proj.weight": "model-00025-of-00029.safetensors",
622
+ "model.layers.55.mlp.gate_proj.weight": "model-00025-of-00029.safetensors",
623
+ "model.layers.55.mlp.up_proj.weight": "model-00025-of-00029.safetensors",
624
+ "model.layers.55.post_attention_layernorm.weight": "model-00025-of-00029.safetensors",
625
+ "model.layers.55.self_attn.k_proj.bias": "model-00025-of-00029.safetensors",
626
+ "model.layers.55.self_attn.k_proj.weight": "model-00025-of-00029.safetensors",
627
+ "model.layers.55.self_attn.o_proj.weight": "model-00025-of-00029.safetensors",
628
+ "model.layers.55.self_attn.q_proj.bias": "model-00025-of-00029.safetensors",
629
+ "model.layers.55.self_attn.q_proj.weight": "model-00025-of-00029.safetensors",
630
+ "model.layers.55.self_attn.v_proj.bias": "model-00025-of-00029.safetensors",
631
+ "model.layers.55.self_attn.v_proj.weight": "model-00025-of-00029.safetensors",
632
+ "model.layers.56.input_layernorm.weight": "model-00026-of-00029.safetensors",
633
+ "model.layers.56.mlp.down_proj.weight": "model-00026-of-00029.safetensors",
634
+ "model.layers.56.mlp.gate_proj.weight": "model-00025-of-00029.safetensors",
635
+ "model.layers.56.mlp.up_proj.weight": "model-00025-of-00029.safetensors",
636
+ "model.layers.56.post_attention_layernorm.weight": "model-00026-of-00029.safetensors",
637
+ "model.layers.56.self_attn.k_proj.bias": "model-00025-of-00029.safetensors",
638
+ "model.layers.56.self_attn.k_proj.weight": "model-00025-of-00029.safetensors",
639
+ "model.layers.56.self_attn.o_proj.weight": "model-00025-of-00029.safetensors",
640
+ "model.layers.56.self_attn.q_proj.bias": "model-00025-of-00029.safetensors",
641
+ "model.layers.56.self_attn.q_proj.weight": "model-00025-of-00029.safetensors",
642
+ "model.layers.56.self_attn.v_proj.bias": "model-00025-of-00029.safetensors",
643
+ "model.layers.56.self_attn.v_proj.weight": "model-00025-of-00029.safetensors",
644
+ "model.layers.57.input_layernorm.weight": "model-00026-of-00029.safetensors",
645
+ "model.layers.57.mlp.down_proj.weight": "model-00026-of-00029.safetensors",
646
+ "model.layers.57.mlp.gate_proj.weight": "model-00026-of-00029.safetensors",
647
+ "model.layers.57.mlp.up_proj.weight": "model-00026-of-00029.safetensors",
648
+ "model.layers.57.post_attention_layernorm.weight": "model-00026-of-00029.safetensors",
649
+ "model.layers.57.self_attn.k_proj.bias": "model-00026-of-00029.safetensors",
650
+ "model.layers.57.self_attn.k_proj.weight": "model-00026-of-00029.safetensors",
651
+ "model.layers.57.self_attn.o_proj.weight": "model-00026-of-00029.safetensors",
652
+ "model.layers.57.self_attn.q_proj.bias": "model-00026-of-00029.safetensors",
653
+ "model.layers.57.self_attn.q_proj.weight": "model-00026-of-00029.safetensors",
654
+ "model.layers.57.self_attn.v_proj.bias": "model-00026-of-00029.safetensors",
655
+ "model.layers.57.self_attn.v_proj.weight": "model-00026-of-00029.safetensors",
656
+ "model.layers.58.input_layernorm.weight": "model-00026-of-00029.safetensors",
657
+ "model.layers.58.mlp.down_proj.weight": "model-00026-of-00029.safetensors",
658
+ "model.layers.58.mlp.gate_proj.weight": "model-00026-of-00029.safetensors",
659
+ "model.layers.58.mlp.up_proj.weight": "model-00026-of-00029.safetensors",
660
+ "model.layers.58.post_attention_layernorm.weight": "model-00026-of-00029.safetensors",
661
+ "model.layers.58.self_attn.k_proj.bias": "model-00026-of-00029.safetensors",
662
+ "model.layers.58.self_attn.k_proj.weight": "model-00026-of-00029.safetensors",
663
+ "model.layers.58.self_attn.o_proj.weight": "model-00026-of-00029.safetensors",
664
+ "model.layers.58.self_attn.q_proj.bias": "model-00026-of-00029.safetensors",
665
+ "model.layers.58.self_attn.q_proj.weight": "model-00026-of-00029.safetensors",
666
+ "model.layers.58.self_attn.v_proj.bias": "model-00026-of-00029.safetensors",
667
+ "model.layers.58.self_attn.v_proj.weight": "model-00026-of-00029.safetensors",
668
+ "model.layers.59.input_layernorm.weight": "model-00027-of-00029.safetensors",
669
+ "model.layers.59.mlp.down_proj.weight": "model-00027-of-00029.safetensors",
670
+ "model.layers.59.mlp.gate_proj.weight": "model-00027-of-00029.safetensors",
671
+ "model.layers.59.mlp.up_proj.weight": "model-00027-of-00029.safetensors",
672
+ "model.layers.59.post_attention_layernorm.weight": "model-00027-of-00029.safetensors",
673
+ "model.layers.59.self_attn.k_proj.bias": "model-00026-of-00029.safetensors",
674
+ "model.layers.59.self_attn.k_proj.weight": "model-00026-of-00029.safetensors",
675
+ "model.layers.59.self_attn.o_proj.weight": "model-00026-of-00029.safetensors",
676
+ "model.layers.59.self_attn.q_proj.bias": "model-00026-of-00029.safetensors",
677
+ "model.layers.59.self_attn.q_proj.weight": "model-00026-of-00029.safetensors",
678
+ "model.layers.59.self_attn.v_proj.bias": "model-00026-of-00029.safetensors",
679
+ "model.layers.59.self_attn.v_proj.weight": "model-00026-of-00029.safetensors",
680
+ "model.layers.6.input_layernorm.weight": "model-00004-of-00029.safetensors",
681
+ "model.layers.6.mlp.down_proj.weight": "model-00004-of-00029.safetensors",
682
+ "model.layers.6.mlp.gate_proj.weight": "model-00004-of-00029.safetensors",
683
+ "model.layers.6.mlp.up_proj.weight": "model-00004-of-00029.safetensors",
684
+ "model.layers.6.post_attention_layernorm.weight": "model-00004-of-00029.safetensors",
685
+ "model.layers.6.self_attn.k_proj.bias": "model-00004-of-00029.safetensors",
686
+ "model.layers.6.self_attn.k_proj.weight": "model-00004-of-00029.safetensors",
687
+ "model.layers.6.self_attn.o_proj.weight": "model-00004-of-00029.safetensors",
688
+ "model.layers.6.self_attn.q_proj.bias": "model-00004-of-00029.safetensors",
689
+ "model.layers.6.self_attn.q_proj.weight": "model-00004-of-00029.safetensors",
690
+ "model.layers.6.self_attn.v_proj.bias": "model-00004-of-00029.safetensors",
691
+ "model.layers.6.self_attn.v_proj.weight": "model-00004-of-00029.safetensors",
692
+ "model.layers.60.input_layernorm.weight": "model-00027-of-00029.safetensors",
693
+ "model.layers.60.mlp.down_proj.weight": "model-00027-of-00029.safetensors",
694
+ "model.layers.60.mlp.gate_proj.weight": "model-00027-of-00029.safetensors",
695
+ "model.layers.60.mlp.up_proj.weight": "model-00027-of-00029.safetensors",
696
+ "model.layers.60.post_attention_layernorm.weight": "model-00027-of-00029.safetensors",
697
+ "model.layers.60.self_attn.k_proj.bias": "model-00027-of-00029.safetensors",
698
+ "model.layers.60.self_attn.k_proj.weight": "model-00027-of-00029.safetensors",
699
+ "model.layers.60.self_attn.o_proj.weight": "model-00027-of-00029.safetensors",
700
+ "model.layers.60.self_attn.q_proj.bias": "model-00027-of-00029.safetensors",
701
+ "model.layers.60.self_attn.q_proj.weight": "model-00027-of-00029.safetensors",
702
+ "model.layers.60.self_attn.v_proj.bias": "model-00027-of-00029.safetensors",
703
+ "model.layers.60.self_attn.v_proj.weight": "model-00027-of-00029.safetensors",
704
+ "model.layers.61.input_layernorm.weight": "model-00028-of-00029.safetensors",
705
+ "model.layers.61.mlp.down_proj.weight": "model-00028-of-00029.safetensors",
706
+ "model.layers.61.mlp.gate_proj.weight": "model-00027-of-00029.safetensors",
707
+ "model.layers.61.mlp.up_proj.weight": "model-00028-of-00029.safetensors",
708
+ "model.layers.61.post_attention_layernorm.weight": "model-00028-of-00029.safetensors",
709
+ "model.layers.61.self_attn.k_proj.bias": "model-00027-of-00029.safetensors",
710
+ "model.layers.61.self_attn.k_proj.weight": "model-00027-of-00029.safetensors",
711
+ "model.layers.61.self_attn.o_proj.weight": "model-00027-of-00029.safetensors",
712
+ "model.layers.61.self_attn.q_proj.bias": "model-00027-of-00029.safetensors",
713
+ "model.layers.61.self_attn.q_proj.weight": "model-00027-of-00029.safetensors",
714
+ "model.layers.61.self_attn.v_proj.bias": "model-00027-of-00029.safetensors",
715
+ "model.layers.61.self_attn.v_proj.weight": "model-00027-of-00029.safetensors",
716
+ "model.layers.62.input_layernorm.weight": "model-00028-of-00029.safetensors",
717
+ "model.layers.62.mlp.down_proj.weight": "model-00028-of-00029.safetensors",
718
+ "model.layers.62.mlp.gate_proj.weight": "model-00028-of-00029.safetensors",
719
+ "model.layers.62.mlp.up_proj.weight": "model-00028-of-00029.safetensors",
720
+ "model.layers.62.post_attention_layernorm.weight": "model-00028-of-00029.safetensors",
721
+ "model.layers.62.self_attn.k_proj.bias": "model-00028-of-00029.safetensors",
722
+ "model.layers.62.self_attn.k_proj.weight": "model-00028-of-00029.safetensors",
723
+ "model.layers.62.self_attn.o_proj.weight": "model-00028-of-00029.safetensors",
724
+ "model.layers.62.self_attn.q_proj.bias": "model-00028-of-00029.safetensors",
725
+ "model.layers.62.self_attn.q_proj.weight": "model-00028-of-00029.safetensors",
726
+ "model.layers.62.self_attn.v_proj.bias": "model-00028-of-00029.safetensors",
727
+ "model.layers.62.self_attn.v_proj.weight": "model-00028-of-00029.safetensors",
728
+ "model.layers.63.input_layernorm.weight": "model-00029-of-00029.safetensors",
729
+ "model.layers.63.mlp.down_proj.weight": "model-00029-of-00029.safetensors",
730
+ "model.layers.63.mlp.gate_proj.weight": "model-00028-of-00029.safetensors",
731
+ "model.layers.63.mlp.up_proj.weight": "model-00028-of-00029.safetensors",
732
+ "model.layers.63.post_attention_layernorm.weight": "model-00029-of-00029.safetensors",
733
+ "model.layers.63.self_attn.k_proj.bias": "model-00028-of-00029.safetensors",
734
+ "model.layers.63.self_attn.k_proj.weight": "model-00028-of-00029.safetensors",
735
+ "model.layers.63.self_attn.o_proj.weight": "model-00028-of-00029.safetensors",
736
+ "model.layers.63.self_attn.q_proj.bias": "model-00028-of-00029.safetensors",
737
+ "model.layers.63.self_attn.q_proj.weight": "model-00028-of-00029.safetensors",
738
+ "model.layers.63.self_attn.v_proj.bias": "model-00028-of-00029.safetensors",
739
+ "model.layers.63.self_attn.v_proj.weight": "model-00028-of-00029.safetensors",
740
+ "model.layers.7.input_layernorm.weight": "model-00005-of-00029.safetensors",
741
+ "model.layers.7.mlp.down_proj.weight": "model-00005-of-00029.safetensors",
742
+ "model.layers.7.mlp.gate_proj.weight": "model-00004-of-00029.safetensors",
743
+ "model.layers.7.mlp.up_proj.weight": "model-00004-of-00029.safetensors",
744
+ "model.layers.7.post_attention_layernorm.weight": "model-00005-of-00029.safetensors",
745
+ "model.layers.7.self_attn.k_proj.bias": "model-00004-of-00029.safetensors",
746
+ "model.layers.7.self_attn.k_proj.weight": "model-00004-of-00029.safetensors",
747
+ "model.layers.7.self_attn.o_proj.weight": "model-00004-of-00029.safetensors",
748
+ "model.layers.7.self_attn.q_proj.bias": "model-00004-of-00029.safetensors",
749
+ "model.layers.7.self_attn.q_proj.weight": "model-00004-of-00029.safetensors",
750
+ "model.layers.7.self_attn.v_proj.bias": "model-00004-of-00029.safetensors",
751
+ "model.layers.7.self_attn.v_proj.weight": "model-00004-of-00029.safetensors",
752
+ "model.layers.8.input_layernorm.weight": "model-00005-of-00029.safetensors",
753
+ "model.layers.8.mlp.down_proj.weight": "model-00005-of-00029.safetensors",
754
+ "model.layers.8.mlp.gate_proj.weight": "model-00005-of-00029.safetensors",
755
+ "model.layers.8.mlp.up_proj.weight": "model-00005-of-00029.safetensors",
756
+ "model.layers.8.post_attention_layernorm.weight": "model-00005-of-00029.safetensors",
757
+ "model.layers.8.self_attn.k_proj.bias": "model-00005-of-00029.safetensors",
758
+ "model.layers.8.self_attn.k_proj.weight": "model-00005-of-00029.safetensors",
759
+ "model.layers.8.self_attn.o_proj.weight": "model-00005-of-00029.safetensors",
760
+ "model.layers.8.self_attn.q_proj.bias": "model-00005-of-00029.safetensors",
761
+ "model.layers.8.self_attn.q_proj.weight": "model-00005-of-00029.safetensors",
762
+ "model.layers.8.self_attn.v_proj.bias": "model-00005-of-00029.safetensors",
763
+ "model.layers.8.self_attn.v_proj.weight": "model-00005-of-00029.safetensors",
764
+ "model.layers.9.input_layernorm.weight": "model-00005-of-00029.safetensors",
765
+ "model.layers.9.mlp.down_proj.weight": "model-00005-of-00029.safetensors",
766
+ "model.layers.9.mlp.gate_proj.weight": "model-00005-of-00029.safetensors",
767
+ "model.layers.9.mlp.up_proj.weight": "model-00005-of-00029.safetensors",
768
+ "model.layers.9.post_attention_layernorm.weight": "model-00005-of-00029.safetensors",
769
+ "model.layers.9.self_attn.k_proj.bias": "model-00005-of-00029.safetensors",
770
+ "model.layers.9.self_attn.k_proj.weight": "model-00005-of-00029.safetensors",
771
+ "model.layers.9.self_attn.o_proj.weight": "model-00005-of-00029.safetensors",
772
+ "model.layers.9.self_attn.q_proj.bias": "model-00005-of-00029.safetensors",
773
+ "model.layers.9.self_attn.q_proj.weight": "model-00005-of-00029.safetensors",
774
+ "model.layers.9.self_attn.v_proj.bias": "model-00005-of-00029.safetensors",
775
+ "model.layers.9.self_attn.v_proj.weight": "model-00005-of-00029.safetensors",
776
+ "model.norm.weight": "model-00029-of-00029.safetensors"
777
+ }
778
+ }
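The weight_map above is the standard safetensors sharded-index layout: each tensor name resolves to the one shard file that stores it, so a single tensor can be read without materializing all 29 shards. A minimal sketch of that lookup, assuming the checkpoint-488/ directory has been downloaded locally and the safetensors package is installed (the tensor name is one entry from the map above):

import json
from safetensors.torch import load_file

# Resolve which shard holds a given tensor via the index file.
with open("checkpoint-488/model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.46.self_attn.q_proj.weight"
shard = index["weight_map"][name]               # -> "model-00021-of-00029.safetensors"
tensors = load_file(f"checkpoint-488/{shard}")  # reads only this one shard
print(name, tuple(tensors[name].shape))

For whole-model use, transformers' from_pretrained consumes this index automatically; the manual route is mainly useful for inspecting individual shards.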
checkpoint-488/trainer_state.json ADDED
@@ -0,0 +1,3449 @@
+ {
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 2.0,
+ "eval_steps": 500,
+ "global_step": 488,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.004098360655737705,
+ "grad_norm": 4500.83056640625,
+ "learning_rate": 2.0000000000000002e-07,
+ "loss": 2.2104,
+ "step": 1
+ },
+ {
+ "epoch": 0.00819672131147541,
+ "grad_norm": 397.61431884765625,
+ "learning_rate": 4.0000000000000003e-07,
+ "loss": 2.1402,
+ "step": 2
+ },
+ {
+ "epoch": 0.012295081967213115,
+ "grad_norm": 3.9326436519622803,
+ "learning_rate": 6.000000000000001e-07,
+ "loss": 1.8288,
+ "step": 3
+ },
+ {
+ "epoch": 0.01639344262295082,
+ "grad_norm": 42.71533966064453,
+ "learning_rate": 8.000000000000001e-07,
+ "loss": 2.121,
+ "step": 4
+ },
+ {
+ "epoch": 0.020491803278688523,
+ "grad_norm": 2230.4404296875,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 2.507,
+ "step": 5
+ },
+ {
+ "epoch": 0.02459016393442623,
+ "grad_norm": 53.295406341552734,
+ "learning_rate": 1.2000000000000002e-06,
+ "loss": 2.0578,
+ "step": 6
+ },
+ {
+ "epoch": 0.028688524590163935,
+ "grad_norm": 3.917754650115967,
+ "learning_rate": 1.4000000000000001e-06,
+ "loss": 1.8235,
+ "step": 7
+ },
+ {
+ "epoch": 0.03278688524590164,
+ "grad_norm": 306.2715759277344,
+ "learning_rate": 1.6000000000000001e-06,
+ "loss": 2.2922,
+ "step": 8
+ },
+ {
+ "epoch": 0.036885245901639344,
+ "grad_norm": 367.69854736328125,
+ "learning_rate": 1.8000000000000001e-06,
+ "loss": 1.8594,
+ "step": 9
+ },
+ {
+ "epoch": 0.040983606557377046,
+ "grad_norm": 758.4432373046875,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 1.9712,
+ "step": 10
+ },
+ {
+ "epoch": 0.045081967213114756,
+ "grad_norm": 62.54020690917969,
+ "learning_rate": 2.2e-06,
+ "loss": 1.7428,
+ "step": 11
+ },
+ {
+ "epoch": 0.04918032786885246,
+ "grad_norm": 3.542401075363159,
+ "learning_rate": 2.4000000000000003e-06,
+ "loss": 1.7951,
+ "step": 12
+ },
+ {
+ "epoch": 0.05327868852459016,
+ "grad_norm": 874.4238891601562,
+ "learning_rate": 2.6e-06,
+ "loss": 1.7743,
+ "step": 13
+ },
+ {
+ "epoch": 0.05737704918032787,
+ "grad_norm": 276.30157470703125,
+ "learning_rate": 2.8000000000000003e-06,
+ "loss": 1.7664,
+ "step": 14
+ },
+ {
+ "epoch": 0.06147540983606557,
+ "grad_norm": 2.58331561088562,
+ "learning_rate": 3e-06,
+ "loss": 1.7405,
+ "step": 15
+ },
+ {
+ "epoch": 0.06557377049180328,
+ "grad_norm": 2.7265381813049316,
+ "learning_rate": 3.2000000000000003e-06,
+ "loss": 1.7416,
+ "step": 16
+ },
+ {
+ "epoch": 0.06967213114754098,
+ "grad_norm": 306.4642333984375,
+ "learning_rate": 3.4000000000000005e-06,
+ "loss": 1.6298,
+ "step": 17
+ },
+ {
+ "epoch": 0.07377049180327869,
+ "grad_norm": 2.506108522415161,
+ "learning_rate": 3.6000000000000003e-06,
+ "loss": 1.6869,
+ "step": 18
+ },
+ {
+ "epoch": 0.0778688524590164,
+ "grad_norm": 2880.666015625,
+ "learning_rate": 3.8000000000000005e-06,
+ "loss": 1.4786,
+ "step": 19
+ },
+ {
+ "epoch": 0.08196721311475409,
+ "grad_norm": 1.6825494766235352,
+ "learning_rate": 4.000000000000001e-06,
+ "loss": 1.6626,
+ "step": 20
+ },
+ {
+ "epoch": 0.0860655737704918,
+ "grad_norm": 108.04601287841797,
+ "learning_rate": 4.2000000000000004e-06,
+ "loss": 1.523,
+ "step": 21
+ },
+ {
+ "epoch": 0.09016393442622951,
+ "grad_norm": 87.35586547851562,
+ "learning_rate": 4.4e-06,
+ "loss": 1.402,
+ "step": 22
+ },
+ {
+ "epoch": 0.0942622950819672,
+ "grad_norm": 1.2605695724487305,
+ "learning_rate": 4.600000000000001e-06,
+ "loss": 1.5923,
+ "step": 23
+ },
+ {
+ "epoch": 0.09836065573770492,
+ "grad_norm": 126.99231719970703,
+ "learning_rate": 4.800000000000001e-06,
+ "loss": 1.4233,
+ "step": 24
+ },
+ {
+ "epoch": 0.10245901639344263,
+ "grad_norm": 1.4333895444869995,
+ "learning_rate": 5e-06,
+ "loss": 1.5595,
+ "step": 25
+ },
+ {
+ "epoch": 0.10655737704918032,
+ "grad_norm": 31.390945434570312,
+ "learning_rate": 4.999942449896355e-06,
+ "loss": 1.2143,
+ "step": 26
+ },
+ {
+ "epoch": 0.11065573770491803,
+ "grad_norm": 26.53938865661621,
+ "learning_rate": 4.999769802235034e-06,
+ "loss": 1.3683,
+ "step": 27
+ },
+ {
+ "epoch": 0.11475409836065574,
+ "grad_norm": 3.3473479747772217,
+ "learning_rate": 4.999482064964746e-06,
+ "loss": 1.3561,
+ "step": 28
+ },
+ {
+ "epoch": 0.11885245901639344,
+ "grad_norm": 1.256771445274353,
+ "learning_rate": 4.999079251332942e-06,
+ "loss": 1.4949,
+ "step": 29
+ },
+ {
+ "epoch": 0.12295081967213115,
+ "grad_norm": 188.88003540039062,
+ "learning_rate": 4.998561379885193e-06,
+ "loss": 1.3125,
+ "step": 30
+ },
+ {
+ "epoch": 0.12704918032786885,
+ "grad_norm": 2.2714405059814453,
+ "learning_rate": 4.997928474464344e-06,
+ "loss": 1.3107,
+ "step": 31
+ },
+ {
+ "epoch": 0.13114754098360656,
+ "grad_norm": 1.0286980867385864,
+ "learning_rate": 4.997180564209414e-06,
+ "loss": 1.4724,
+ "step": 32
+ },
+ {
+ "epoch": 0.13524590163934427,
+ "grad_norm": 12.763178825378418,
+ "learning_rate": 4.996317683554252e-06,
+ "loss": 1.2771,
+ "step": 33
+ },
+ {
+ "epoch": 0.13934426229508196,
+ "grad_norm": 0.8430377840995789,
+ "learning_rate": 4.9953398722259546e-06,
+ "loss": 1.4282,
+ "step": 34
+ },
+ {
+ "epoch": 0.14344262295081966,
+ "grad_norm": 0.7248494029045105,
+ "learning_rate": 4.994247175243038e-06,
+ "loss": 1.4255,
+ "step": 35
+ },
+ {
+ "epoch": 0.14754098360655737,
+ "grad_norm": 2.6341772079467773,
+ "learning_rate": 4.993039642913361e-06,
+ "loss": 1.0992,
+ "step": 36
+ },
+ {
+ "epoch": 0.15163934426229508,
+ "grad_norm": 39.01789474487305,
+ "learning_rate": 4.991717330831813e-06,
+ "loss": 1.0712,
+ "step": 37
+ },
+ {
+ "epoch": 0.1557377049180328,
+ "grad_norm": 42.48202896118164,
+ "learning_rate": 4.990280299877749e-06,
+ "loss": 1.0842,
+ "step": 38
+ },
+ {
+ "epoch": 0.1598360655737705,
+ "grad_norm": 0.9557873010635376,
+ "learning_rate": 4.988728616212197e-06,
+ "loss": 1.2174,
+ "step": 39
+ },
+ {
+ "epoch": 0.16393442622950818,
+ "grad_norm": 1.3529239892959595,
+ "learning_rate": 4.9870623512748e-06,
+ "loss": 1.219,
+ "step": 40
+ },
+ {
+ "epoch": 0.1680327868852459,
+ "grad_norm": 0.8343617916107178,
+ "learning_rate": 4.985281581780532e-06,
+ "loss": 1.3546,
+ "step": 41
+ },
+ {
+ "epoch": 0.1721311475409836,
+ "grad_norm": 0.6154312491416931,
+ "learning_rate": 4.9833863897161715e-06,
+ "loss": 0.919,
+ "step": 42
+ },
+ {
+ "epoch": 0.1762295081967213,
+ "grad_norm": 0.657593846321106,
+ "learning_rate": 4.9813768623365164e-06,
+ "loss": 1.2144,
+ "step": 43
+ },
+ {
+ "epoch": 0.18032786885245902,
+ "grad_norm": 0.6682307124137878,
+ "learning_rate": 4.979253092160374e-06,
+ "loss": 1.2024,
+ "step": 44
+ },
+ {
+ "epoch": 0.18442622950819673,
+ "grad_norm": 0.5644441843032837,
+ "learning_rate": 4.9770151769663e-06,
+ "loss": 1.0372,
+ "step": 45
+ },
+ {
+ "epoch": 0.1885245901639344,
+ "grad_norm": 0.6462714076042175,
+ "learning_rate": 4.974663219788095e-06,
+ "loss": 1.3359,
+ "step": 46
+ },
+ {
+ "epoch": 0.19262295081967212,
+ "grad_norm": 0.40336307883262634,
+ "learning_rate": 4.972197328910063e-06,
+ "loss": 0.7266,
+ "step": 47
+ },
+ {
+ "epoch": 0.19672131147540983,
+ "grad_norm": 0.574533998966217,
+ "learning_rate": 4.969617617862023e-06,
+ "loss": 1.1839,
+ "step": 48
+ },
+ {
+ "epoch": 0.20081967213114754,
+ "grad_norm": 0.6555280089378357,
+ "learning_rate": 4.966924205414088e-06,
+ "loss": 1.3378,
+ "step": 49
+ },
+ {
+ "epoch": 0.20491803278688525,
+ "grad_norm": 0.6314173936843872,
+ "learning_rate": 4.964117215571188e-06,
+ "loss": 1.3412,
+ "step": 50
+ },
+ {
+ "epoch": 0.20901639344262296,
+ "grad_norm": 0.5218219757080078,
+ "learning_rate": 4.9611967775673694e-06,
+ "loss": 1.1575,
+ "step": 51
+ },
+ {
+ "epoch": 0.21311475409836064,
+ "grad_norm": 0.5445213317871094,
+ "learning_rate": 4.95816302585984e-06,
+ "loss": 1.1678,
+ "step": 52
+ },
+ {
+ "epoch": 0.21721311475409835,
+ "grad_norm": 0.44126632809638977,
+ "learning_rate": 4.9550161001227795e-06,
+ "loss": 1.1573,
+ "step": 53
+ },
+ {
+ "epoch": 0.22131147540983606,
+ "grad_norm": 0.5242781639099121,
+ "learning_rate": 4.95175614524091e-06,
+ "loss": 1.1674,
+ "step": 54
+ },
+ {
+ "epoch": 0.22540983606557377,
+ "grad_norm": 0.445451021194458,
+ "learning_rate": 4.9483833113028245e-06,
+ "loss": 0.8661,
+ "step": 55
+ },
+ {
+ "epoch": 0.22950819672131148,
+ "grad_norm": 0.5405259728431702,
+ "learning_rate": 4.944897753594078e-06,
+ "loss": 1.1522,
+ "step": 56
+ },
+ {
+ "epoch": 0.2336065573770492,
+ "grad_norm": 0.45289111137390137,
+ "learning_rate": 4.941299632590035e-06,
+ "loss": 1.1482,
+ "step": 57
+ },
+ {
+ "epoch": 0.23770491803278687,
+ "grad_norm": 0.45396503806114197,
+ "learning_rate": 4.937589113948485e-06,
+ "loss": 1.0029,
+ "step": 58
+ },
+ {
+ "epoch": 0.24180327868852458,
+ "grad_norm": 0.5364096164703369,
+ "learning_rate": 4.933766368502015e-06,
+ "loss": 1.1566,
+ "step": 59
+ },
+ {
+ "epoch": 0.2459016393442623,
+ "grad_norm": 0.44701170921325684,
+ "learning_rate": 4.929831572250142e-06,
+ "loss": 0.9908,
+ "step": 60
+ },
+ {
+ "epoch": 0.25,
+ "grad_norm": 0.4584234356880188,
+ "learning_rate": 4.925784906351212e-06,
+ "loss": 1.1334,
+ "step": 61
+ },
+ {
+ "epoch": 0.2540983606557377,
+ "grad_norm": 0.5813693404197693,
+ "learning_rate": 4.9216265571140565e-06,
+ "loss": 1.2994,
+ "step": 62
+ },
+ {
+ "epoch": 0.2581967213114754,
+ "grad_norm": 0.43892839550971985,
+ "learning_rate": 4.917356715989421e-06,
+ "loss": 1.1369,
+ "step": 63
+ },
+ {
+ "epoch": 0.26229508196721313,
+ "grad_norm": 0.4868036210536957,
+ "learning_rate": 4.912975579561146e-06,
+ "loss": 1.2841,
+ "step": 64
+ },
+ {
+ "epoch": 0.26639344262295084,
+ "grad_norm": 0.3914739787578583,
+ "learning_rate": 4.908483349537113e-06,
+ "loss": 0.9917,
+ "step": 65
+ },
+ {
+ "epoch": 0.27049180327868855,
+ "grad_norm": 0.4883388578891754,
+ "learning_rate": 4.903880232739967e-06,
+ "loss": 1.2944,
+ "step": 66
+ },
+ {
+ "epoch": 0.27459016393442626,
+ "grad_norm": 0.45264574885368347,
+ "learning_rate": 4.899166441097586e-06,
+ "loss": 1.1421,
+ "step": 67
+ },
+ {
+ "epoch": 0.2786885245901639,
+ "grad_norm": 0.4849913716316223,
+ "learning_rate": 4.8943421916333275e-06,
+ "loss": 1.2714,
+ "step": 68
+ },
+ {
+ "epoch": 0.2827868852459016,
+ "grad_norm": 0.4818207025527954,
+ "learning_rate": 4.889407706456039e-06,
+ "loss": 1.2748,
+ "step": 69
+ },
+ {
+ "epoch": 0.28688524590163933,
+ "grad_norm": 0.43514296412467957,
+ "learning_rate": 4.884363212749825e-06,
+ "loss": 1.1162,
+ "step": 70
+ },
+ {
+ "epoch": 0.29098360655737704,
+ "grad_norm": 0.448426753282547,
+ "learning_rate": 4.879208942763595e-06,
+ "loss": 1.1156,
+ "step": 71
+ },
+ {
+ "epoch": 0.29508196721311475,
+ "grad_norm": 0.5787432193756104,
+ "learning_rate": 4.8739451338003675e-06,
+ "loss": 1.2675,
+ "step": 72
+ },
+ {
+ "epoch": 0.29918032786885246,
+ "grad_norm": 0.5253551602363586,
+ "learning_rate": 4.868572028206342e-06,
+ "loss": 1.2693,
+ "step": 73
+ },
+ {
+ "epoch": 0.30327868852459017,
+ "grad_norm": 0.4342573285102844,
+ "learning_rate": 4.863089873359746e-06,
+ "loss": 1.1278,
+ "step": 74
+ },
+ {
+ "epoch": 0.3073770491803279,
+ "grad_norm": 0.4752373993396759,
+ "learning_rate": 4.857498921659443e-06,
+ "loss": 1.1225,
+ "step": 75
+ },
+ {
+ "epoch": 0.3114754098360656,
+ "grad_norm": 0.48717620968818665,
+ "learning_rate": 4.8517994305133135e-06,
+ "loss": 1.1154,
+ "step": 76
+ },
+ {
+ "epoch": 0.3155737704918033,
+ "grad_norm": 0.4144691228866577,
+ "learning_rate": 4.845991662326402e-06,
+ "loss": 0.9699,
+ "step": 77
+ },
+ {
+ "epoch": 0.319672131147541,
+ "grad_norm": 0.5019151568412781,
+ "learning_rate": 4.840075884488838e-06,
+ "loss": 1.1027,
+ "step": 78
+ },
+ {
+ "epoch": 0.3237704918032787,
+ "grad_norm": 0.4522932767868042,
+ "learning_rate": 4.834052369363522e-06,
+ "loss": 1.1246,
+ "step": 79
+ },
+ {
+ "epoch": 0.32786885245901637,
+ "grad_norm": 0.48509010672569275,
+ "learning_rate": 4.827921394273592e-06,
+ "loss": 1.2707,
+ "step": 80
+ },
+ {
+ "epoch": 0.3319672131147541,
+ "grad_norm": 0.4448527693748474,
+ "learning_rate": 4.821683241489648e-06,
+ "loss": 1.1272,
+ "step": 81
+ },
+ {
+ "epoch": 0.3360655737704918,
+ "grad_norm": 0.4774729609489441,
+ "learning_rate": 4.815338198216762e-06,
+ "loss": 1.1391,
+ "step": 82
+ },
+ {
+ "epoch": 0.3401639344262295,
+ "grad_norm": 0.626939058303833,
+ "learning_rate": 4.808886556581252e-06,
+ "loss": 1.2457,
+ "step": 83
+ },
+ {
+ "epoch": 0.3442622950819672,
+ "grad_norm": 0.4404904842376709,
+ "learning_rate": 4.802328613617234e-06,
+ "loss": 1.2538,
+ "step": 84
+ },
+ {
+ "epoch": 0.3483606557377049,
+ "grad_norm": 0.5053931474685669,
+ "learning_rate": 4.795664671252947e-06,
+ "loss": 1.1163,
+ "step": 85
+ },
+ {
+ "epoch": 0.3524590163934426,
+ "grad_norm": 0.5418112874031067,
+ "learning_rate": 4.788895036296848e-06,
+ "loss": 1.2381,
+ "step": 86
+ },
+ {
+ "epoch": 0.35655737704918034,
+ "grad_norm": 0.5010538697242737,
+ "learning_rate": 4.782020020423492e-06,
+ "loss": 1.2513,
+ "step": 87
+ },
+ {
+ "epoch": 0.36065573770491804,
+ "grad_norm": 0.4736756384372711,
+ "learning_rate": 4.775039940159182e-06,
+ "loss": 1.0977,
+ "step": 88
+ },
+ {
+ "epoch": 0.36475409836065575,
+ "grad_norm": 0.46083852648735046,
+ "learning_rate": 4.76795511686739e-06,
+ "loss": 1.1,
+ "step": 89
+ },
+ {
+ "epoch": 0.36885245901639346,
+ "grad_norm": 0.4447672367095947,
+ "learning_rate": 4.760765876733967e-06,
+ "loss": 1.2445,
+ "step": 90
+ },
+ {
+ "epoch": 0.3729508196721312,
+ "grad_norm": 0.44670748710632324,
+ "learning_rate": 4.753472550752128e-06,
+ "loss": 1.1072,
+ "step": 91
+ },
+ {
+ "epoch": 0.3770491803278688,
+ "grad_norm": 0.46701523661613464,
+ "learning_rate": 4.746075474707204e-06,
+ "loss": 1.2482,
+ "step": 92
+ },
+ {
+ "epoch": 0.38114754098360654,
+ "grad_norm": 0.47101953625679016,
+ "learning_rate": 4.738574989161189e-06,
+ "loss": 1.2585,
+ "step": 93
+ },
+ {
+ "epoch": 0.38524590163934425,
+ "grad_norm": 0.47528576850891113,
+ "learning_rate": 4.73097143943706e-06,
+ "loss": 1.2438,
+ "step": 94
+ },
+ {
+ "epoch": 0.38934426229508196,
+ "grad_norm": 0.4908735454082489,
+ "learning_rate": 4.723265175602877e-06,
+ "loss": 1.2403,
+ "step": 95
+ },
+ {
+ "epoch": 0.39344262295081966,
+ "grad_norm": 0.45603206753730774,
+ "learning_rate": 4.7154565524556655e-06,
+ "loss": 1.1163,
+ "step": 96
+ },
+ {
+ "epoch": 0.3975409836065574,
+ "grad_norm": 0.448993980884552,
+ "learning_rate": 4.707545929505083e-06,
+ "loss": 1.2372,
+ "step": 97
+ },
+ {
+ "epoch": 0.4016393442622951,
+ "grad_norm": 0.47324758768081665,
+ "learning_rate": 4.699533670956866e-06,
+ "loss": 1.2466,
+ "step": 98
+ },
+ {
+ "epoch": 0.4057377049180328,
+ "grad_norm": 0.4399038553237915,
+ "learning_rate": 4.691420145696062e-06,
+ "loss": 1.2268,
+ "step": 99
+ },
+ {
+ "epoch": 0.4098360655737705,
+ "grad_norm": 0.5177188515663147,
+ "learning_rate": 4.683205727270047e-06,
+ "loss": 1.0946,
+ "step": 100
+ },
+ {
+ "epoch": 0.4139344262295082,
+ "grad_norm": 0.49126148223876953,
+ "learning_rate": 4.674890793871327e-06,
+ "loss": 1.239,
+ "step": 101
+ },
+ {
+ "epoch": 0.4180327868852459,
+ "grad_norm": 0.44006577134132385,
+ "learning_rate": 4.666475728320124e-06,
+ "loss": 1.2425,
+ "step": 102
+ },
+ {
+ "epoch": 0.42213114754098363,
+ "grad_norm": 0.4243885278701782,
+ "learning_rate": 4.657960918046755e-06,
+ "loss": 0.9624,
+ "step": 103
+ },
+ {
+ "epoch": 0.4262295081967213,
+ "grad_norm": 0.5435347557067871,
+ "learning_rate": 4.64934675507379e-06,
+ "loss": 1.2487,
+ "step": 104
+ },
+ {
+ "epoch": 0.430327868852459,
+ "grad_norm": 0.45237934589385986,
+ "learning_rate": 4.6406336359980075e-06,
+ "loss": 1.2297,
+ "step": 105
+ },
+ {
+ "epoch": 0.4344262295081967,
+ "grad_norm": 0.37015944719314575,
+ "learning_rate": 4.631821961972131e-06,
+ "loss": 0.9531,
+ "step": 106
+ },
+ {
+ "epoch": 0.4385245901639344,
+ "grad_norm": 0.41605710983276367,
+ "learning_rate": 4.622912138686364e-06,
+ "loss": 1.1215,
+ "step": 107
+ },
+ {
+ "epoch": 0.4426229508196721,
+ "grad_norm": 0.41787323355674744,
+ "learning_rate": 4.613904576349709e-06,
+ "loss": 0.9494,
+ "step": 108
+ },
+ {
+ "epoch": 0.44672131147540983,
+ "grad_norm": 0.4963286221027374,
+ "learning_rate": 4.604799689671083e-06,
+ "loss": 1.242,
+ "step": 109
+ },
+ {
+ "epoch": 0.45081967213114754,
+ "grad_norm": 0.38304510712623596,
+ "learning_rate": 4.595597897840224e-06,
+ "loss": 0.935,
+ "step": 110
+ },
+ {
+ "epoch": 0.45491803278688525,
+ "grad_norm": 0.4239124059677124,
+ "learning_rate": 4.58629962450839e-06,
+ "loss": 1.0936,
+ "step": 111
+ },
+ {
+ "epoch": 0.45901639344262296,
+ "grad_norm": 0.440853089094162,
+ "learning_rate": 4.576905297768856e-06,
+ "loss": 1.1029,
+ "step": 112
+ },
+ {
+ "epoch": 0.46311475409836067,
+ "grad_norm": 0.35043877363204956,
+ "learning_rate": 4.567415350137206e-06,
+ "loss": 0.8043,
+ "step": 113
+ },
+ {
+ "epoch": 0.4672131147540984,
+ "grad_norm": 0.4636719226837158,
+ "learning_rate": 4.557830218531414e-06,
+ "loss": 1.2464,
+ "step": 114
+ },
+ {
+ "epoch": 0.4713114754098361,
+ "grad_norm": 0.44369155168533325,
+ "learning_rate": 4.548150344251735e-06,
+ "loss": 1.2288,
+ "step": 115
+ },
+ {
+ "epoch": 0.47540983606557374,
+ "grad_norm": 0.44172561168670654,
+ "learning_rate": 4.538376172960382e-06,
+ "loss": 1.0849,
+ "step": 116
+ },
+ {
+ "epoch": 0.47950819672131145,
+ "grad_norm": 0.44152316451072693,
+ "learning_rate": 4.528508154661013e-06,
+ "loss": 1.2429,
+ "step": 117
+ },
+ {
+ "epoch": 0.48360655737704916,
+ "grad_norm": 0.5085294842720032,
+ "learning_rate": 4.518546743678008e-06,
+ "loss": 1.0935,
+ "step": 118
+ },
+ {
+ "epoch": 0.48770491803278687,
+ "grad_norm": 0.4440874755382538,
+ "learning_rate": 4.508492398635556e-06,
+ "loss": 1.0804,
+ "step": 119
+ },
+ {
+ "epoch": 0.4918032786885246,
+ "grad_norm": 0.4199363589286804,
+ "learning_rate": 4.498345582436534e-06,
+ "loss": 1.0937,
+ "step": 120
+ },
+ {
+ "epoch": 0.4959016393442623,
+ "grad_norm": 0.4209626615047455,
+ "learning_rate": 4.488106762241202e-06,
+ "loss": 1.0989,
+ "step": 121
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.4250634014606476,
+ "learning_rate": 4.477776409445692e-06,
+ "loss": 1.0778,
+ "step": 122
+ },
+ {
+ "epoch": 0.5040983606557377,
+ "grad_norm": 0.37539830803871155,
+ "learning_rate": 4.4673549996603025e-06,
+ "loss": 0.9401,
+ "step": 123
+ },
+ {
+ "epoch": 0.5081967213114754,
+ "grad_norm": 0.5044989585876465,
+ "learning_rate": 4.4568430126876036e-06,
+ "loss": 1.2288,
+ "step": 124
+ },
+ {
+ "epoch": 0.5122950819672131,
+ "grad_norm": 0.42166656255722046,
+ "learning_rate": 4.446240932500349e-06,
+ "loss": 1.0905,
+ "step": 125
+ },
+ {
+ "epoch": 0.5163934426229508,
+ "grad_norm": 0.46548137068748474,
+ "learning_rate": 4.435549247219187e-06,
+ "loss": 1.2341,
+ "step": 126
+ },
+ {
+ "epoch": 0.5204918032786885,
+ "grad_norm": 0.4220496714115143,
+ "learning_rate": 4.424768449090195e-06,
+ "loss": 1.0803,
+ "step": 127
+ },
+ {
+ "epoch": 0.5245901639344263,
+ "grad_norm": 0.4304647743701935,
+ "learning_rate": 4.413899034462215e-06,
+ "loss": 1.0746,
+ "step": 128
+ },
+ {
+ "epoch": 0.5286885245901639,
+ "grad_norm": 0.4345115125179291,
+ "learning_rate": 4.402941503763996e-06,
+ "loss": 1.0885,
+ "step": 129
+ },
+ {
+ "epoch": 0.5327868852459017,
+ "grad_norm": 0.37969139218330383,
+ "learning_rate": 4.391896361481158e-06,
+ "loss": 0.9346,
+ "step": 130
+ },
+ {
+ "epoch": 0.5368852459016393,
+ "grad_norm": 0.4811060130596161,
+ "learning_rate": 4.380764116132972e-06,
+ "loss": 1.1062,
+ "step": 131
+ },
+ {
+ "epoch": 0.5409836065573771,
+ "grad_norm": 0.43868470191955566,
+ "learning_rate": 4.369545280248932e-06,
+ "loss": 1.075,
+ "step": 132
+ },
+ {
+ "epoch": 0.5450819672131147,
+ "grad_norm": 0.5028568506240845,
+ "learning_rate": 4.358240370345177e-06,
+ "loss": 1.2363,
+ "step": 133
+ },
+ {
+ "epoch": 0.5491803278688525,
+ "grad_norm": 0.4681834876537323,
+ "learning_rate": 4.346849906900693e-06,
+ "loss": 1.2304,
+ "step": 134
+ },
+ {
+ "epoch": 0.5532786885245902,
+ "grad_norm": 0.434079647064209,
+ "learning_rate": 4.335374414333362e-06,
+ "loss": 0.9546,
+ "step": 135
+ },
+ {
+ "epoch": 0.5573770491803278,
+ "grad_norm": 0.49351316690444946,
+ "learning_rate": 4.323814420975815e-06,
+ "loss": 1.092,
+ "step": 136
+ },
+ {
+ "epoch": 0.5614754098360656,
+ "grad_norm": 0.4992028772830963,
+ "learning_rate": 4.312170459051103e-06,
+ "loss": 1.2141,
+ "step": 137
+ },
+ {
+ "epoch": 0.5655737704918032,
+ "grad_norm": 0.44842249155044556,
+ "learning_rate": 4.300443064648198e-06,
+ "loss": 1.0836,
+ "step": 138
+ },
+ {
+ "epoch": 0.569672131147541,
+ "grad_norm": 0.48587048053741455,
+ "learning_rate": 4.288632777697313e-06,
+ "loss": 1.2317,
+ "step": 139
+ },
+ {
+ "epoch": 0.5737704918032787,
+ "grad_norm": 0.4601718783378601,
+ "learning_rate": 4.276740141945035e-06,
+ "loss": 1.0736,
+ "step": 140
+ },
+ {
+ "epoch": 0.5778688524590164,
+ "grad_norm": 0.4077562987804413,
+ "learning_rate": 4.264765704929305e-06,
+ "loss": 1.0838,
+ "step": 141
+ },
+ {
+ "epoch": 0.5819672131147541,
+ "grad_norm": 0.3944692611694336,
+ "learning_rate": 4.252710017954191e-06,
+ "loss": 0.928,
+ "step": 142
+ },
+ {
+ "epoch": 0.5860655737704918,
+ "grad_norm": 0.5090078115463257,
+ "learning_rate": 4.240573636064525e-06,
+ "loss": 1.2291,
+ "step": 143
+ },
+ {
+ "epoch": 0.5901639344262295,
+ "grad_norm": 0.5023549199104309,
+ "learning_rate": 4.228357118020332e-06,
+ "loss": 1.2277,
+ "step": 144
+ },
+ {
+ "epoch": 0.5942622950819673,
+ "grad_norm": 0.42317765951156616,
+ "learning_rate": 4.216061026271119e-06,
+ "loss": 1.1034,
+ "step": 145
+ },
+ {
+ "epoch": 0.5983606557377049,
+ "grad_norm": 0.4723348021507263,
+ "learning_rate": 4.203685926929968e-06,
+ "loss": 1.2311,
+ "step": 146
+ },
+ {
+ "epoch": 0.6024590163934426,
+ "grad_norm": 0.41492870450019836,
+ "learning_rate": 4.191232389747477e-06,
+ "loss": 1.0872,
+ "step": 147
+ },
+ {
+ "epoch": 0.6065573770491803,
+ "grad_norm": 0.45762187242507935,
+ "learning_rate": 4.178700988085534e-06,
+ "loss": 1.2289,
+ "step": 148
+ },
+ {
+ "epoch": 0.610655737704918,
+ "grad_norm": 0.39948973059654236,
+ "learning_rate": 4.166092298890909e-06,
+ "loss": 0.9464,
+ "step": 149
+ },
+ {
+ "epoch": 0.6147540983606558,
+ "grad_norm": 0.4192037880420685,
+ "learning_rate": 4.1534069026686975e-06,
+ "loss": 1.0886,
+ "step": 150
+ },
+ {
+ "epoch": 0.6188524590163934,
+ "grad_norm": 0.4573190212249756,
+ "learning_rate": 4.140645383455593e-06,
+ "loss": 0.9762,
+ "step": 151
+ },
+ {
+ "epoch": 0.6229508196721312,
+ "grad_norm": 0.5367359519004822,
+ "learning_rate": 4.127808328793e-06,
+ "loss": 1.2318,
+ "step": 152
+ },
+ {
+ "epoch": 0.6270491803278688,
+ "grad_norm": 0.40376120805740356,
+ "learning_rate": 4.114896329699979e-06,
+ "loss": 1.0804,
+ "step": 153
+ },
+ {
+ "epoch": 0.6311475409836066,
+ "grad_norm": 0.4679541289806366,
+ "learning_rate": 4.101909980646037e-06,
+ "loss": 1.2113,
+ "step": 154
+ },
+ {
+ "epoch": 0.6352459016393442,
+ "grad_norm": 0.4209458529949188,
+ "learning_rate": 4.088849879523763e-06,
+ "loss": 1.0744,
+ "step": 155
+ },
+ {
+ "epoch": 0.639344262295082,
+ "grad_norm": 0.430154025554657,
+ "learning_rate": 4.075716627621295e-06,
+ "loss": 1.0781,
+ "step": 156
+ },
+ {
+ "epoch": 0.6434426229508197,
+ "grad_norm": 0.53505539894104,
+ "learning_rate": 4.062510829594641e-06,
+ "loss": 1.2121,
+ "step": 157
+ },
+ {
+ "epoch": 0.6475409836065574,
+ "grad_norm": 0.4071391224861145,
+ "learning_rate": 4.049233093439834e-06,
+ "loss": 1.0753,
+ "step": 158
+ },
+ {
+ "epoch": 0.6516393442622951,
+ "grad_norm": 0.35387763381004333,
+ "learning_rate": 4.035884030464951e-06,
+ "loss": 0.9405,
+ "step": 159
+ },
+ {
+ "epoch": 0.6557377049180327,
+ "grad_norm": 0.4813622534275055,
+ "learning_rate": 4.022464255261956e-06,
+ "loss": 1.0739,
+ "step": 160
+ },
+ {
+ "epoch": 0.6598360655737705,
+ "grad_norm": 0.38495829701423645,
+ "learning_rate": 4.008974385678412e-06,
+ "loss": 0.9311,
+ "step": 161
+ },
+ {
+ "epoch": 0.6639344262295082,
+ "grad_norm": 0.41135096549987793,
+ "learning_rate": 3.995415042789034e-06,
+ "loss": 0.9564,
+ "step": 162
+ },
+ {
+ "epoch": 0.6680327868852459,
+ "grad_norm": 0.37345942854881287,
+ "learning_rate": 3.9817868508670925e-06,
+ "loss": 0.9599,
+ "step": 163
+ },
+ {
+ "epoch": 0.6721311475409836,
+ "grad_norm": 0.40914052724838257,
+ "learning_rate": 3.9680904373556735e-06,
+ "loss": 1.075,
+ "step": 164
+ },
+ {
+ "epoch": 0.6762295081967213,
+ "grad_norm": 0.4219646453857422,
+ "learning_rate": 3.954326432838792e-06,
+ "loss": 0.9285,
+ "step": 165
+ },
+ {
+ "epoch": 0.680327868852459,
+ "grad_norm": 0.4729180335998535,
+ "learning_rate": 3.940495471012355e-06,
+ "loss": 1.2179,
+ "step": 166
+ },
+ {
+ "epoch": 0.6844262295081968,
+ "grad_norm": 0.45373761653900146,
+ "learning_rate": 3.926598188654993e-06,
+ "loss": 1.2178,
+ "step": 167
+ },
+ {
+ "epoch": 0.6885245901639344,
+ "grad_norm": 0.37693753838539124,
+ "learning_rate": 3.912635225598739e-06,
+ "loss": 0.923,
+ "step": 168
+ },
+ {
+ "epoch": 0.6926229508196722,
+ "grad_norm": 0.42904287576675415,
+ "learning_rate": 3.898607224699568e-06,
+ "loss": 1.0734,
+ "step": 169
+ },
+ {
+ "epoch": 0.6967213114754098,
+ "grad_norm": 0.46242570877075195,
+ "learning_rate": 3.884514831807805e-06,
+ "loss": 1.2135,
+ "step": 170
+ },
+ {
+ "epoch": 0.7008196721311475,
+ "grad_norm": 0.4614650309085846,
+ "learning_rate": 3.870358695738389e-06,
+ "loss": 1.217,
+ "step": 171
+ },
+ {
+ "epoch": 0.7049180327868853,
+ "grad_norm": 0.4565180540084839,
+ "learning_rate": 3.856139468240996e-06,
+ "loss": 1.2224,
+ "step": 172
+ },
+ {
+ "epoch": 0.7090163934426229,
+ "grad_norm": 0.44657179713249207,
+ "learning_rate": 3.841857803970039e-06,
+ "loss": 1.2107,
+ "step": 173
+ },
+ {
+ "epoch": 0.7131147540983607,
+ "grad_norm": 0.4340556859970093,
+ "learning_rate": 3.827514360454529e-06,
+ "loss": 1.0799,
+ "step": 174
+ },
+ {
+ "epoch": 0.7172131147540983,
+ "grad_norm": 0.4708877205848694,
+ "learning_rate": 3.813109798067789e-06,
+ "loss": 1.2046,
+ "step": 175
+ },
+ {
+ "epoch": 0.7213114754098361,
+ "grad_norm": 0.441154420375824,
+ "learning_rate": 3.79864477999707e-06,
+ "loss": 1.097,
+ "step": 176
+ },
+ {
+ "epoch": 0.7254098360655737,
+ "grad_norm": 0.4938986599445343,
+ "learning_rate": 3.7841199722130016e-06,
+ "loss": 1.0759,
+ "step": 177
+ },
+ {
+ "epoch": 0.7295081967213115,
+ "grad_norm": 0.3727339506149292,
+ "learning_rate": 3.7695360434389385e-06,
+ "loss": 0.946,
+ "step": 178
+ },
+ {
+ "epoch": 0.7336065573770492,
+ "grad_norm": 0.4473823010921478,
+ "learning_rate": 3.754893665120171e-06,
+ "loss": 1.0656,
+ "step": 179
+ },
+ {
+ "epoch": 0.7377049180327869,
+ "grad_norm": 0.4689042866230011,
+ "learning_rate": 3.7401935113930115e-06,
+ "loss": 1.0684,
+ "step": 180
+ },
+ {
+ "epoch": 0.7418032786885246,
+ "grad_norm": 0.40667393803596497,
+ "learning_rate": 3.7254362590537555e-06,
+ "loss": 1.0817,
+ "step": 181
+ },
+ {
+ "epoch": 0.7459016393442623,
+ "grad_norm": 0.44492149353027344,
+ "learning_rate": 3.7106225875275257e-06,
+ "loss": 1.076,
+ "step": 182
+ },
+ {
+ "epoch": 0.75,
+ "grad_norm": 0.438416063785553,
+ "learning_rate": 3.695753178836986e-06,
+ "loss": 1.2181,
+ "step": 183
+ },
+ {
+ "epoch": 0.7540983606557377,
+ "grad_norm": 0.4017588794231415,
+ "learning_rate": 3.680828717570946e-06,
+ "loss": 1.0667,
+ "step": 184
+ },
+ {
+ "epoch": 0.7581967213114754,
+ "grad_norm": 0.42772918939590454,
+ "learning_rate": 3.6658498908528394e-06,
+ "loss": 1.2133,
+ "step": 185
+ },
+ {
+ "epoch": 0.7622950819672131,
+ "grad_norm": 0.4016564190387726,
+ "learning_rate": 3.6508173883090915e-06,
+ "loss": 1.0724,
+ "step": 186
+ },
+ {
+ "epoch": 0.7663934426229508,
+ "grad_norm": 0.4046074151992798,
+ "learning_rate": 3.635731902037364e-06,
+ "loss": 1.0711,
+ "step": 187
+ },
+ {
+ "epoch": 0.7704918032786885,
+ "grad_norm": 0.42936643958091736,
+ "learning_rate": 3.6205941265746976e-06,
+ "loss": 1.0584,
+ "step": 188
+ },
+ {
+ "epoch": 0.7745901639344263,
+ "grad_norm": 0.4360021650791168,
+ "learning_rate": 3.6054047588655287e-06,
+ "loss": 1.2024,
+ "step": 189
+ },
+ {
+ "epoch": 0.7786885245901639,
+ "grad_norm": 0.3777562379837036,
+ "learning_rate": 3.5901644982296058e-06,
+ "loss": 0.9291,
+ "step": 190
+ },
+ {
+ "epoch": 0.7827868852459017,
+ "grad_norm": 0.4032043218612671,
+ "learning_rate": 3.5748740463297926e-06,
1345
+ "loss": 1.1005,
1346
+ "step": 191
1347
+ },
1348
+ {
1349
+ "epoch": 0.7868852459016393,
1350
+ "grad_norm": 0.4494157135486603,
1351
+ "learning_rate": 3.5595341071397627e-06,
1352
+ "loss": 1.2113,
1353
+ "step": 192
1354
+ },
1355
+ {
1356
+ "epoch": 0.7909836065573771,
1357
+ "grad_norm": 0.4484882652759552,
1358
+ "learning_rate": 3.5441453869115885e-06,
1359
+ "loss": 1.2219,
1360
+ "step": 193
1361
+ },
1362
+ {
1363
+ "epoch": 0.7950819672131147,
1364
+ "grad_norm": 0.39648738503456116,
1365
+ "learning_rate": 3.5287085941432246e-06,
1366
+ "loss": 1.0656,
1367
+ "step": 194
1368
+ },
1369
+ {
1370
+ "epoch": 0.7991803278688525,
1371
+ "grad_norm": 0.4165459871292114,
1372
+ "learning_rate": 3.51322443954589e-06,
1373
+ "loss": 1.2178,
1374
+ "step": 195
1375
+ },
1376
+ {
1377
+ "epoch": 0.8032786885245902,
1378
+ "grad_norm": 0.40044641494750977,
1379
+ "learning_rate": 3.4976936360113475e-06,
1380
+ "loss": 1.0698,
1381
+ "step": 196
1382
+ },
1383
+ {
1384
+ "epoch": 0.8073770491803278,
1385
+ "grad_norm": 0.40902015566825867,
1386
+ "learning_rate": 3.4821168985790776e-06,
1387
+ "loss": 1.0673,
1388
+ "step": 197
1389
+ },
1390
+ {
1391
+ "epoch": 0.8114754098360656,
1392
+ "grad_norm": 0.4534085690975189,
1393
+ "learning_rate": 3.4664949444033648e-06,
1394
+ "loss": 1.0669,
1395
+ "step": 198
1396
+ },
1397
+ {
1398
+ "epoch": 0.8155737704918032,
1399
+ "grad_norm": 0.4562125504016876,
1400
+ "learning_rate": 3.4508284927202733e-06,
1401
+ "loss": 1.2094,
1402
+ "step": 199
1403
+ },
1404
+ {
1405
+ "epoch": 0.819672131147541,
1406
+ "grad_norm": 0.3738682270050049,
1407
+ "learning_rate": 3.4351182648145388e-06,
1408
+ "loss": 0.9329,
1409
+ "step": 200
1410
+ },
1411
+ {
1412
+ "epoch": 0.8237704918032787,
1413
+ "grad_norm": 0.37695175409317017,
1414
+ "learning_rate": 3.4193649839863563e-06,
1415
+ "loss": 0.9267,
1416
+ "step": 201
1417
+ },
1418
+ {
1419
+ "epoch": 0.8278688524590164,
1420
+ "grad_norm": 0.43658244609832764,
1421
+ "learning_rate": 3.4035693755180817e-06,
1422
+ "loss": 1.2137,
1423
+ "step": 202
1424
+ },
1425
+ {
1426
+ "epoch": 0.8319672131147541,
1427
+ "grad_norm": 0.4023456573486328,
1428
+ "learning_rate": 3.387732166640837e-06,
1429
+ "loss": 1.0668,
1430
+ "step": 203
1431
+ },
1432
+ {
1433
+ "epoch": 0.8360655737704918,
1434
+ "grad_norm": 0.39854827523231506,
1435
+ "learning_rate": 3.3718540865010348e-06,
1436
+ "loss": 1.0573,
1437
+ "step": 204
1438
+ },
1439
+ {
1440
+ "epoch": 0.8401639344262295,
1441
+ "grad_norm": 0.43412265181541443,
1442
+ "learning_rate": 3.355935866126798e-06,
1443
+ "loss": 1.2174,
1444
+ "step": 205
1445
+ },
1446
+ {
1447
+ "epoch": 0.8442622950819673,
1448
+ "grad_norm": 0.4094075858592987,
1449
+ "learning_rate": 3.3399782383943153e-06,
1450
+ "loss": 1.0851,
1451
+ "step": 206
1452
+ },
1453
+ {
1454
+ "epoch": 0.8483606557377049,
1455
+ "grad_norm": 0.35956600308418274,
1456
+ "learning_rate": 3.3239819379940896e-06,
1457
+ "loss": 0.9296,
1458
+ "step": 207
1459
+ },
1460
+ {
1461
+ "epoch": 0.8524590163934426,
1462
+ "grad_norm": 0.4750164747238159,
1463
+ "learning_rate": 3.3079477013971173e-06,
1464
+ "loss": 1.2161,
1465
+ "step": 208
1466
+ },
1467
+ {
1468
+ "epoch": 0.8565573770491803,
1469
+ "grad_norm": 0.4241454601287842,
1470
+ "learning_rate": 3.2918762668209815e-06,
1471
+ "loss": 1.2151,
1472
+ "step": 209
1473
+ },
1474
+ {
1475
+ "epoch": 0.860655737704918,
1476
+ "grad_norm": 0.39696815609931946,
1477
+ "learning_rate": 3.275768374195862e-06,
1478
+ "loss": 1.0917,
1479
+ "step": 210
1480
+ },
1481
+ {
1482
+ "epoch": 0.8647540983606558,
1483
+ "grad_norm": 0.43563538789749146,
1484
+ "learning_rate": 3.2596247651304715e-06,
1485
+ "loss": 1.2044,
1486
+ "step": 211
1487
+ },
1488
+ {
1489
+ "epoch": 0.8688524590163934,
1490
+ "grad_norm": 0.4409486949443817,
1491
+ "learning_rate": 3.2434461828779096e-06,
1492
+ "loss": 1.0891,
1493
+ "step": 212
1494
+ },
1495
+ {
1496
+ "epoch": 0.8729508196721312,
1497
+ "grad_norm": 0.41782939434051514,
1498
+ "learning_rate": 3.227233372301444e-06,
1499
+ "loss": 1.0679,
1500
+ "step": 213
1501
+ },
1502
+ {
1503
+ "epoch": 0.8770491803278688,
1504
+ "grad_norm": 0.4401546120643616,
1505
+ "learning_rate": 3.2109870798402186e-06,
1506
+ "loss": 1.0658,
1507
+ "step": 214
1508
+ },
1509
+ {
1510
+ "epoch": 0.8811475409836066,
1511
+ "grad_norm": 0.38349413871765137,
1512
+ "learning_rate": 3.194708053474885e-06,
1513
+ "loss": 0.9233,
1514
+ "step": 215
1515
+ },
1516
+ {
1517
+ "epoch": 0.8852459016393442,
1518
+ "grad_norm": 0.42847752571105957,
1519
+ "learning_rate": 3.1783970426931686e-06,
1520
+ "loss": 1.0621,
1521
+ "step": 216
1522
+ },
1523
+ {
1524
+ "epoch": 0.889344262295082,
1525
+ "grad_norm": 0.45880404114723206,
1526
+ "learning_rate": 3.1620547984553563e-06,
1527
+ "loss": 1.2074,
1528
+ "step": 217
1529
+ },
1530
+ {
1531
+ "epoch": 0.8934426229508197,
1532
+ "grad_norm": 0.4133684039115906,
1533
+ "learning_rate": 3.1456820731597283e-06,
1534
+ "loss": 1.0612,
1535
+ "step": 218
1536
+ },
1537
+ {
1538
+ "epoch": 0.8975409836065574,
1539
+ "grad_norm": 0.4100908637046814,
1540
+ "learning_rate": 3.129279620607915e-06,
1541
+ "loss": 1.0776,
1542
+ "step": 219
1543
+ },
1544
+ {
1545
+ "epoch": 0.9016393442622951,
1546
+ "grad_norm": 0.3937971293926239,
1547
+ "learning_rate": 3.1128481959701916e-06,
1548
+ "loss": 1.0704,
1549
+ "step": 220
1550
+ },
1551
+ {
1552
+ "epoch": 0.9057377049180327,
1553
+ "grad_norm": 0.3965698778629303,
1554
+ "learning_rate": 3.096388555750711e-06,
1555
+ "loss": 1.0668,
1556
+ "step": 221
1557
+ },
1558
+ {
1559
+ "epoch": 0.9098360655737705,
1560
+ "grad_norm": 0.43988457322120667,
1561
+ "learning_rate": 3.0799014577526735e-06,
1562
+ "loss": 1.2049,
1563
+ "step": 222
1564
+ },
1565
+ {
1566
+ "epoch": 0.9139344262295082,
1567
+ "grad_norm": 0.41929975152015686,
1568
+ "learning_rate": 3.063387661043438e-06,
1569
+ "loss": 1.0698,
1570
+ "step": 223
1571
+ },
1572
+ {
1573
+ "epoch": 0.9180327868852459,
1574
+ "grad_norm": 0.40265992283821106,
1575
+ "learning_rate": 3.0468479259195753e-06,
1576
+ "loss": 1.0637,
1577
+ "step": 224
1578
+ },
1579
+ {
1580
+ "epoch": 0.9221311475409836,
1581
+ "grad_norm": 0.26293089985847473,
1582
+ "learning_rate": 3.0302830138718605e-06,
1583
+ "loss": 0.5109,
1584
+ "step": 225
1585
+ },
1586
+ {
1587
+ "epoch": 0.9262295081967213,
1588
+ "grad_norm": 0.447826623916626,
1589
+ "learning_rate": 3.0136936875502175e-06,
1590
+ "loss": 1.2053,
1591
+ "step": 226
1592
+ },
1593
+ {
1594
+ "epoch": 0.930327868852459,
1595
+ "grad_norm": 0.34088265895843506,
1596
+ "learning_rate": 2.9970807107286078e-06,
1597
+ "loss": 0.7954,
1598
+ "step": 227
1599
+ },
1600
+ {
1601
+ "epoch": 0.9344262295081968,
1602
+ "grad_norm": 0.4665830135345459,
1603
+ "learning_rate": 2.9804448482698595e-06,
1604
+ "loss": 1.1965,
1605
+ "step": 228
1606
+ },
1607
+ {
1608
+ "epoch": 0.9385245901639344,
1609
+ "grad_norm": 0.40730252861976624,
1610
+ "learning_rate": 2.9637868660904613e-06,
1611
+ "loss": 1.0724,
1612
+ "step": 229
1613
+ },
1614
+ {
1615
+ "epoch": 0.9426229508196722,
1616
+ "grad_norm": 0.4207199513912201,
1617
+ "learning_rate": 2.947107531125292e-06,
1618
+ "loss": 1.206,
1619
+ "step": 230
1620
+ },
1621
+ {
1622
+ "epoch": 0.9467213114754098,
1623
+ "grad_norm": 0.4388861358165741,
1624
+ "learning_rate": 2.9304076112923175e-06,
1625
+ "loss": 1.2049,
1626
+ "step": 231
1627
+ },
1628
+ {
1629
+ "epoch": 0.9508196721311475,
1630
+ "grad_norm": 0.4255218505859375,
1631
+ "learning_rate": 2.9136878754572317e-06,
1632
+ "loss": 1.2082,
1633
+ "step": 232
1634
+ },
1635
+ {
1636
+ "epoch": 0.9549180327868853,
1637
+ "grad_norm": 0.44706571102142334,
1638
+ "learning_rate": 2.896949093398059e-06,
1639
+ "loss": 1.2174,
1640
+ "step": 233
1641
+ },
1642
+ {
1643
+ "epoch": 0.9590163934426229,
1644
+ "grad_norm": 0.41166871786117554,
1645
+ "learning_rate": 2.8801920357697132e-06,
1646
+ "loss": 1.065,
1647
+ "step": 234
1648
+ },
1649
+ {
1650
+ "epoch": 0.9631147540983607,
1651
+ "grad_norm": 0.44747135043144226,
1652
+ "learning_rate": 2.8634174740685165e-06,
1653
+ "loss": 1.1934,
1654
+ "step": 235
1655
+ },
1656
+ {
1657
+ "epoch": 0.9672131147540983,
1658
+ "grad_norm": 0.3774076998233795,
1659
+ "learning_rate": 2.8466261805966797e-06,
1660
+ "loss": 1.0797,
1661
+ "step": 236
1662
+ },
1663
+ {
1664
+ "epoch": 0.9713114754098361,
1665
+ "grad_norm": 0.41140979528427124,
1666
+ "learning_rate": 2.8298189284267492e-06,
1667
+ "loss": 1.0864,
1668
+ "step": 237
1669
+ },
1670
+ {
1671
+ "epoch": 0.9754098360655737,
1672
+ "grad_norm": 0.35885345935821533,
1673
+ "learning_rate": 2.8129964913660056e-06,
1674
+ "loss": 0.9309,
1675
+ "step": 238
1676
+ },
1677
+ {
1678
+ "epoch": 0.9795081967213115,
1679
+ "grad_norm": 0.432704895734787,
1680
+ "learning_rate": 2.796159643920847e-06,
1681
+ "loss": 1.2116,
1682
+ "step": 239
1683
+ },
1684
+ {
1685
+ "epoch": 0.9836065573770492,
1686
+ "grad_norm": 0.45642974972724915,
1687
+ "learning_rate": 2.7793091612611266e-06,
1688
+ "loss": 1.2094,
1689
+ "step": 240
1690
+ },
1691
+ {
1692
+ "epoch": 0.9877049180327869,
1693
+ "grad_norm": 0.38285255432128906,
1694
+ "learning_rate": 2.762445819184463e-06,
1695
+ "loss": 0.9787,
1696
+ "step": 241
1697
+ },
1698
+ {
1699
+ "epoch": 0.9918032786885246,
1700
+ "grad_norm": 0.37903809547424316,
1701
+ "learning_rate": 2.7455703940805228e-06,
1702
+ "loss": 1.0501,
1703
+ "step": 242
1704
+ },
1705
+ {
1706
+ "epoch": 0.9959016393442623,
1707
+ "grad_norm": 0.384909451007843,
1708
+ "learning_rate": 2.7286836628952775e-06,
1709
+ "loss": 0.9503,
1710
+ "step": 243
1711
+ },
1712
+ {
1713
+ "epoch": 1.0,
1714
+ "grad_norm": 0.4128957986831665,
1715
+ "learning_rate": 2.7117864030952306e-06,
1716
+ "loss": 1.0755,
1717
+ "step": 244
1718
+ },
1719
+ {
1720
+ "epoch": 1.0040983606557377,
1721
+ "grad_norm": 0.4136478900909424,
1722
+ "learning_rate": 2.694879392631625e-06,
1723
+ "loss": 1.1887,
1724
+ "step": 245
1725
+ },
1726
+ {
1727
+ "epoch": 1.0081967213114753,
1728
+ "grad_norm": 0.40925952792167664,
1729
+ "learning_rate": 2.677963409904624e-06,
1730
+ "loss": 1.2011,
1731
+ "step": 246
1732
+ },
1733
+ {
1734
+ "epoch": 1.0122950819672132,
1735
+ "grad_norm": 0.3444474935531616,
1736
+ "learning_rate": 2.6610392337274754e-06,
1737
+ "loss": 0.9242,
1738
+ "step": 247
1739
+ },
1740
+ {
1741
+ "epoch": 1.0163934426229508,
1742
+ "grad_norm": 0.4211072027683258,
1743
+ "learning_rate": 2.644107643290653e-06,
1744
+ "loss": 1.1924,
1745
+ "step": 248
1746
+ },
1747
+ {
1748
+ "epoch": 1.0204918032786885,
1749
+ "grad_norm": 0.3850081264972687,
1750
+ "learning_rate": 2.6271694181259854e-06,
1751
+ "loss": 1.052,
1752
+ "step": 249
1753
+ },
1754
+ {
1755
+ "epoch": 1.0245901639344261,
1756
+ "grad_norm": 0.36681899428367615,
1757
+ "learning_rate": 2.6102253380707626e-06,
1758
+ "loss": 0.92,
1759
+ "step": 250
1760
+ },
1761
+ {
1762
+ "epoch": 1.028688524590164,
1763
+ "grad_norm": 0.4259503185749054,
1764
+ "learning_rate": 2.5932761832318354e-06,
1765
+ "loss": 1.191,
1766
+ "step": 251
1767
+ },
1768
+ {
1769
+ "epoch": 1.0327868852459017,
1770
+ "grad_norm": 0.31239956617355347,
1771
+ "learning_rate": 2.5763227339496984e-06,
1772
+ "loss": 0.7821,
1773
+ "step": 252
1774
+ },
1775
+ {
1776
+ "epoch": 1.0368852459016393,
1777
+ "grad_norm": 0.3516213893890381,
1778
+ "learning_rate": 2.5593657707625615e-06,
1779
+ "loss": 0.9113,
1780
+ "step": 253
1781
+ },
1782
+ {
1783
+ "epoch": 1.040983606557377,
1784
+ "grad_norm": 0.4148314595222473,
1785
+ "learning_rate": 2.5424060743704158e-06,
1786
+ "loss": 1.1931,
1787
+ "step": 254
1788
+ },
1789
+ {
1790
+ "epoch": 1.0450819672131149,
1791
+ "grad_norm": 0.39430367946624756,
1792
+ "learning_rate": 2.5254444255990917e-06,
1793
+ "loss": 1.1854,
1794
+ "step": 255
1795
+ },
1796
+ {
1797
+ "epoch": 1.0491803278688525,
1798
+ "grad_norm": 0.3900966942310333,
1799
+ "learning_rate": 2.508481605364302e-06,
1800
+ "loss": 1.0634,
1801
+ "step": 256
1802
+ },
1803
+ {
1804
+ "epoch": 1.0532786885245902,
1805
+ "grad_norm": 0.37411046028137207,
1806
+ "learning_rate": 2.491518394635699e-06,
1807
+ "loss": 1.0725,
1808
+ "step": 257
1809
+ },
1810
+ {
1811
+ "epoch": 1.0573770491803278,
1812
+ "grad_norm": 0.38089755177497864,
1813
+ "learning_rate": 2.4745555744009096e-06,
1814
+ "loss": 1.0629,
1815
+ "step": 258
1816
+ },
1817
+ {
1818
+ "epoch": 1.0614754098360655,
1819
+ "grad_norm": 0.3890707492828369,
1820
+ "learning_rate": 2.4575939256295846e-06,
1821
+ "loss": 1.0625,
1822
+ "step": 259
1823
+ },
1824
+ {
1825
+ "epoch": 1.0655737704918034,
1826
+ "grad_norm": 0.42204442620277405,
1827
+ "learning_rate": 2.44063422923744e-06,
1828
+ "loss": 1.1861,
1829
+ "step": 260
1830
+ },
1831
+ {
1832
+ "epoch": 1.069672131147541,
1833
+ "grad_norm": 0.36366429924964905,
1834
+ "learning_rate": 2.423677266050303e-06,
1835
+ "loss": 0.9154,
1836
+ "step": 261
1837
+ },
1838
+ {
1839
+ "epoch": 1.0737704918032787,
1840
+ "grad_norm": 0.40688806772232056,
1841
+ "learning_rate": 2.4067238167681655e-06,
1842
+ "loss": 1.1794,
1843
+ "step": 262
1844
+ },
1845
+ {
1846
+ "epoch": 1.0778688524590163,
1847
+ "grad_norm": 0.417047381401062,
1848
+ "learning_rate": 2.389774661929238e-06,
1849
+ "loss": 1.19,
1850
+ "step": 263
1851
+ },
1852
+ {
1853
+ "epoch": 1.0819672131147542,
1854
+ "grad_norm": 0.417417049407959,
1855
+ "learning_rate": 2.3728305818740154e-06,
1856
+ "loss": 1.1934,
1857
+ "step": 264
1858
+ },
1859
+ {
1860
+ "epoch": 1.0860655737704918,
1861
+ "grad_norm": 0.3654896914958954,
1862
+ "learning_rate": 2.355892356709347e-06,
1863
+ "loss": 1.0496,
1864
+ "step": 265
1865
+ },
1866
+ {
1867
+ "epoch": 1.0901639344262295,
1868
+ "grad_norm": 0.4088152050971985,
1869
+ "learning_rate": 2.338960766272526e-06,
1870
+ "loss": 1.1898,
1871
+ "step": 266
1872
+ },
1873
+ {
1874
+ "epoch": 1.0942622950819672,
1875
+ "grad_norm": 0.3819476366043091,
1876
+ "learning_rate": 2.3220365900953767e-06,
1877
+ "loss": 1.0567,
1878
+ "step": 267
1879
+ },
1880
+ {
1881
+ "epoch": 1.098360655737705,
1882
+ "grad_norm": 0.42158523201942444,
1883
+ "learning_rate": 2.3051206073683755e-06,
1884
+ "loss": 1.0636,
1885
+ "step": 268
1886
+ },
1887
+ {
1888
+ "epoch": 1.1024590163934427,
1889
+ "grad_norm": 0.42742791771888733,
1890
+ "learning_rate": 2.2882135969047703e-06,
1891
+ "loss": 1.1936,
1892
+ "step": 269
1893
+ },
1894
+ {
1895
+ "epoch": 1.1065573770491803,
1896
+ "grad_norm": 0.44875332713127136,
1897
+ "learning_rate": 2.2713163371047234e-06,
1898
+ "loss": 1.1967,
1899
+ "step": 270
1900
+ },
1901
+ {
1902
+ "epoch": 1.110655737704918,
1903
+ "grad_norm": 0.4295426309108734,
1904
+ "learning_rate": 2.2544296059194777e-06,
1905
+ "loss": 1.1888,
1906
+ "step": 271
1907
+ },
1908
+ {
1909
+ "epoch": 1.1147540983606556,
1910
+ "grad_norm": 0.3944399058818817,
1911
+ "learning_rate": 2.237554180815538e-06,
1912
+ "loss": 1.0568,
1913
+ "step": 272
1914
+ },
1915
+ {
1916
+ "epoch": 1.1188524590163935,
1917
+ "grad_norm": 0.39292725920677185,
1918
+ "learning_rate": 2.220690838738874e-06,
1919
+ "loss": 1.0688,
1920
+ "step": 273
1921
+ },
1922
+ {
1923
+ "epoch": 1.1229508196721312,
1924
+ "grad_norm": 0.4272754490375519,
1925
+ "learning_rate": 2.203840356079154e-06,
1926
+ "loss": 1.1974,
1927
+ "step": 274
1928
+ },
1929
+ {
1930
+ "epoch": 1.1270491803278688,
1931
+ "grad_norm": 0.4205959737300873,
1932
+ "learning_rate": 2.1870035086339957e-06,
1933
+ "loss": 1.1913,
1934
+ "step": 275
1935
+ },
1936
+ {
1937
+ "epoch": 1.1311475409836065,
1938
+ "grad_norm": 0.3894692361354828,
1939
+ "learning_rate": 2.170181071573252e-06,
1940
+ "loss": 1.0451,
1941
+ "step": 276
1942
+ },
1943
+ {
1944
+ "epoch": 1.1352459016393444,
1945
+ "grad_norm": 0.41992002725601196,
1946
+ "learning_rate": 2.1533738194033207e-06,
1947
+ "loss": 1.0575,
1948
+ "step": 277
1949
+ },
1950
+ {
1951
+ "epoch": 1.139344262295082,
1952
+ "grad_norm": 0.3990057408809662,
1953
+ "learning_rate": 2.1365825259314843e-06,
1954
+ "loss": 1.0577,
1955
+ "step": 278
1956
+ },
1957
+ {
1958
+ "epoch": 1.1434426229508197,
1959
+ "grad_norm": 0.3570830523967743,
1960
+ "learning_rate": 2.119807964230287e-06,
1961
+ "loss": 0.9146,
1962
+ "step": 279
1963
+ },
1964
+ {
1965
+ "epoch": 1.1475409836065573,
1966
+ "grad_norm": 0.38071316480636597,
1967
+ "learning_rate": 2.1030509066019412e-06,
1968
+ "loss": 1.054,
1969
+ "step": 280
1970
+ },
1971
+ {
1972
+ "epoch": 1.151639344262295,
1973
+ "grad_norm": 0.35822221636772156,
1974
+ "learning_rate": 2.0863121245427683e-06,
1975
+ "loss": 0.9101,
1976
+ "step": 281
1977
+ },
1978
+ {
1979
+ "epoch": 1.1557377049180328,
1980
+ "grad_norm": 0.38994014263153076,
1981
+ "learning_rate": 2.0695923887076824e-06,
1982
+ "loss": 1.0592,
1983
+ "step": 282
1984
+ },
1985
+ {
1986
+ "epoch": 1.1598360655737705,
1987
+ "grad_norm": 0.4252474904060364,
1988
+ "learning_rate": 2.0528924688747094e-06,
1989
+ "loss": 1.1895,
1990
+ "step": 283
1991
+ },
1992
+ {
1993
+ "epoch": 1.1639344262295082,
1994
+ "grad_norm": 0.40283524990081787,
1995
+ "learning_rate": 2.0362131339095404e-06,
1996
+ "loss": 1.1983,
1997
+ "step": 284
1998
+ },
1999
+ {
2000
+ "epoch": 1.1680327868852458,
2001
+ "grad_norm": 0.3897688686847687,
2002
+ "learning_rate": 2.0195551517301413e-06,
2003
+ "loss": 1.0507,
2004
+ "step": 285
2005
+ },
2006
+ {
2007
+ "epoch": 1.1721311475409837,
2008
+ "grad_norm": 0.4026731848716736,
2009
+ "learning_rate": 2.0029192892713926e-06,
2010
+ "loss": 1.0473,
2011
+ "step": 286
2012
+ },
2013
+ {
2014
+ "epoch": 1.1762295081967213,
2015
+ "grad_norm": 0.36450186371803284,
2016
+ "learning_rate": 1.986306312449783e-06,
2017
+ "loss": 0.9175,
2018
+ "step": 287
2019
+ },
2020
+ {
2021
+ "epoch": 1.180327868852459,
2022
+ "grad_norm": 0.4068233072757721,
2023
+ "learning_rate": 1.9697169861281404e-06,
2024
+ "loss": 1.0451,
2025
+ "step": 288
2026
+ },
2027
+ {
2028
+ "epoch": 1.1844262295081966,
2029
+ "grad_norm": 0.4182148873806,
2030
+ "learning_rate": 1.953152074080425e-06,
2031
+ "loss": 1.061,
2032
+ "step": 289
2033
+ },
2034
+ {
2035
+ "epoch": 1.1885245901639343,
2036
+ "grad_norm": 0.45543500781059265,
2037
+ "learning_rate": 1.936612338956562e-06,
2038
+ "loss": 1.1804,
2039
+ "step": 290
2040
+ },
2041
+ {
2042
+ "epoch": 1.1926229508196722,
2043
+ "grad_norm": 0.3845888674259186,
2044
+ "learning_rate": 1.9200985422473265e-06,
2045
+ "loss": 1.0704,
2046
+ "step": 291
2047
+ },
2048
+ {
2049
+ "epoch": 1.1967213114754098,
2050
+ "grad_norm": 0.3593854606151581,
2051
+ "learning_rate": 1.9036114442492901e-06,
2052
+ "loss": 0.9752,
2053
+ "step": 292
2054
+ },
2055
+ {
2056
+ "epoch": 1.2008196721311475,
2057
+ "grad_norm": 0.35712385177612305,
2058
+ "learning_rate": 1.8871518040298092e-06,
2059
+ "loss": 0.9091,
2060
+ "step": 293
2061
+ },
2062
+ {
2063
+ "epoch": 1.2049180327868854,
2064
+ "grad_norm": 0.3920156955718994,
2065
+ "learning_rate": 1.870720379392086e-06,
2066
+ "loss": 1.0643,
2067
+ "step": 294
2068
+ },
2069
+ {
2070
+ "epoch": 1.209016393442623,
2071
+ "grad_norm": 0.3904518485069275,
2072
+ "learning_rate": 1.8543179268402728e-06,
2073
+ "loss": 1.0474,
2074
+ "step": 295
2075
+ },
2076
+ {
2077
+ "epoch": 1.2131147540983607,
2078
+ "grad_norm": 0.3518437147140503,
2079
+ "learning_rate": 1.8379452015446445e-06,
2080
+ "loss": 0.9428,
2081
+ "step": 296
2082
+ },
2083
+ {
2084
+ "epoch": 1.2172131147540983,
2085
+ "grad_norm": 0.40187716484069824,
2086
+ "learning_rate": 1.8216029573068316e-06,
2087
+ "loss": 1.186,
2088
+ "step": 297
2089
+ },
2090
+ {
2091
+ "epoch": 1.221311475409836,
2092
+ "grad_norm": 0.420866459608078,
2093
+ "learning_rate": 1.8052919465251146e-06,
2094
+ "loss": 1.1936,
2095
+ "step": 298
2096
+ },
2097
+ {
2098
+ "epoch": 1.2254098360655739,
2099
+ "grad_norm": 0.4169630706310272,
2100
+ "learning_rate": 1.7890129201597818e-06,
2101
+ "loss": 1.204,
2102
+ "step": 299
2103
+ },
2104
+ {
2105
+ "epoch": 1.2295081967213115,
2106
+ "grad_norm": 0.3857581913471222,
2107
+ "learning_rate": 1.772766627698556e-06,
2108
+ "loss": 1.0698,
2109
+ "step": 300
2110
+ },
2111
+ {
2112
+ "epoch": 1.2336065573770492,
2113
+ "grad_norm": 0.3789856731891632,
2114
+ "learning_rate": 1.7565538171220919e-06,
2115
+ "loss": 1.0557,
2116
+ "step": 301
2117
+ },
2118
+ {
2119
+ "epoch": 1.2377049180327868,
2120
+ "grad_norm": 0.38153553009033203,
2121
+ "learning_rate": 1.7403752348695296e-06,
2122
+ "loss": 1.0574,
2123
+ "step": 302
2124
+ },
2125
+ {
2126
+ "epoch": 1.2418032786885247,
2127
+ "grad_norm": 0.34474989771842957,
2128
+ "learning_rate": 1.7242316258041392e-06,
2129
+ "loss": 0.7975,
2130
+ "step": 303
2131
+ },
2132
+ {
2133
+ "epoch": 1.2459016393442623,
2134
+ "grad_norm": 0.3958248496055603,
2135
+ "learning_rate": 1.7081237331790196e-06,
2136
+ "loss": 1.1785,
2137
+ "step": 304
2138
+ },
2139
+ {
2140
+ "epoch": 1.25,
2141
+ "grad_norm": 0.41348475217819214,
2142
+ "learning_rate": 1.6920522986028832e-06,
2143
+ "loss": 1.1993,
2144
+ "step": 305
2145
+ },
2146
+ {
2147
+ "epoch": 1.2540983606557377,
2148
+ "grad_norm": 0.40045955777168274,
2149
+ "learning_rate": 1.6760180620059108e-06,
2150
+ "loss": 1.1961,
2151
+ "step": 306
2152
+ },
2153
+ {
2154
+ "epoch": 1.2581967213114753,
2155
+ "grad_norm": 0.38317596912384033,
2156
+ "learning_rate": 1.6600217616056847e-06,
2157
+ "loss": 1.058,
2158
+ "step": 307
2159
+ },
2160
+ {
2161
+ "epoch": 1.2622950819672132,
2162
+ "grad_norm": 0.40226680040359497,
2163
+ "learning_rate": 1.6440641338732017e-06,
2164
+ "loss": 1.1906,
2165
+ "step": 308
2166
+ },
2167
+ {
2168
+ "epoch": 1.2663934426229508,
2169
+ "grad_norm": 0.35721513628959656,
2170
+ "learning_rate": 1.6281459134989667e-06,
2171
+ "loss": 1.0539,
2172
+ "step": 309
2173
+ },
2174
+ {
2175
+ "epoch": 1.2704918032786885,
2176
+ "grad_norm": 0.4084949493408203,
2177
+ "learning_rate": 1.6122678333591635e-06,
2178
+ "loss": 1.1968,
2179
+ "step": 310
2180
+ },
2181
+ {
2182
+ "epoch": 1.2745901639344264,
2183
+ "grad_norm": 0.4332444369792938,
2184
+ "learning_rate": 1.59643062448192e-06,
2185
+ "loss": 1.1811,
2186
+ "step": 311
2187
+ },
2188
+ {
2189
+ "epoch": 1.278688524590164,
2190
+ "grad_norm": 0.39750587940216064,
2191
+ "learning_rate": 1.5806350160136446e-06,
2192
+ "loss": 1.1941,
2193
+ "step": 312
2194
+ },
2195
+ {
2196
+ "epoch": 1.2827868852459017,
2197
+ "grad_norm": 0.4312925338745117,
2198
+ "learning_rate": 1.564881735185462e-06,
2199
+ "loss": 1.2058,
2200
+ "step": 313
2201
+ },
2202
+ {
2203
+ "epoch": 1.2868852459016393,
2204
+ "grad_norm": 0.36609697341918945,
2205
+ "learning_rate": 1.5491715072797273e-06,
2206
+ "loss": 1.0492,
2207
+ "step": 314
2208
+ },
2209
+ {
2210
+ "epoch": 1.290983606557377,
2211
+ "grad_norm": 0.37650904059410095,
2212
+ "learning_rate": 1.533505055596636e-06,
2213
+ "loss": 1.0519,
2214
+ "step": 315
2215
+ },
2216
+ {
2217
+ "epoch": 1.2950819672131146,
2218
+ "grad_norm": 0.3837479054927826,
2219
+ "learning_rate": 1.5178831014209228e-06,
2220
+ "loss": 1.198,
2221
+ "step": 316
2222
+ },
2223
+ {
2224
+ "epoch": 1.2991803278688525,
2225
+ "grad_norm": 0.36572346091270447,
2226
+ "learning_rate": 1.5023063639886534e-06,
2227
+ "loss": 0.9298,
2228
+ "step": 317
2229
+ },
2230
+ {
2231
+ "epoch": 1.3032786885245902,
2232
+ "grad_norm": 0.41345280408859253,
2233
+ "learning_rate": 1.4867755604541106e-06,
2234
+ "loss": 1.1867,
2235
+ "step": 318
2236
+ },
2237
+ {
2238
+ "epoch": 1.3073770491803278,
2239
+ "grad_norm": 0.3215678334236145,
2240
+ "learning_rate": 1.4712914058567764e-06,
2241
+ "loss": 0.793,
2242
+ "step": 319
2243
+ },
2244
+ {
2245
+ "epoch": 1.3114754098360657,
2246
+ "grad_norm": 0.383251816034317,
2247
+ "learning_rate": 1.4558546130884124e-06,
2248
+ "loss": 0.9365,
2249
+ "step": 320
2250
+ },
2251
+ {
2252
+ "epoch": 1.3155737704918034,
2253
+ "grad_norm": 0.3637905716896057,
2254
+ "learning_rate": 1.440465892860237e-06,
2255
+ "loss": 0.9107,
2256
+ "step": 321
2257
+ },
2258
+ {
2259
+ "epoch": 1.319672131147541,
2260
+ "grad_norm": 0.4390401542186737,
2261
+ "learning_rate": 1.4251259536702078e-06,
2262
+ "loss": 1.1866,
2263
+ "step": 322
2264
+ },
2265
+ {
2266
+ "epoch": 1.3237704918032787,
2267
+ "grad_norm": 0.41249939799308777,
2268
+ "learning_rate": 1.4098355017703953e-06,
2269
+ "loss": 1.1923,
2270
+ "step": 323
2271
+ },
2272
+ {
2273
+ "epoch": 1.3278688524590163,
2274
+ "grad_norm": 0.37959370017051697,
2275
+ "learning_rate": 1.3945952411344721e-06,
2276
+ "loss": 1.0448,
2277
+ "step": 324
2278
+ },
2279
+ {
2280
+ "epoch": 1.331967213114754,
2281
+ "grad_norm": 0.39375776052474976,
2282
+ "learning_rate": 1.3794058734253032e-06,
2283
+ "loss": 1.1925,
2284
+ "step": 325
2285
+ },
2286
+ {
2287
+ "epoch": 1.3360655737704918,
2288
+ "grad_norm": 0.3478122353553772,
2289
+ "learning_rate": 1.3642680979626358e-06,
2290
+ "loss": 0.7892,
2291
+ "step": 326
2292
+ },
2293
+ {
2294
+ "epoch": 1.3401639344262295,
2295
+ "grad_norm": 0.3544688820838928,
2296
+ "learning_rate": 1.3491826116909102e-06,
2297
+ "loss": 0.9089,
2298
+ "step": 327
2299
+ },
2300
+ {
2301
+ "epoch": 1.3442622950819672,
2302
+ "grad_norm": 0.40030068159103394,
2303
+ "learning_rate": 1.334150109147161e-06,
2304
+ "loss": 1.189,
2305
+ "step": 328
2306
+ },
2307
+ {
2308
+ "epoch": 1.348360655737705,
2309
+ "grad_norm": 0.367484450340271,
2310
+ "learning_rate": 1.319171282429055e-06,
2311
+ "loss": 1.0548,
2312
+ "step": 329
2313
+ },
2314
+ {
2315
+ "epoch": 1.3524590163934427,
2316
+ "grad_norm": 0.4142979085445404,
2317
+ "learning_rate": 1.3042468211630151e-06,
2318
+ "loss": 1.0805,
2319
+ "step": 330
2320
+ },
2321
+ {
2322
+ "epoch": 1.3565573770491803,
2323
+ "grad_norm": 0.38454100489616394,
2324
+ "learning_rate": 1.289377412472475e-06,
2325
+ "loss": 1.0574,
2326
+ "step": 331
2327
+ },
2328
+ {
2329
+ "epoch": 1.360655737704918,
2330
+ "grad_norm": 0.3808493912220001,
2331
+ "learning_rate": 1.2745637409462447e-06,
2332
+ "loss": 1.0552,
2333
+ "step": 332
2334
+ },
2335
+ {
2336
+ "epoch": 1.3647540983606556,
2337
+ "grad_norm": 0.42550456523895264,
2338
+ "learning_rate": 1.2598064886069883e-06,
2339
+ "loss": 1.1951,
2340
+ "step": 333
2341
+ },
2342
+ {
2343
+ "epoch": 1.3688524590163935,
2344
+ "grad_norm": 0.4005252420902252,
2345
+ "learning_rate": 1.245106334879829e-06,
2346
+ "loss": 1.1944,
2347
+ "step": 334
2348
+ },
2349
+ {
2350
+ "epoch": 1.3729508196721312,
2351
+ "grad_norm": 0.34700438380241394,
2352
+ "learning_rate": 1.2304639565610622e-06,
2353
+ "loss": 1.0493,
2354
+ "step": 335
2355
+ },
2356
+ {
2357
+ "epoch": 1.3770491803278688,
2358
+ "grad_norm": 0.3509901762008667,
2359
+ "learning_rate": 1.2158800277869999e-06,
2360
+ "loss": 0.9181,
2361
+ "step": 336
2362
+ },
2363
+ {
2364
+ "epoch": 1.3811475409836065,
2365
+ "grad_norm": 0.3999477028846741,
2366
+ "learning_rate": 1.2013552200029308e-06,
2367
+ "loss": 1.0573,
2368
+ "step": 337
2369
+ },
2370
+ {
2371
+ "epoch": 1.3852459016393444,
2372
+ "grad_norm": 0.37565454840660095,
2373
+ "learning_rate": 1.1868902019322118e-06,
2374
+ "loss": 1.0646,
2375
+ "step": 338
2376
+ },
2377
+ {
2378
+ "epoch": 1.389344262295082,
2379
+ "grad_norm": 0.3885054886341095,
2380
+ "learning_rate": 1.1724856395454732e-06,
2381
+ "loss": 1.0544,
2382
+ "step": 339
2383
+ },
2384
+ {
2385
+ "epoch": 1.3934426229508197,
2386
+ "grad_norm": 0.3447207808494568,
2387
+ "learning_rate": 1.1581421960299606e-06,
2388
+ "loss": 0.9274,
2389
+ "step": 340
2390
+ },
2391
+ {
2392
+ "epoch": 1.3975409836065573,
2393
+ "grad_norm": 0.36152341961860657,
2394
+ "learning_rate": 1.1438605317590049e-06,
2395
+ "loss": 1.0528,
2396
+ "step": 341
2397
+ },
2398
+ {
2399
+ "epoch": 1.401639344262295,
2400
+ "grad_norm": 0.35875821113586426,
2401
+ "learning_rate": 1.1296413042616115e-06,
2402
+ "loss": 1.0553,
2403
+ "step": 342
2404
+ },
2405
+ {
2406
+ "epoch": 1.4057377049180328,
2407
+ "grad_norm": 0.37769293785095215,
2408
+ "learning_rate": 1.1154851681921947e-06,
2409
+ "loss": 1.0572,
2410
+ "step": 343
2411
+ },
2412
+ {
2413
+ "epoch": 1.4098360655737705,
2414
+ "grad_norm": 0.40368834137916565,
2415
+ "learning_rate": 1.1013927753004325e-06,
2416
+ "loss": 1.1903,
2417
+ "step": 344
2418
+ },
2419
+ {
2420
+ "epoch": 1.4139344262295082,
2421
+ "grad_norm": 0.3823951780796051,
2422
+ "learning_rate": 1.087364774401262e-06,
2423
+ "loss": 1.0474,
2424
+ "step": 345
2425
+ },
2426
+ {
2427
+ "epoch": 1.418032786885246,
2428
+ "grad_norm": 0.40228381752967834,
2429
+ "learning_rate": 1.0734018113450076e-06,
2430
+ "loss": 1.0517,
2431
+ "step": 346
2432
+ },
2433
+ {
2434
+ "epoch": 1.4221311475409837,
2435
+ "grad_norm": 0.3563799262046814,
2436
+ "learning_rate": 1.0595045289876454e-06,
2437
+ "loss": 1.0616,
2438
+ "step": 347
2439
+ },
2440
+ {
2441
+ "epoch": 1.4262295081967213,
2442
+ "grad_norm": 0.3526061475276947,
2443
+ "learning_rate": 1.0456735671612092e-06,
2444
+ "loss": 0.911,
2445
+ "step": 348
2446
+ },
2447
+ {
2448
+ "epoch": 1.430327868852459,
2449
+ "grad_norm": 0.39378660917282104,
2450
+ "learning_rate": 1.0319095626443263e-06,
2451
+ "loss": 1.1888,
2452
+ "step": 349
2453
+ },
2454
+ {
2455
+ "epoch": 1.4344262295081966,
2456
+ "grad_norm": 0.3753567934036255,
2457
+ "learning_rate": 1.0182131491329077e-06,
2458
+ "loss": 1.0633,
2459
+ "step": 350
2460
+ },
2461
+ {
2462
+ "epoch": 1.4385245901639343,
2463
+ "grad_norm": 0.40904322266578674,
2464
+ "learning_rate": 1.004584957210967e-06,
2465
+ "loss": 1.1776,
2466
+ "step": 351
2467
+ },
2468
+ {
2469
+ "epoch": 1.4426229508196722,
2470
+ "grad_norm": 0.4176303744316101,
2471
+ "learning_rate": 9.910256143215882e-07,
2472
+ "loss": 1.1925,
2473
+ "step": 352
2474
+ },
2475
+ {
2476
+ "epoch": 1.4467213114754098,
2477
+ "grad_norm": 0.3339522182941437,
2478
+ "learning_rate": 9.775357447380457e-07,
2479
+ "loss": 0.9168,
2480
+ "step": 353
2481
+ },
2482
+ {
2483
+ "epoch": 1.4508196721311475,
2484
+ "grad_norm": 0.3459610044956207,
2485
+ "learning_rate": 9.6411596953505e-07,
2486
+ "loss": 0.9125,
2487
+ "step": 354
2488
+ },
2489
+ {
2490
+ "epoch": 1.4549180327868854,
2491
+ "grad_norm": 0.364564448595047,
2492
+ "learning_rate": 9.507669065601663e-07,
2493
+ "loss": 1.0402,
2494
+ "step": 355
2495
+ },
2496
+ {
2497
+ "epoch": 1.459016393442623,
2498
+ "grad_norm": 0.4287342131137848,
2499
+ "learning_rate": 9.374891704053596e-07,
2500
+ "loss": 1.1834,
2501
+ "step": 356
2502
+ },
2503
+ {
2504
+ "epoch": 1.4631147540983607,
2505
+ "grad_norm": 0.41111865639686584,
2506
+ "learning_rate": 9.242833723787051e-07,
2507
+ "loss": 1.1882,
2508
+ "step": 357
2509
+ },
2510
+ {
2511
+ "epoch": 1.4672131147540983,
2512
+ "grad_norm": 0.3417908549308777,
2513
+ "learning_rate": 9.111501204762368e-07,
2514
+ "loss": 0.9154,
2515
+ "step": 358
2516
+ },
2517
+ {
2518
+ "epoch": 1.471311475409836,
2519
+ "grad_norm": 0.3898267447948456,
2520
+ "learning_rate": 8.980900193539632e-07,
2521
+ "loss": 1.2006,
2522
+ "step": 359
2523
+ },
2524
+ {
2525
+ "epoch": 1.4754098360655736,
2526
+ "grad_norm": 0.3840121924877167,
2527
+ "learning_rate": 8.851036703000223e-07,
2528
+ "loss": 1.1914,
2529
+ "step": 360
2530
+ },
2531
+ {
2532
+ "epoch": 1.4795081967213115,
2533
+ "grad_norm": 0.3572332561016083,
2534
+ "learning_rate": 8.72191671207e-07,
2535
+ "loss": 0.9258,
2536
+ "step": 361
2537
+ },
2538
+ {
2539
+ "epoch": 1.4836065573770492,
2540
+ "grad_norm": 0.4421536922454834,
2541
+ "learning_rate": 8.593546165444078e-07,
2542
+ "loss": 1.189,
2543
+ "step": 362
2544
+ },
2545
+ {
2546
+ "epoch": 1.4877049180327868,
2547
+ "grad_norm": 0.347675621509552,
2548
+ "learning_rate": 8.465930973313033e-07,
2549
+ "loss": 0.9129,
2550
+ "step": 363
2551
+ },
2552
+ {
2553
+ "epoch": 1.4918032786885247,
2554
+ "grad_norm": 0.33754611015319824,
2555
+ "learning_rate": 8.339077011090921e-07,
2556
+ "loss": 0.9226,
2557
+ "step": 364
2558
+ },
2559
+ {
2560
+ "epoch": 1.4959016393442623,
2561
+ "grad_norm": 0.38230884075164795,
2562
+ "learning_rate": 8.212990119144662e-07,
2563
+ "loss": 1.1968,
2564
+ "step": 365
2565
+ },
2566
+ {
2567
+ "epoch": 1.5,
2568
+ "grad_norm": 0.4168895483016968,
2569
+ "learning_rate": 8.08767610252523e-07,
2570
+ "loss": 1.195,
2571
+ "step": 366
2572
+ },
2573
+ {
2574
+ "epoch": 1.5040983606557377,
2575
+ "grad_norm": 0.3592976927757263,
2576
+ "learning_rate": 7.963140730700337e-07,
2577
+ "loss": 1.0456,
2578
+ "step": 367
2579
+ },
2580
+ {
2581
+ "epoch": 1.5081967213114753,
2582
+ "grad_norm": 0.3613436818122864,
2583
+ "learning_rate": 7.839389737288816e-07,
2584
+ "loss": 1.0572,
2585
+ "step": 368
2586
+ },
2587
+ {
2588
+ "epoch": 1.512295081967213,
2589
+ "grad_norm": 0.4102155268192291,
2590
+ "learning_rate": 7.716428819796681e-07,
2591
+ "loss": 1.1947,
2592
+ "step": 369
2593
+ },
2594
+ {
2595
+ "epoch": 1.5163934426229508,
2596
+ "grad_norm": 0.4009557366371155,
2597
+ "learning_rate": 7.594263639354757e-07,
2598
+ "loss": 1.1962,
2599
+ "step": 370
2600
+ },
2601
+ {
2602
+ "epoch": 1.5204918032786885,
2603
+ "grad_norm": 0.3762926161289215,
2604
+ "learning_rate": 7.472899820458099e-07,
2605
+ "loss": 1.1895,
2606
+ "step": 371
2607
+ },
2608
+ {
2609
+ "epoch": 1.5245901639344264,
2610
+ "grad_norm": 0.41125839948654175,
2611
+ "learning_rate": 7.352342950706964e-07,
2612
+ "loss": 1.0551,
2613
+ "step": 372
2614
+ },
2615
+ {
2616
+ "epoch": 1.528688524590164,
2617
+ "grad_norm": 0.3519588112831116,
2618
+ "learning_rate": 7.232598580549652e-07,
2619
+ "loss": 1.0373,
2620
+ "step": 373
2621
+ },
2622
+ {
2623
+ "epoch": 1.5327868852459017,
2624
+ "grad_norm": 0.3900514841079712,
2625
+ "learning_rate": 7.113672223026879e-07,
2626
+ "loss": 1.063,
2627
+ "step": 374
2628
+ },
2629
+ {
2630
+ "epoch": 1.5368852459016393,
2631
+ "grad_norm": 0.3782947063446045,
2632
+ "learning_rate": 6.995569353518025e-07,
2633
+ "loss": 1.1996,
2634
+ "step": 375
2635
+ },
2636
+ {
2637
+ "epoch": 1.540983606557377,
2638
+ "grad_norm": 0.3795226216316223,
2639
+ "learning_rate": 6.878295409488986e-07,
2640
+ "loss": 1.0528,
2641
+ "step": 376
2642
+ },
2643
+ {
2644
+ "epoch": 1.5450819672131146,
2645
+ "grad_norm": 0.3658047616481781,
2646
+ "learning_rate": 6.761855790241858e-07,
2647
+ "loss": 1.0476,
2648
+ "step": 377
2649
+ },
2650
+ {
2651
+ "epoch": 1.5491803278688525,
2652
+ "grad_norm": 0.3761068880558014,
2653
+ "learning_rate": 6.646255856666383e-07,
2654
+ "loss": 1.0527,
2655
+ "step": 378
2656
+ },
2657
+ {
2658
+ "epoch": 1.5532786885245902,
2659
+ "grad_norm": 0.4113335907459259,
2660
+ "learning_rate": 6.531500930993081e-07,
2661
+ "loss": 1.0453,
2662
+ "step": 379
2663
+ },
2664
+ {
2665
+ "epoch": 1.5573770491803278,
2666
+ "grad_norm": 0.3439164161682129,
2667
+ "learning_rate": 6.417596296548243e-07,
2668
+ "loss": 1.0552,
2669
+ "step": 380
2670
+ },
2671
+ {
2672
+ "epoch": 1.5614754098360657,
2673
+ "grad_norm": 0.391366183757782,
2674
+ "learning_rate": 6.304547197510677e-07,
2675
+ "loss": 1.1813,
2676
+ "step": 381
2677
+ },
2678
+ {
2679
+ "epoch": 1.5655737704918034,
2680
+ "grad_norm": 0.3553384840488434,
2681
+ "learning_rate": 6.192358838670293e-07,
2682
+ "loss": 1.1861,
2683
+ "step": 382
2684
+ },
2685
+ {
2686
+ "epoch": 1.569672131147541,
2687
+ "grad_norm": 0.3192722201347351,
2688
+ "learning_rate": 6.081036385188424e-07,
2689
+ "loss": 0.7907,
2690
+ "step": 383
2691
+ },
2692
+ {
2693
+ "epoch": 1.5737704918032787,
2694
+ "grad_norm": 0.37627875804901123,
2695
+ "learning_rate": 5.970584962360052e-07,
2696
+ "loss": 1.0505,
2697
+ "step": 384
2698
+ },
2699
+ {
2700
+ "epoch": 1.5778688524590163,
2701
+ "grad_norm": 0.3651406168937683,
2702
+ "learning_rate": 5.861009655377859e-07,
2703
+ "loss": 1.1864,
2704
+ "step": 385
2705
+ },
2706
+ {
2707
+ "epoch": 1.581967213114754,
2708
+ "grad_norm": 0.35879752039909363,
2709
+ "learning_rate": 5.752315509098044e-07,
2710
+ "loss": 1.044,
2711
+ "step": 386
2712
+ },
2713
+ {
2714
+ "epoch": 1.5860655737704918,
2715
+ "grad_norm": 0.40529587864875793,
2716
+ "learning_rate": 5.644507527808135e-07,
2717
+ "loss": 1.1828,
2718
+ "step": 387
2719
+ },
2720
+ {
2721
+ "epoch": 1.5901639344262295,
2722
+ "grad_norm": 0.3214077949523926,
2723
+ "learning_rate": 5.537590674996521e-07,
2724
+ "loss": 0.9237,
2725
+ "step": 388
2726
+ },
2727
+ {
2728
+ "epoch": 1.5942622950819674,
2729
+ "grad_norm": 0.3651806712150574,
2730
+ "learning_rate": 5.431569873123965e-07,
2731
+ "loss": 1.0476,
2732
+ "step": 389
2733
+ },
2734
+ {
2735
+ "epoch": 1.598360655737705,
2736
+ "grad_norm": 0.3352358937263489,
2737
+ "learning_rate": 5.326450003396977e-07,
2738
+ "loss": 0.9129,
2739
+ "step": 390
2740
+ },
2741
+ {
2742
+ "epoch": 1.6024590163934427,
2743
+ "grad_norm": 0.32190605998039246,
2744
+ "learning_rate": 5.222235905543083e-07,
2745
+ "loss": 0.9306,
2746
+ "step": 391
2747
+ },
2748
+ {
2749
+ "epoch": 1.6065573770491803,
2750
+ "grad_norm": 0.38101914525032043,
2751
+ "learning_rate": 5.118932377587984e-07,
2752
+ "loss": 1.1886,
2753
+ "step": 392
2754
+ },
2755
+ {
2756
+ "epoch": 1.610655737704918,
2757
+ "grad_norm": 0.35761508345603943,
2758
+ "learning_rate": 5.016544175634669e-07,
2759
+ "loss": 1.0481,
2760
+ "step": 393
2761
+ },
2762
+ {
2763
+ "epoch": 1.6147540983606556,
2764
+ "grad_norm": 0.39952346682548523,
2765
+ "learning_rate": 4.915076013644454e-07,
2766
+ "loss": 1.1971,
2767
+ "step": 394
2768
+ },
2769
+ {
2770
+ "epoch": 1.6188524590163933,
2771
+ "grad_norm": 0.35433509945869446,
2772
+ "learning_rate": 4.814532563219921e-07,
2773
+ "loss": 1.0526,
2774
+ "step": 395
2775
+ },
2776
+ {
2777
+ "epoch": 1.6229508196721312,
2778
+ "grad_norm": 0.3517802059650421,
2779
+ "learning_rate": 4.714918453389875e-07,
2780
+ "loss": 0.9118,
2781
+ "step": 396
2782
+ },
2783
+ {
2784
+ "epoch": 1.6270491803278688,
2785
+ "grad_norm": 0.34128230810165405,
2786
+ "learning_rate": 4.6162382703961836e-07,
2787
+ "loss": 1.0548,
2788
+ "step": 397
2789
+ },
2790
+ {
2791
+ "epoch": 1.6311475409836067,
2792
+ "grad_norm": 0.38127487897872925,
2793
+ "learning_rate": 4.51849655748266e-07,
2794
+ "loss": 1.1889,
2795
+ "step": 398
2796
+ },
2797
+ {
2798
+ "epoch": 1.6352459016393444,
2799
+ "grad_norm": 0.3676946759223938,
2800
+ "learning_rate": 4.421697814685869e-07,
2801
+ "loss": 1.1909,
2802
+ "step": 399
2803
+ },
2804
+ {
2805
+ "epoch": 1.639344262295082,
2806
+ "grad_norm": 0.3842563033103943,
2807
+ "learning_rate": 4.325846498627945e-07,
2808
+ "loss": 1.1875,
2809
+ "step": 400
2810
+ },
2811
+ {
2812
+ "epoch": 1.6434426229508197,
2813
+ "grad_norm": 0.35198283195495605,
2814
+ "learning_rate": 4.230947022311441e-07,
2815
+ "loss": 1.1767,
2816
+ "step": 401
2817
+ },
2818
+ {
2819
+ "epoch": 1.6475409836065573,
2820
+ "grad_norm": 0.44072988629341125,
2821
+ "learning_rate": 4.137003754916105e-07,
2822
+ "loss": 1.19,
2823
+ "step": 402
2824
+ },
2825
+ {
2826
+ "epoch": 1.651639344262295,
2827
+ "grad_norm": 0.29806357622146606,
2828
+ "learning_rate": 4.044021021597769e-07,
2829
+ "loss": 0.7754,
2830
+ "step": 403
2831
+ },
2832
+ {
2833
+ "epoch": 1.6557377049180326,
2834
+ "grad_norm": 0.3621821105480194,
2835
+ "learning_rate": 3.952003103289179e-07,
2836
+ "loss": 1.1835,
2837
+ "step": 404
2838
+ },
2839
+ {
2840
+ "epoch": 1.6598360655737705,
2841
+ "grad_norm": 0.37856945395469666,
2842
+ "learning_rate": 3.8609542365029146e-07,
2843
+ "loss": 1.0468,
2844
+ "step": 405
2845
+ },
2846
+ {
2847
+ "epoch": 1.6639344262295082,
2848
+ "grad_norm": 0.38100022077560425,
2849
+ "learning_rate": 3.770878613136372e-07,
2850
+ "loss": 1.2007,
2851
+ "step": 406
2852
+ },
2853
+ {
2854
+ "epoch": 1.668032786885246,
2855
+ "grad_norm": 0.3964683413505554,
2856
+ "learning_rate": 3.681780380278696e-07,
2857
+ "loss": 1.193,
2858
+ "step": 407
2859
+ },
2860
+ {
2861
+ "epoch": 1.6721311475409837,
2862
+ "grad_norm": 0.3703697621822357,
2863
+ "learning_rate": 3.5936636400199313e-07,
2864
+ "loss": 1.0487,
2865
+ "step": 408
2866
+ },
2867
+ {
2868
+ "epoch": 1.6762295081967213,
2869
+ "grad_norm": 0.3408863842487335,
2870
+ "learning_rate": 3.506532449262098e-07,
2871
+ "loss": 0.9107,
2872
+ "step": 409
2873
+ },
2874
+ {
2875
+ "epoch": 1.680327868852459,
2876
+ "grad_norm": 0.373950332403183,
2877
+ "learning_rate": 3.4203908195324486e-07,
2878
+ "loss": 1.1991,
2879
+ "step": 410
2880
+ },
2881
+ {
2882
+ "epoch": 1.6844262295081966,
2883
+ "grad_norm": 0.3467581868171692,
2884
+ "learning_rate": 3.3352427167987536e-07,
2885
+ "loss": 1.0514,
2886
+ "step": 411
2887
+ },
2888
+ {
2889
+ "epoch": 1.6885245901639343,
2890
+ "grad_norm": 0.33601921796798706,
2891
+ "learning_rate": 3.2510920612867284e-07,
2892
+ "loss": 0.9061,
2893
+ "step": 412
2894
+ },
2895
+ {
2896
+ "epoch": 1.6926229508196722,
2897
+ "grad_norm": 0.35385650396347046,
2898
+ "learning_rate": 3.1679427272995304e-07,
2899
+ "loss": 1.0447,
2900
+ "step": 413
2901
+ },
2902
+ {
2903
+ "epoch": 1.6967213114754098,
2904
+ "grad_norm": 0.33906129002571106,
2905
+ "learning_rate": 3.0857985430393837e-07,
2906
+ "loss": 1.0326,
2907
+ "step": 414
2908
+ },
2909
+ {
2910
+ "epoch": 1.7008196721311475,
2911
+ "grad_norm": 0.35674095153808594,
2912
+ "learning_rate": 3.004663290431348e-07,
2913
+ "loss": 0.908,
2914
+ "step": 415
2915
+ },
2916
+ {
2917
+ "epoch": 1.7049180327868854,
2918
+ "grad_norm": 0.3373037576675415,
2919
+ "learning_rate": 2.924540704949175e-07,
2920
+ "loss": 1.046,
2921
+ "step": 416
2922
+ },
2923
+ {
2924
+ "epoch": 1.709016393442623,
2925
+ "grad_norm": 0.3760300874710083,
2926
+ "learning_rate": 2.845434475443351e-07,
2927
+ "loss": 1.191,
2928
+ "step": 417
2929
+ },
2930
+ {
2931
+ "epoch": 1.7131147540983607,
2932
+ "grad_norm": 0.352682501077652,
2933
+ "learning_rate": 2.767348243971235e-07,
2934
+ "loss": 1.1804,
2935
+ "step": 418
2936
+ },
2937
+ {
2938
+ "epoch": 1.7172131147540983,
2939
+ "grad_norm": 0.3687870502471924,
2940
+ "learning_rate": 2.6902856056294057e-07,
2941
+ "loss": 1.2025,
2942
+ "step": 419
2943
+ },
2944
+ {
2945
+ "epoch": 1.721311475409836,
2946
+ "grad_norm": 0.3514711558818817,
2947
+ "learning_rate": 2.6142501083881195e-07,
2948
+ "loss": 1.0661,
2949
+ "step": 420
2950
+ },
2951
+ {
2952
+ "epoch": 1.7254098360655736,
2953
+ "grad_norm": 0.37159159779548645,
2954
+ "learning_rate": 2.539245252927969e-07,
2955
+ "loss": 1.1934,
2956
+ "step": 421
2957
+ },
2958
+ {
2959
+ "epoch": 1.7295081967213115,
2960
+ "grad_norm": 0.3892461955547333,
2961
+ "learning_rate": 2.4652744924787253e-07,
2962
+ "loss": 1.1965,
2963
+ "step": 422
2964
+ },
2965
+ {
2966
+ "epoch": 1.7336065573770492,
2967
+ "grad_norm": 0.33413994312286377,
2968
+ "learning_rate": 2.3923412326603307e-07,
2969
+ "loss": 0.911,
2970
+ "step": 423
2971
+ },
2972
+ {
2973
+ "epoch": 1.737704918032787,
2974
+ "grad_norm": 0.32191577553749084,
2975
+ "learning_rate": 2.3204488313261136e-07,
2976
+ "loss": 0.9267,
2977
+ "step": 424
2978
+ },
2979
+ {
2980
+ "epoch": 1.7418032786885247,
2981
+ "grad_norm": 0.3706655502319336,
2982
+ "learning_rate": 2.2496005984081887e-07,
2983
+ "loss": 1.1904,
2984
+ "step": 425
2985
+ },
2986
+ {
2987
+ "epoch": 1.7459016393442623,
2988
+ "grad_norm": 0.32858824729919434,
2989
+ "learning_rate": 2.1797997957650807e-07,
2990
+ "loss": 0.9078,
2991
+ "step": 426
2992
+ },
2993
+ {
2994
+ "epoch": 1.75,
2995
+ "grad_norm": 0.33270376920700073,
2996
+ "learning_rate": 2.1110496370315258e-07,
2997
+ "loss": 0.9167,
2998
+ "step": 427
2999
+ },
3000
+ {
3001
+ "epoch": 1.7540983606557377,
3002
+ "grad_norm": 0.31928005814552307,
3003
+ "learning_rate": 2.0433532874705369e-07,
3004
+ "loss": 0.9172,
3005
+ "step": 428
3006
+ },
3007
+ {
3008
+ "epoch": 1.7581967213114753,
3009
+ "grad_norm": 0.37498903274536133,
3010
+ "learning_rate": 1.9767138638276616e-07,
3011
+ "loss": 1.1895,
3012
+ "step": 429
3013
+ },
3014
+ {
3015
+ "epoch": 1.762295081967213,
3016
+ "grad_norm": 0.41034555435180664,
3017
+ "learning_rate": 1.911134434187481e-07,
3018
+ "loss": 1.1859,
3019
+ "step": 430
3020
+ },
3021
+ {
3022
+ "epoch": 1.7663934426229508,
3023
+ "grad_norm": 0.3766476511955261,
3024
+ "learning_rate": 1.8466180178323856e-07,
3025
+ "loss": 1.1983,
3026
+ "step": 431
3027
+ },
3028
+ {
3029
+ "epoch": 1.7704918032786885,
3030
+ "grad_norm": 0.36230266094207764,
3031
+ "learning_rate": 1.7831675851035264e-07,
3032
+ "loss": 1.0443,
3033
+ "step": 432
3034
+ },
3035
+ {
3036
+ "epoch": 1.7745901639344264,
3037
+ "grad_norm": 0.37963178753852844,
3038
+ "learning_rate": 1.7207860572640872e-07,
3039
+ "loss": 1.1877,
3040
+ "step": 433
3041
+ },
3042
+ {
3043
+ "epoch": 1.778688524590164,
3044
+ "grad_norm": 0.3309424817562103,
3045
+ "learning_rate": 1.6594763063647822e-07,
3046
+ "loss": 1.0387,
3047
+ "step": 434
3048
+ },
3049
+ {
3050
+ "epoch": 1.7827868852459017,
3051
+ "grad_norm": 0.35661202669143677,
3052
+ "learning_rate": 1.5992411551116304e-07,
3053
+ "loss": 1.185,
3054
+ "step": 435
3055
+ },
3056
+ {
3057
+ "epoch": 1.7868852459016393,
3058
+ "grad_norm": 0.3461817502975464,
3059
+ "learning_rate": 1.5400833767359847e-07,
3060
+ "loss": 1.0481,
3061
+ "step": 436
3062
+ },
3063
+ {
3064
+ "epoch": 1.790983606557377,
3065
+ "grad_norm": 0.3197766840457916,
3066
+ "learning_rate": 1.482005694866867e-07,
3067
+ "loss": 0.9093,
3068
+ "step": 437
3069
+ },
3070
+ {
3071
+ "epoch": 1.7950819672131146,
3072
+ "grad_norm": 0.3562556505203247,
3073
+ "learning_rate": 1.4250107834055725e-07,
3074
+ "loss": 1.0407,
3075
+ "step": 438
3076
+ },
3077
+ {
3078
+ "epoch": 1.7991803278688525,
3079
+ "grad_norm": 0.3429252803325653,
3080
+ "learning_rate": 1.369101266402545e-07,
3081
+ "loss": 0.9023,
3082
+ "step": 439
3083
+ },
3084
+ {
3085
+ "epoch": 1.8032786885245902,
3086
+ "grad_norm": 0.3334152102470398,
3087
+ "learning_rate": 1.3142797179365868e-07,
3088
+ "loss": 0.9164,
3089
+ "step": 440
3090
+ },
3091
+ {
3092
+ "epoch": 1.8073770491803278,
3093
+ "grad_norm": 0.36567139625549316,
3094
+ "learning_rate": 1.2605486619963276e-07,
3095
+ "loss": 1.1944,
3096
+ "step": 441
3097
+ },
3098
+ {
3099
+ "epoch": 1.8114754098360657,
3100
+ "grad_norm": 0.36539405584335327,
3101
+ "learning_rate": 1.207910572364046e-07,
3102
+ "loss": 1.1742,
3103
+ "step": 442
3104
+ },
3105
+ {
3106
+ "epoch": 1.8155737704918034,
3107
+ "grad_norm": 0.34080713987350464,
3108
+ "learning_rate": 1.1563678725017513e-07,
3109
+ "loss": 1.0656,
3110
+ "step": 443
3111
+ },
3112
+ {
3113
+ "epoch": 1.819672131147541,
3114
+ "grad_norm": 0.3428449332714081,
3115
+ "learning_rate": 1.1059229354396128e-07,
3116
+ "loss": 1.0468,
3117
+ "step": 444
3118
+ },
3119
+ {
3120
+ "epoch": 1.8237704918032787,
3121
+ "grad_norm": 0.3538898825645447,
3122
+ "learning_rate": 1.056578083666726e-07,
3123
+ "loss": 1.0458,
3124
+ "step": 445
3125
+ },
3126
+ {
3127
+ "epoch": 1.8278688524590163,
3128
+ "grad_norm": 0.3626593053340912,
3129
+ "learning_rate": 1.008335589024148e-07,
3130
+ "loss": 1.0576,
3131
+ "step": 446
3132
+ },
3133
+ {
3134
+ "epoch": 1.831967213114754,
3135
+ "grad_norm": 0.36676186323165894,
3136
+ "learning_rate": 9.611976726003392e-08,
3137
+ "loss": 1.1887,
3138
+ "step": 447
3139
+ },
3140
+ {
3141
+ "epoch": 1.8360655737704918,
3142
+ "grad_norm": 0.3630443215370178,
3143
+ "learning_rate": 9.151665046288727e-08,
3144
+ "loss": 1.1901,
3145
+ "step": 448
3146
+ },
3147
+ {
3148
+ "epoch": 1.8401639344262295,
3149
+ "grad_norm": 0.37578243017196655,
3150
+ "learning_rate": 8.702442043885512e-08,
3151
+ "loss": 1.1851,
3152
+ "step": 449
3153
+ },
3154
+ {
3155
+ "epoch": 1.8442622950819674,
3156
+ "grad_norm": 0.3619200587272644,
3157
+ "learning_rate": 8.264328401057897e-08,
3158
+ "loss": 1.1918,
3159
+ "step": 450
3160
+ },
3161
+ {
3162
+ "epoch": 1.848360655737705,
3163
+ "grad_norm": 0.3683510422706604,
3164
+ "learning_rate": 7.837344288594395e-08,
3165
+ "loss": 1.1876,
3166
+ "step": 451
3167
+ },
3168
+ {
3169
+ "epoch": 1.8524590163934427,
3170
+ "grad_norm": 0.34541499614715576,
3171
+ "learning_rate": 7.421509364878927e-08,
3172
+ "loss": 1.0548,
3173
+ "step": 452
3174
+ },
3175
+ {
3176
+ "epoch": 1.8565573770491803,
3177
+ "grad_norm": 0.3492802083492279,
3178
+ "learning_rate": 7.016842774985821e-08,
3179
+ "loss": 1.1917,
3180
+ "step": 453
3181
+ },
3182
+ {
3183
+ "epoch": 1.860655737704918,
3184
+ "grad_norm": 0.33986085653305054,
3185
+ "learning_rate": 6.623363149798529e-08,
3186
+ "loss": 1.0613,
3187
+ "step": 454
3188
+ },
3189
+ {
3190
+ "epoch": 1.8647540983606556,
3191
+ "grad_norm": 0.3286001980304718,
3192
+ "learning_rate": 6.241088605151518e-08,
3193
+ "loss": 0.912,
3194
+ "step": 455
3195
+ },
3196
+ {
3197
+ "epoch": 1.8688524590163933,
3198
+ "grad_norm": 0.3603418469429016,
3199
+ "learning_rate": 5.870036740996565e-08,
3200
+ "loss": 1.1989,
3201
+ "step": 456
3202
+ },
3203
+ {
3204
+ "epoch": 1.8729508196721312,
3205
+ "grad_norm": 0.35810166597366333,
3206
+ "learning_rate": 5.5102246405922823e-08,
3207
+ "loss": 1.0475,
3208
+ "step": 457
3209
+ },
3210
+ {
3211
+ "epoch": 1.8770491803278688,
3212
+ "grad_norm": 0.3316009044647217,
3213
+ "learning_rate": 5.161668869717584e-08,
3214
+ "loss": 0.9209,
3215
+ "step": 458
3216
+ },
3217
+ {
3218
+ "epoch": 1.8811475409836067,
3219
+ "grad_norm": 0.3086305856704712,
3220
+ "learning_rate": 4.824385475909049e-08,
3221
+ "loss": 0.909,
3222
+ "step": 459
3223
+ },
3224
+ {
3225
+ "epoch": 1.8852459016393444,
3226
+ "grad_norm": 0.39284205436706543,
3227
+ "learning_rate": 4.49838998772209e-08,
3228
+ "loss": 1.1894,
3229
+ "step": 460
3230
+ },
3231
+ {
3232
+ "epoch": 1.889344262295082,
3233
+ "grad_norm": 0.35450220108032227,
3234
+ "learning_rate": 4.183697414016058e-08,
3235
+ "loss": 1.1992,
3236
+ "step": 461
3237
+ },
3238
+ {
3239
+ "epoch": 1.8934426229508197,
3240
+ "grad_norm": 0.2877340614795685,
3241
+ "learning_rate": 3.8803222432630685e-08,
3242
+ "loss": 0.7784,
3243
+ "step": 462
3244
+ },
3245
+ {
3246
+ "epoch": 1.8975409836065573,
3247
+ "grad_norm": 0.35768765211105347,
3248
+ "learning_rate": 3.5882784428812324e-08,
3249
+ "loss": 1.179,
3250
+ "step": 463
3251
+ },
3252
+ {
3253
+ "epoch": 1.901639344262295,
3254
+ "grad_norm": 0.3440133333206177,
3255
+ "learning_rate": 3.3075794585912534e-08,
3256
+ "loss": 1.0472,
3257
+ "step": 464
3258
+ },
3259
+ {
3260
+ "epoch": 1.9057377049180326,
3261
+ "grad_norm": 0.35129430890083313,
3262
+ "learning_rate": 3.038238213797673e-08,
3263
+ "loss": 1.057,
3264
+ "step": 465
3265
+ },
3266
+ {
3267
+ "epoch": 1.9098360655737705,
3268
+ "grad_norm": 0.3321053981781006,
3269
+ "learning_rate": 2.7802671089937338e-08,
3270
+ "loss": 0.9022,
3271
+ "step": 466
3272
+ },
3273
+ {
3274
+ "epoch": 1.9139344262295082,
3275
+ "grad_norm": 0.377127468585968,
3276
+ "learning_rate": 2.5336780211905055e-08,
3277
+ "loss": 1.1916,
3278
+ "step": 467
3279
+ },
3280
+ {
3281
+ "epoch": 1.918032786885246,
3282
+ "grad_norm": 0.3552931845188141,
3283
+ "learning_rate": 2.2984823033700142e-08,
3284
+ "loss": 1.0472,
3285
+ "step": 468
3286
+ },
3287
+ {
3288
+ "epoch": 1.9221311475409837,
3289
+ "grad_norm": 0.3283768594264984,
3290
+ "learning_rate": 2.0746907839626075e-08,
3291
+ "loss": 1.0466,
3292
+ "step": 469
3293
+ },
3294
+ {
3295
+ "epoch": 1.9262295081967213,
3296
+ "grad_norm": 0.36822226643562317,
3297
+ "learning_rate": 1.862313766348406e-08,
3298
+ "loss": 1.1895,
3299
+ "step": 470
3300
+ },
3301
+ {
3302
+ "epoch": 1.930327868852459,
3303
+ "grad_norm": 0.34936660528182983,
3304
+ "learning_rate": 1.6613610283828797e-08,
3305
+ "loss": 1.1862,
3306
+ "step": 471
3307
+ },
3308
+ {
3309
+ "epoch": 1.9344262295081966,
3310
+ "grad_norm": 0.3748954236507416,
3311
+ "learning_rate": 1.4718418219468178e-08,
3312
+ "loss": 1.1841,
3313
+ "step": 472
3314
+ },
3315
+ {
3316
+ "epoch": 1.9385245901639343,
3317
+ "grad_norm": 0.33568257093429565,
3318
+ "learning_rate": 1.2937648725201424e-08,
3319
+ "loss": 1.0695,
3320
+ "step": 473
3321
+ },
3322
+ {
3323
+ "epoch": 1.9426229508196722,
3324
+ "grad_norm": 0.3411601781845093,
3325
+ "learning_rate": 1.127138378780368e-08,
3326
+ "loss": 1.0399,
3327
+ "step": 474
3328
+ },
3329
+ {
3330
+ "epoch": 1.9467213114754098,
3331
+ "grad_norm": 0.33146122097969055,
3332
+ "learning_rate": 9.719700122250985e-09,
3333
+ "loss": 1.0427,
3334
+ "step": 475
3335
+ },
3336
+ {
3337
+ "epoch": 1.9508196721311475,
3338
+ "grad_norm": 0.384295791387558,
3339
+ "learning_rate": 8.282669168188095e-09,
3340
+ "loss": 1.1944,
3341
+ "step": 476
3342
+ },
3343
+ {
3344
+ "epoch": 1.9549180327868854,
3345
+ "grad_norm": 0.3183731734752655,
3346
+ "learning_rate": 6.960357086639169e-09,
3347
+ "loss": 0.9175,
3348
+ "step": 477
3349
+ },
3350
+ {
3351
+ "epoch": 1.959016393442623,
3352
+ "grad_norm": 0.4038081765174866,
3353
+ "learning_rate": 5.752824756961872e-09,
3354
+ "loss": 1.1902,
3355
+ "step": 478
3356
+ },
3357
+ {
3358
+ "epoch": 1.9631147540983607,
3359
+ "grad_norm": 0.35792505741119385,
3360
+ "learning_rate": 4.660127774045175e-09,
3361
+ "loss": 1.0414,
3362
+ "step": 479
3363
+ },
3364
+ {
3365
+ "epoch": 1.9672131147540983,
3366
+ "grad_norm": 0.35738375782966614,
3367
+ "learning_rate": 3.682316445748346e-09,
3368
+ "loss": 1.1764,
3369
+ "step": 480
3370
+ },
3371
+ {
3372
+ "epoch": 1.971311475409836,
3373
+ "grad_norm": 0.3557555675506592,
3374
+ "learning_rate": 2.819435790586411e-09,
3375
+ "loss": 1.075,
3376
+ "step": 481
3377
+ },
3378
+ {
3379
+ "epoch": 1.9754098360655736,
3380
+ "grad_norm": 0.3181816041469574,
3381
+ "learning_rate": 2.0715255356559826e-09,
3382
+ "loss": 0.918,
3383
+ "step": 482
3384
+ },
3385
+ {
3386
+ "epoch": 1.9795081967213115,
3387
+ "grad_norm": 0.3422686457633972,
3388
+ "learning_rate": 1.4386201148072766e-09,
3389
+ "loss": 1.1884,
3390
+ "step": 483
3391
+ },
3392
+ {
3393
+ "epoch": 1.9836065573770492,
3394
+ "grad_norm": 0.3369675278663635,
3395
+ "learning_rate": 9.20748667058713e-10,
3396
+ "loss": 1.0697,
3397
+ "step": 484
3398
+ },
3399
+ {
3400
+ "epoch": 1.987704918032787,
3401
+ "grad_norm": 0.3343188762664795,
3402
+ "learning_rate": 5.179350352541024e-10,
3403
+ "loss": 1.0364,
3404
+ "step": 485
3405
+ },
3406
+ {
3407
+ "epoch": 1.9918032786885247,
3408
+ "grad_norm": 0.3667546808719635,
3409
+ "learning_rate": 2.301977649668552e-10,
3410
+ "loss": 1.1884,
3411
+ "step": 486
3412
+ },
3413
+ {
3414
+ "epoch": 1.9959016393442623,
3415
+ "grad_norm": 0.36699703335762024,
3416
+ "learning_rate": 5.755010364455471e-11,
3417
+ "loss": 1.1824,
3418
+ "step": 487
3419
+ },
3420
+ {
3421
+ "epoch": 2.0,
3422
+ "grad_norm": 0.36600202322006226,
3423
+ "learning_rate": 0.0,
3424
+ "loss": 1.1781,
3425
+ "step": 488
3426
+ }
3427
+ ],
3428
+ "logging_steps": 1.0,
3429
+ "max_steps": 488,
3430
+ "num_input_tokens_seen": 0,
3431
+ "num_train_epochs": 2,
3432
+ "save_steps": 500,
3433
+ "stateful_callbacks": {
3434
+ "TrainerControl": {
3435
+ "args": {
3436
+ "should_epoch_stop": false,
3437
+ "should_evaluate": false,
3438
+ "should_log": false,
3439
+ "should_save": true,
3440
+ "should_training_stop": true
3441
+ },
3442
+ "attributes": {}
3443
+ }
3444
+ },
3445
+ "total_flos": 1.5344105028163994e+18,
3446
+ "train_batch_size": 1,
3447
+ "trial_name": null,
3448
+ "trial_params": null
3449
+ }
checkpoint-488/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dd386288265f283057f88dba0dab88b303bbbbfdb682a79fd3381039872064d1
+ size 5816