tolga-ozturk committed
Commit 787baf9 · 1 Parent(s): 1669290

Update config.json

Files changed (1):
  1. config.json +3 -2
config.json CHANGED
@@ -2,7 +2,7 @@
   "_name_or_path": "THUMT/mGPT",
   "activation_function": "gelu_new",
   "architectures": [
-    "GPT2LMHeadModel"
+    "ModelNSP"
   ],
   "attn_pdrop": 0.1,
   "bos_token_id": 1,
@@ -16,7 +16,7 @@
   "n_embd": 1024,
   "n_head": 16,
   "n_inner": 4096,
-  "n_layer": 24,
+  "n_layer": 27,
   "n_positions": 1024,
   "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
@@ -27,6 +27,7 @@
   "summary_proj_to_labels": true,
   "summary_type": "cls_index",
   "summary_use_proj": true,
+  "torch_dtype": "float32",
   "transformers_version": "4.18.0",
   "use_cache": true,
   "vocab_size": 250100