tolga-ozturk committed on
Commit 4cd3dd9 · 1 Parent(s): d7d412e

Upload config.json

Files changed (1)
config.json +3 -5
config.json CHANGED
@@ -1,9 +1,8 @@
 {
-  "_name_or_path": "tolga-ozturk/mGPT-nsp",
   "_num_labels": 2,
   "activation_function": "gelu_new",
   "architectures": [
-    "ModelNSP"
+    "GPT2LMHeadModel"
   ],
   "attn_pdrop": 0.1,
   "bos_token_id": 1,
@@ -12,12 +11,12 @@
   "gradient_checkpointing": false,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
-  "model_type": "ModelNSP",
+  "model_type": "gpt2",
   "n_ctx": 1024,
   "n_embd": 1024,
   "n_head": 16,
   "n_inner": 4096,
-  "n_layer": 27,
+  "n_layer": 24,
   "n_positions": 1024,
   "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
@@ -28,7 +27,6 @@
   "summary_proj_to_labels": true,
   "summary_type": "cls_index",
   "summary_use_proj": true,
-  "torch_dtype": "float32",
   "transformers_version": "4.18.0",
   "use_cache": true,
   "vocab_size": 250100