tonyhong committed
Commit 9991912 · 1 Parent(s): cfc87ff

update config
Files changed (1):
config.json (+10 -4)
config.json CHANGED
@@ -3,20 +3,26 @@
     "LtgBertForMaskedLM"
   ],
   "attention_probs_dropout_prob": 0.1,
-  "classifier_dropout": null,
+  "auto_map": {
+    "AutoConfig": "configuration_ltgbert.LtgBertConfig",
+    "AutoModelForMaskedLM": "modeling_ltgbert.LtgBertForMaskedLM",
+    "AutoModelForSequenceClassification": "modeling_ltgbert.LtgBertForSequenceClassification"
+  },
+  "classifier_dropout": 0.2,
   "hidden_dropout_prob": 0.1,
   "hidden_size": 384,
   "intermediate_size": 1024,
   "layer_norm_eps": 1e-07,
-  "mask_token_id": 4,
   "max_position_embeddings": 512,
-  "model_type": "bert",
+  "model_type": "ltgbert",
   "num_attention_heads": 6,
   "num_hidden_layers": 12,
   "output_all_encoded_layers": true,
   "pad_token_id": 3,
+  "mask_token_id": 4,
   "position_bucket_size": 32,
   "torch_dtype": "float32",
-  "transformers_version": "4.44.2",
+  "transformers_version": "4.26.0",
   "vocab_size": 6144
 }
+
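The new auto_map block is what lets the Transformers Auto classes find the custom LtgBert code shipped in this repository (configuration_ltgbert.py and modeling_ltgbert.py) now that model_type is "ltgbert" rather than the built-in "bert". A minimal loading sketch follows; the repo id is a placeholder, since the actual Hub path is not shown in this commit, and loading custom code like this requires trust_remote_code=True.

from transformers import AutoConfig, AutoModelForMaskedLM

repo_id = "tonyhong/ltgbert"  # placeholder repo id, not taken from the commit

# trust_remote_code=True makes AutoConfig / AutoModelForMaskedLM resolve to the
# classes declared in auto_map (LtgBertConfig, LtgBertForMaskedLM) instead of
# the stock BERT implementation.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForMaskedLM.from_pretrained(repo_id, trust_remote_code=True)

print(type(config).__name__)  # expected: LtgBertConfig
print(type(model).__name__)   # expected: LtgBertForMaskedLM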