zhichao yang committed · Commit 5bda3fe · 1 Parent(s): 312b7b8

Delete config.json

Files changed (1)
  1. config.json +0 -44
config.json DELETED
@@ -1,44 +0,0 @@
-{
-  "_name_or_path": "yikuan8/Clinical-Longformer",
-  "architectures": [
-    "LongformerForMaskedLM"
-  ],
-  "attention_mode": "longformer",
-  "attention_probs_dropout_prob": 0.1,
-  "attention_window": [
-    512,
-    512,
-    512,
-    512,
-    512,
-    512,
-    512,
-    512,
-    512,
-    512,
-    512,
-    512
-  ],
-  "bos_token_id": 0,
-  "eos_token_id": 2,
-  "gradient_checkpointing": false,
-  "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.1,
-  "hidden_size": 768,
-  "ignore_attention_mask": false,
-  "initializer_range": 0.02,
-  "intermediate_size": 3072,
-  "layer_norm_eps": 1e-05,
-  "max_position_embeddings": 16386,
-  "model_type": "longformer",
-  "num_attention_heads": 12,
-  "num_hidden_layers": 12,
-  "pad_token_id": 1,
-  "position_embedding_type": "absolute",
-  "sep_token_id": 2,
-  "torch_dtype": "float32",
-  "transformers_version": "4.8.1",
-  "type_vocab_size": 1,
-  "use_cache": true,
-  "vocab_size": 50265
-}
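
For context, a minimal sketch (not part of this commit) of how the deleted config.json would have been consumed with Hugging Face transformers. The class names and field values come straight from the diff above; the local file path "config.json" is an assumption for illustration:

```python
from transformers import LongformerConfig, LongformerForMaskedLM

# Rebuild the configuration object from the (now deleted) JSON file.
config = LongformerConfig.from_json_file("config.json")
assert config.model_type == "longformer"
assert config.max_position_embeddings == 16386  # extended context length

# Instantiating from the config alone yields randomly initialized weights;
# pretrained weights would come from the base checkpoint named in
# _name_or_path ("yikuan8/Clinical-Longformer").
model = LongformerForMaskedLM(config)
```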