Commit b8a20e2 · Parent(s): e650add
Roxysun committed

Upload tokenizer

Files changed (3):
  1. added_tokens.json +2 -2
  2. tokenizer_config.json +4 -4
  3. vocab.json +3 -4
added_tokens.json CHANGED
@@ -1,4 +1,4 @@
 {
-  "</s>": 35,
-  "<s>": 34
+  "</s>": 34,
+  "<s>": 33
 }
tokenizer_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "added_tokens_decoder": {
-    "32": {
+    "31": {
       "content": "[UNK]",
       "lstrip": true,
       "normalized": false,
@@ -8,7 +8,7 @@
       "single_word": false,
       "special": false
     },
-    "33": {
+    "32": {
       "content": "[PAD]",
       "lstrip": true,
       "normalized": false,
@@ -16,7 +16,7 @@
       "single_word": false,
       "special": false
     },
-    "34": {
+    "33": {
       "content": "<s>",
       "lstrip": false,
       "normalized": true,
@@ -24,7 +24,7 @@
       "single_word": false,
       "special": true
     },
-    "35": {
+    "34": {
       "content": "</s>",
       "lstrip": false,
       "normalized": true,
vocab.json CHANGED
@@ -3,8 +3,8 @@
   "'": 2,
   ".": 3,
   "?": 4,
-  "[PAD]": 33,
-  "[UNK]": 32,
+  "[PAD]": 32,
+  "[UNK]": 31,
   "a": 5,
   "b": 6,
   "c": 7,
@@ -31,6 +31,5 @@
   "x": 28,
   "y": 29,
   "z": 30,
-  "|": 0,
-  "å": 31
+  "|": 0
 }
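
The commit drops the stray "å" entry from vocab.json, so the ids of [UNK], [PAD], <s>, and </s> shift down by one across all three files. Below is a minimal sketch of how the renumbered ids could be sanity-checked after the upload. It assumes the files belong to a Wav2Vec2-style CTC tokenizer (the character-level vocab with "|" as word delimiter suggests this, but the commit itself does not say so), and the repo id "Roxysun/some-model" is a hypothetical placeholder, not taken from this commit.

# Sanity-check sketch for the renumbered tokenizer files.
# Assumptions: Wav2Vec2-style CTC tokenizer; "Roxysun/some-model" is a
# hypothetical repo id used only for illustration.
from transformers import Wav2Vec2CTCTokenizer

tokenizer = Wav2Vec2CTCTokenizer.from_pretrained(
    "Roxysun/some-model",   # hypothetical repo id
    revision="b8a20e2",      # pin to this commit
)

vocab = tokenizer.get_vocab()  # merged vocab.json + added_tokens.json

# Ids as they appear in the new versions of the files.
assert vocab["[UNK]"] == 31
assert vocab["[PAD]"] == 32
assert vocab["<s>"] == 33
assert vocab["</s>"] == 34
assert "å" not in vocab        # the removed entry should be gone

print("vocabulary size:", len(vocab))  # 35 if the remaining ids (0-34) are contiguous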