Max200293 committed
Commit b2f865f · Parent: 70d71f1

Upload tokenizer

Files changed (3):
  1. added_tokens.json +2 -2
  2. tokenizer_config.json +4 -4
  3. vocab.json +9 -10
added_tokens.json CHANGED
@@ -1,4 +1,4 @@
 {
-  "</s>": 41,
-  "<s>": 40
+  "</s>": 40,
+  "<s>": 39
 }
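The new IDs follow mechanically from the vocab.json change further down: dropping the single "å" entry shifts every later token down by one, which is why "<s>"/"</s>" move from 40/41 to 39/40. A minimal sketch of that re-indexing, using an abbreviated, illustrative vocabulary rather than the repo's actual build script:

```python
# Illustrative only: an abbreviated slice of the old vocabulary.
old_vocab = {"|": 0, "£": 29, "å": 30, "æ": 31, "ü": 37,
             "[UNK]": 38, "[PAD]": 39, "<s>": 40, "</s>": 41}

# Removing one entry shifts every higher ID down by one.
removed_id = old_vocab.pop("å")
new_vocab = {tok: (i - 1 if i > removed_id else i)
             for tok, i in old_vocab.items()}

assert new_vocab["<s>"] == 39 and new_vocab["</s>"] == 40
```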
tokenizer_config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "added_tokens_decoder": {
-    "38": {
+    "37": {
       "content": "[UNK]",
       "lstrip": true,
       "normalized": false,
@@ -8,7 +8,7 @@
       "single_word": false,
       "special": false
     },
-    "39": {
+    "38": {
       "content": "[PAD]",
       "lstrip": true,
       "normalized": false,
@@ -16,7 +16,7 @@
       "single_word": false,
       "special": false
     },
-    "40": {
+    "39": {
       "content": "<s>",
       "lstrip": false,
       "normalized": true,
@@ -24,7 +24,7 @@
       "single_word": false,
       "special": true
     },
-    "41": {
+    "40": {
       "content": "</s>",
       "lstrip": false,
       "normalized": true,
vocab.json CHANGED
@@ -1,6 +1,6 @@
 {
-  "[PAD]": 39,
-  "[UNK]": 38,
+  "[PAD]": 38,
+  "[UNK]": 37,
   "\\": 1,
   "_": 2,
   "a": 3,
@@ -31,12 +31,11 @@
   "z": 28,
   "|": 0,
   "£": 29,
-  "å": 30,
-  "æ": 31,
-  "è": 32,
-  "é": 33,
-  "ò": 34,
-  "ó": 35,
-  "ø": 36,
-  "ü": 37
+  "æ": 30,
+  "è": 31,
+  "é": 32,
+  "ò": 33,
+  "ó": 34,
+  "ø": 35,
+  "ü": 36
 }
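A character-level vocabulary with "|" at index 0 is the usual shape of a Wav2Vec2-style CTC tokenizer; assuming that is what this repo holds, the updated files can be loaded and sanity-checked like so (the local path is a placeholder):

```python
from transformers import Wav2Vec2CTCTokenizer

# "./tokenizer" is a placeholder for a local clone of this repo.
tokenizer = Wav2Vec2CTCTokenizer.from_pretrained("./tokenizer")

# With the re-indexed files, the special tokens land on 37, 38, 39, 40.
print(tokenizer.convert_tokens_to_ids(["[UNK]", "[PAD]", "<s>", "</s>"]))
```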