vocab_size of model back to 51200
config.json  +1 -1
@@ -27,5 +27,5 @@
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
   "transformers_version": "4.35.2",
-  "vocab_size":
+  "vocab_size": 51200
 }
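
As a sanity check after this change, config.vocab_size can be compared against the checkpoint's input-embedding matrix and the tokenizer. The sketch below is a minimal example; the repo id is a placeholder, not the actual repository name.

from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

repo_id = "org/model-name"  # placeholder: substitute the actual model repository

config = AutoConfig.from_pretrained(repo_id)
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id)

embedding_rows = model.get_input_embeddings().weight.shape[0]

print("config.vocab_size:   ", config.vocab_size)
print("tokenizer vocab size:", len(tokenizer))
print("embedding rows:      ", embedding_rows)

# The config value must match the checkpoint's embedding matrix; it may exceed
# the tokenizer's vocabulary if the embedding table is padded for efficiency.
assert config.vocab_size == embedding_rows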