File size: 233 Bytes
{
  "clean_up_tokenization_spaces": true,
  "model_max_length": 512,
  "special_tokens": [
    "<s>",
    "<pad>",
    "</s>",
    "<unk>",
    "<cls>",
    "<sep>",
    "<mask>"
  ],
  "tokenizer_class": "PreTrainedTokenizerFast"
}
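For reference, a minimal sketch of how a tokenizer backed by this tokenizer_config.json might be loaded with the transformers library. The repository ID "your-org/your-model" is a placeholder, and the sketch assumes the directory also contains the accompanying tokenizer files (e.g. tokenizer.json) needed by PreTrainedTokenizerFast.

    from transformers import AutoTokenizer

    # Placeholder repo ID or local path; substitute the directory that holds
    # this tokenizer_config.json alongside its tokenizer.json.
    tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")

    # Values from this config surface as attributes on the loaded tokenizer.
    print(tokenizer.model_max_length)              # 512
    print(tokenizer.clean_up_tokenization_spaces)  # True

    # Encoding and decoding use the special tokens listed above (<s>, </s>, <pad>, ...).
    ids = tokenizer("Hello world")["input_ids"]
    print(tokenizer.decode(ids))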