{
  "model_type": "gpt2",
  "special_tokens_map_file": null,
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "<|endoftext|>",
  "bos_token": "<|endoftext|>",
  "eos_token": "<|endoftext|>",
  "pad_token": "<|endoftext|>",
  "clean_up_tokenization_spaces": true
}