winglian committed on
Commit
9330d5c
·
verified ·
1 Parent(s): ecef980

Upload folder using huggingface_hub

Browse files
Files changed (2) hide show
  1. tokenizer.json +1 -1
  2. tokenizer_config.json +2 -0
tokenizer.json CHANGED
@@ -131,7 +131,7 @@
131
  },
132
  {
133
  "id": 14,
134
- "content": "<|im_start|>>",
135
  "single_word": false,
136
  "lstrip": false,
137
  "rstrip": false,
 
131
  },
132
  {
133
  "id": 14,
134
+ "content": "<|im_start|>",
135
  "single_word": false,
136
  "lstrip": false,
137
  "rstrip": false,
tokenizer_config.json CHANGED
@@ -8005,9 +8005,11 @@
8005
  }
8006
  },
8007
  "bos_token": "<s>",
 
8008
  "clean_up_tokenization_spaces": true,
8009
  "eos_token": "</s>",
8010
  "model_max_length": 1000000000000000019884624838656,
 
8011
  "tokenizer_class": "GPT2Tokenizer",
8012
  "unk_token": "<unk>"
8013
  }
 
8005
  }
8006
  },
8007
  "bos_token": "<s>",
8008
+ "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
8009
  "clean_up_tokenization_spaces": true,
8010
  "eos_token": "</s>",
8011
  "model_max_length": 1000000000000000019884624838656,
8012
+ "pad_token": "<pad>",
8013
  "tokenizer_class": "GPT2Tokenizer",
8014
  "unk_token": "<unk>"
8015
  }