Update config.json
Browse files- config.json +1 -1
config.json
CHANGED
@@ -34,7 +34,7 @@
|
|
34 |
"vocab_size": 32064,
|
35 |
"prompt_wrapper": "phi3",
|
36 |
"prompt_format": "<|user|>\n{context_passage}\n{question}\n<|end|>\n<|assistant|>",
|
37 |
-
"prompt_format_dict": "system_start": "<|system|>\n", "system_stop": "<|end|>\n", "main_start": "<|user|>\n", "main_stop": "<|end|>\n", "start_llm_response": "<|assistant|>",
|
38 |
"tokenizer_local": "tokenizer_phi3.json",
|
39 |
"tokenizer_config": {"bos_id": [1], "bos_token": ["<s>"], "eos_id": [32000,32001,32007], "eos_token": ["<|endoftext|>","<|assistant|>","<|end|>"]},
|
40 |
"model_parent": "microsoft/Phi-3-mini-4k-instruct",
|
|
|
34 |
"vocab_size": 32064,
|
35 |
"prompt_wrapper": "phi3",
|
36 |
"prompt_format": "<|user|>\n{context_passage}\n{question}\n<|end|>\n<|assistant|>",
|
37 |
+
"prompt_format_dict": {"system_start": "<|system|>\n", "system_stop": "<|end|>\n", "main_start": "<|user|>\n", "main_stop": "<|end|>\n", "start_llm_response": "<|assistant|>"},
|
38 |
"tokenizer_local": "tokenizer_phi3.json",
|
39 |
"tokenizer_config": {"bos_id": [1], "bos_token": ["<s>"], "eos_id": [32000,32001,32007], "eos_token": ["<|endoftext|>","<|assistant|>","<|end|>"]},
|
40 |
"model_parent": "microsoft/Phi-3-mini-4k-instruct",
|