base_model: mistralai/Mistral-7B-Instruct-v0.2
gate_mode: hidden # one of "hidden", "cheap_embed", or "random"
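# (gate modes, summarized from mergekit-moe's options: "hidden" derives gate
#  weights from hidden-state representations of the positive prompts,
#  "cheap_embed" uses raw token embeddings only, "random" initializes gates randomly)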
dtype: bfloat16 # output dtype (float32, float16, or bfloat16)
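# Each expert lists positive_prompts: example inputs that should route to that
# expert; with gate_mode "hidden" these seed the router gate weights.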
experts:
  - source_model: SanjiWatsuki/Kunoichi-DPO-v2-7B
    positive_prompts:
      - "roleplay"
  - source_model: mistralai/Mistral-7B-Instruct-v0.2
    positive_prompts:
      - "chat"
#"{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}"