models:
  # Primary RP and Interaction Models
  - model: Gryphe/Pantheon-RP-Pure-1.6.2-22b-Small
    parameters:
      weight: 0.26
      density: 0.74
  - model: Kaoeiri/MS_Moingooistral-2409-22B
    parameters:
      weight: 0.23
      density: 0.75

  # Versatile, High-Performance Models
  - model: anthracite-org/magnum-v4-22b
    parameters:
      weight: 1.0
      density: 0.86
  - model: DigitalSouls/BlackSheep-DigitalSoul-22B
    parameters:
      weight: 0.18
      density: 0.68
  - model: InferenceIllusionist/SorcererLM-22B
    parameters:
      weight: 0.19
      density: 0.70
  - model: Kaoeiri/Magnum-v4-Cydonia-vXXX-22B
    parameters:
      weight: 0.22
      density: 0.68
  - model: crestf411/MS-sunfall-v0.7.0
    parameters:
      weight: 0.21
      density: 0.70

  # Supporting and Adapter-Based Models
  - model: Kaoeiri/MS_a-coolyte-2409-22B
    parameters:
      weight: 0.22
      density: 0.69
  - model: Kaoeiri/MS_fujin-2409-22B
    parameters:
      weight: 0.16
      density: 0.68
  - model: Kaoeiri/MS_springydragon-2409-22B
    parameters:
      weight: 0.17
      density: 0.71
  - model: Kaoeiri/MS_dampf-2409-22B
    parameters:
      weight: 0.17
      density: 0.71
  - model: Kaoeiri/MS-Physician-2409-22B
    parameters:
      weight: 0.10
      density: 0.67
  - model: hf-100/Mistral-Small-Spellbound-StoryWriter-22B-instruct-0.2-chkpt-200-16-bit
    parameters:
      weight: 0.18
      density: 0.67

  # Secondary and Low-Priority RP Models
  - model: Gryphe/Pantheon-RP-1.6.2-22b-Small
    parameters:
      weight: 0.13
      density: 0.65
  - model: ArliAI/Mistral-Small-22B-ArliAI-RPMax-v1.1
    parameters:
      weight: 0.12
      density: 0.63
  - model: Darkknight535/MS-Moonlight-22B-v3
    parameters:
      weight: 0.11
      density: 0.63

  # Additional Models
  - model: spow12/ChatWaifu_v2.0_22B
    parameters:
      weight: 0.27
      density: 0.70
  - model: Saxo/Linkbricks-Horizon-AI-Japanese-Superb-V1-22B
    parameters:
      weight: 0.20
      density: 0.58
  - model: allura-org/MS-Meadowlark-22B
    parameters:
      weight: 0.27
      density: 0.71

merge_method: dare_ties
base_model: unsloth/Mistral-Small-Instruct-2409
parameters:
  density: 0.85
  epsilon: 0.08
  lambda: 1.25
dtype: bfloat16
tokenizer_source: union
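
A minimal usage sketch, assuming the configuration above is saved as config.yaml and mergekit is installed; the output directory name is illustrative:

    # run the DARE-TIES merge defined in config.yaml and write the merged model to ./merged-22b
    mergekit-yaml config.yaml ./merged-22b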