models:
  - model: meta-llama/Meta-Llama-3-8B # no parameters necessary for base model
  - model: bineric/NorskGPT-Llama3-8b
    parameters:
      density: 0.5 # fraction of weights in differences from the base model to retain
      weight:      # weight gradient
        - filter: mlp
          value: 0.5
        - value: 0
  - model: NousResearch/Hermes-2-Theta-Llama-3-8B
    parameters:
      density: 0.5
      weight: 0.5
merge_method: dare_ties
base_model: meta-llama/Meta-Llama-3-8B
parameters:
  normalize: true
  int8_mask: true
dtype: bfloat16
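Once the merge has been produced (for example with mergekit's `mergekit-yaml` CLI, roughly `mergekit-yaml dare_ties.yaml ./merged-model`), the output directory is an ordinary Hugging Face checkpoint and can be loaded with transformers. The sketch below is only an illustration: the config filename `dare_ties.yaml` and output path `./merged-model` are assumptions, not part of the config above.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical output directory produced by mergekit from the config above
model_path = "./merged-model"

tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    torch_dtype=torch.bfloat16,  # matches the dtype set in the merge config
    device_map="auto",
)

# Quick sanity check in Norwegian, since one parent model is NorskGPT
prompt = "Hva er hovedstaden i Norge?"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))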