Meta-Llama-3.1-8B-Instruct-TIES / mergekit_config.yml
base_model: unsloth/Meta-Llama-3.1-8B
dtype: bfloat16
merge_method: ties
parameters:
  density: 1.0
  weight: 1.0
slices:
- sources:
  - layer_range: [0, 32]
    model: unsloth/Meta-Llama-3.1-8B-Instruct
    parameters:
      density: 1.0
      weight: 1.0
  - layer_range: [0, 32]
    model: unsloth/Meta-Llama-3.1-8B
tokenizer_source: unsloth/Meta-Llama-3.1-8B-Instruct
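The config above performs a TIES merge of the Instruct model onto the base model across all 32 layers (density and weight both 1.0), keeping the Instruct tokenizer. Below is a minimal sketch of how such a config could be applied with mergekit's Python API; the local config path and output directory are placeholder assumptions, not part of this repo, and option values are illustrative.

# Sketch: run this mergekit config from Python (paths are assumptions).
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"                  # assumed local copy of this file
OUTPUT_PATH = "./Meta-Llama-3.1-8B-Instruct-TIES"   # assumed output directory

# Parse the YAML into mergekit's config model.
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the TIES merge and write the merged model to OUTPUT_PATH.
run_merge(
    merge_config,
    out_path=OUTPUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is present
        copy_tokenizer=True,             # copy the tokenizer named by tokenizer_source
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)

An equivalent command-line run would use mergekit's mergekit-yaml entry point with the config file and an output directory.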