---
# mergekit configuration: DARE-TIES merge of four Qwen QwQ-32B-Preview
# derivatives over all 64 layers. Donor weights sum to 1.0 (0.3 + 0.4 + 0.3);
# the base model is listed without merge parameters, as dare_ties uses it as
# the reference for task-vector extraction rather than as a weighted donor.
base_model: Qwen/QwQ-32B-Preview
dtype: bfloat16
merge_method: dare_ties
parameters:
  # Quantize the sign mask to int8 to reduce memory during the merge.
  int8_mask: 1.0
slices:
  - sources:
      # Base/reference model — no density/weight (dare_ties convention).
      - layer_range: [0, 64]
        model: Qwen/QwQ-32B-Preview
      - layer_range: [0, 64]
        model: Saxo/Linkbricks-Horizon-AI-Japanese-Base-32B
        parameters:
          density: 0.53  # fraction of delta weights retained after DARE drop
          weight: 0.3
      - layer_range: [0, 64]
        model: huihui-ai/QwQ-32B-Preview-abliterated
        parameters:
          density: 0.53
          weight: 0.4
      - layer_range: [0, 64]
        model: nitky/EZO-QwQ-32B-Preview
        parameters:
          density: 0.53
          weight: 0.3
# Build the output tokenizer as the union of all source vocabularies.
tokenizer_source: union