---
# NOTE(review): removed scraped file-viewer chrome that made this file
# invalid YAML ("File size: 343 Bytes" banner and a 1-16 line-number
# gutter). Original snapshot commit: e34f376.
# mergekit configuration: merges three Mistral-7B-family checkpoints with
# the dare_ties method declared below, relative to base_model.
models:
  # NOTE(review): this entry is the same checkpoint as `base_model` and has
  # no `parameters` block — presumably intentional (the base contributes as
  # the reference, not as a weighted task vector), but confirm it is not a
  # leftover from an earlier revision.
  - model: nbeerbower/flammen8-mistral-7B
  - model: nbeerbower/flammen5-mistral-7B
    parameters:
      density: 0.5  # per-model dare_ties density
      weight: 0.5   # per-model mixing weight
  - model: nbeerbower/flammen3X
    parameters:
      density: 0.5
      weight: 0.3  # weighted lower than flammen5 above
merge_method: dare_ties
base_model: nbeerbower/flammen8-mistral-7B  # reference model for the merge
parameters:
  normalize: true  # normalize model weights during the merge
dtype: bfloat16  # dtype of the merged output tensors