jimboHsueh committed
Commit 6ba0957 · Parent(s): f687910

End of training

Files changed (3):
  1. README.md (+19 -0)
  2. adapter_config.json (+5 -5)
  3. adapter_model.bin (+2 -2)
README.md CHANGED
@@ -293,4 +293,23 @@ The following `bitsandbytes` quantization config was used during training:
 ### Framework versions
 
 
+- PEFT 0.6.2
+## Training procedure
+
+
+The following `bitsandbytes` quantization config was used during training:
+- quant_method: bitsandbytes
+- load_in_8bit: False
+- load_in_4bit: True
+- llm_int8_threshold: 6.0
+- llm_int8_skip_modules: None
+- llm_int8_enable_fp32_cpu_offload: False
+- llm_int8_has_fp16_weight: False
+- bnb_4bit_quant_type: nf4
+- bnb_4bit_use_double_quant: True
+- bnb_4bit_compute_dtype: bfloat16
+
+### Framework versions
+
+
 - PEFT 0.6.2
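For reference, a minimal sketch of how the quantization config listed above could be reconstructed with `transformers`' `BitsAndBytesConfig`. The values mirror the README list verbatim; the surrounding code is illustrative and is not taken from this repo's training script:

```python
# Sketch: the bitsandbytes quantization config from the README,
# expressed as a transformers BitsAndBytesConfig.
import torch
from transformers import BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_8bit=False,
    load_in_4bit=True,                      # 4-bit quantization is enabled
    llm_int8_threshold=6.0,
    llm_int8_skip_modules=None,
    llm_int8_enable_fp32_cpu_offload=False,
    llm_int8_has_fp16_weight=False,
    bnb_4bit_quant_type="nf4",              # NormalFloat4 quantization
    bnb_4bit_use_double_quant=True,         # nested quantization of quant constants
    bnb_4bit_compute_dtype=torch.bfloat16,  # matmuls computed in bfloat16
)
```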
adapter_config.json CHANGED
@@ -8,16 +8,16 @@
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
-  "lora_alpha": 16,
-  "lora_dropout": 0.05,
+  "lora_alpha": 32,
+  "lora_dropout": 0.1,
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 32,
+  "r": 4,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "v_proj"
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM"
 }
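This change lowers the LoRA rank from 32 to 4, doubles `lora_alpha` from 16 to 32, and raises `lora_dropout` from 0.05 to 0.1; the reordering of `target_modules` is cosmetic. A minimal sketch of the equivalent `peft.LoraConfig` (illustrative, not taken from the repo's training script):

```python
# Sketch: the updated adapter config as a peft.LoraConfig.
# Hyperparameters mirror the new adapter_config.json above.
from peft import LoraConfig

lora_config = LoraConfig(
    r=4,                                  # rank, down from 32
    lora_alpha=32,                        # scaling numerator, up from 16
    lora_dropout=0.1,                     # up from 0.05
    target_modules=["v_proj", "q_proj"],  # attention value/query projections
    task_type="CAUSAL_LM",
)
```

Note that the effective LoRA scaling `lora_alpha / r` rises from 16/32 = 0.5 to 32/4 = 8, so the adapter's update is weighted far more heavily even though it carries far fewer parameters.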
adapter_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2256d569497439c3791cfff560904f655820575f9d3e27da0191428006fac30c
-size 67155338
+oid sha256:3450d20b4101d04b60f719f64282a68d474d4f832dc317678963f387b3cf89fd
+size 8434826
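The new LFS pointer records an adapter of about 8.4 MB, down from about 67 MB, roughly the 8x shrink expected from the rank dropping from 32 to 4. A hedged sketch of loading the retrained adapter; `BASE_MODEL_ID` and `ADAPTER_REPO_ID` are hypothetical placeholders, since the commit does not name the base model or the full Hub repo id:

```python
# Sketch: loading the retrained adapter on top of its base model.
# BASE_MODEL_ID and ADAPTER_REPO_ID are placeholders, not taken
# from this commit.
from transformers import AutoModelForCausalLM
from peft import PeftModel

BASE_MODEL_ID = "..."    # base causal LM the adapter was trained on
ADAPTER_REPO_ID = "..."  # this adapter repo on the Hub

base = AutoModelForCausalLM.from_pretrained(
    BASE_MODEL_ID,
    quantization_config=bnb_config,  # the BitsAndBytesConfig sketched earlier
    device_map="auto",
)
model = PeftModel.from_pretrained(base, ADAPTER_REPO_ID)
```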