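# LLaMA-Factory training arguments: ORPO stage, LoRA adapters on q_proj/v_proj,
# 4-bit quantized (QLoRA) fine-tuning of Mistral-7B-v0.2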
cutoff_len: 1024
dataset: dpo_zh_emoji_rj_en
dataset_dir: data
do_train: true
finetuning_type: lora
flash_attn: auto
fp16: true
gradient_accumulation_steps: 8
learning_rate: 5.0e-05
logging_steps: 5
lora_alpha: 16
lora_dropout: 0
lora_rank: 8
lora_target: q_proj,v_proj
lr_scheduler_type: cosine
max_grad_norm: 1.0
max_samples: 100000
model_name_or_path: alpindale/Mistral-7B-v0.2-hf
num_train_epochs: 3.0
optim: adamw_torch
orpo_beta: 0.1
output_dir: saves/Mistral-7B-v0.2/lora/train_2024-05-13-15-43-20
packing: false
per_device_train_batch_size: 1
quantization_bit: 4
report_to: none
save_steps: 100
stage: orpo
template: mistral
warmup_steps: 0
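For reference, `stage: orpo` selects the ORPO objective (Hong et al., 2024), in which `orpo_beta: 0.1` weights an odds-ratio preference term added on top of the ordinary SFT loss over the chosen responses. The sketch below illustrates that objective in PyTorch; it is not LLaMA-Factory's actual implementation, and the argument names are hypothetical.

```python
import torch
import torch.nn.functional as F

def orpo_loss(chosen_logps: torch.Tensor,
              rejected_logps: torch.Tensor,
              sft_loss: torch.Tensor,
              beta: float = 0.1) -> torch.Tensor:
    """Sketch of the ORPO objective: SFT loss plus a beta-weighted
    odds-ratio term that pushes the chosen response's odds above the
    rejected one's.

    chosen_logps / rejected_logps: length-normalized (mean per-token)
    log-probabilities of the chosen and rejected responses under the
    policy model, so each value lies in (-inf, 0).
    """
    # log odds(y|x) = log p(y|x) - log(1 - p(y|x));
    # log1p(-exp(.)) keeps the second term numerically stable.
    log_odds_ratio = (chosen_logps - rejected_logps) - (
        torch.log1p(-torch.exp(chosen_logps))
        - torch.log1p(-torch.exp(rejected_logps))
    )
    # odds-ratio loss: -log sigmoid(log of the odds ratio)
    or_term = -F.logsigmoid(log_odds_ratio)
    return sft_loss + beta * or_term.mean()
```

A few practical notes on the run itself: the effective batch size is `per_device_train_batch_size` × `gradient_accumulation_steps` = 1 × 8 = 8 sequences per device per optimizer step, and `quantization_bit: 4` with `finetuning_type: lora` makes this a QLoRA run that updates only the rank-8 adapters on `q_proj` and `v_proj`. In recent LLaMA-Factory releases a YAML file like this can be passed straight to the CLI, e.g. `llamafactory-cli train <config>.yaml`, though the exact entry point depends on the installed version.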