cutoff_len: 1024                  # maximum input length in tokens
dataset_dir: data                 # directory containing the dataset definitions
do_predict: true                  # run prediction/evaluation instead of training
eval_dataset: truth_dev           # dataset to generate predictions on
finetuning_type: full             # checkpoint comes from full-parameter fine-tuning
flash_attn: auto                  # enable FlashAttention automatically if available
max_new_tokens: 512               # generation length limit per sample
max_samples: 100000               # cap on the number of evaluation samples
# fine-tuned checkpoint to evaluate
model_name_or_path: saves/LLaMA3.1-8B-Chat/full/train_2024-07-29-19-43-56_llama3.1_reeval_final
# where predictions and metrics are written
output_dir: saves/LLaMA3.1-8B-Chat/full/eval_2024-07-30-01-57-05
per_device_eval_batch_size: 2     # evaluation batch size per device
predict_with_generate: true       # use generate() so text-based metrics can be scored
preprocessing_num_workers: 16     # worker processes for dataset preprocessing
quantization_method: bitsandbytes # quantization backend (only used if a quantization bit width is set)
stage: sft                        # supervised fine-tuning stage
temperature: 0.95                 # sampling temperature for generation
template: llama3                  # prompt template applied to each sample
top_p: 0.7                        # nucleus sampling threshold
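
# Usage sketch (assumption: this file is consumed as a LLaMA-Factory batch-prediction
# config; the CLI name and subcommand below are not stated in this file):
#   llamafactory-cli train <path-to-this-file>.yaml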