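# LLaMA-Factory batch-prediction (evaluation) config, seemingly exported by the
# LLaMA Board web UI: it runs generation-based evaluation (stage: sft with
# do_predict) of the TinyLlama/TinyLlama-1.1B-Chat-v1.0 base model together with
# a previously trained LoRA adapter on the alpaca_en dataset.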
adapter_name_or_path: saves/LLaMA-tiny/lora/train_2024-05-05-10-57-50
cutoff_len: 1024
dataset: alpaca_en
dataset_dir: data
do_predict: true
finetuning_type: lora
flash_attn: auto
max_new_tokens: 512
max_samples: 100000
model_name_or_path: TinyLlama/TinyLlama-1.1B-Chat-v1.0
output_dir: saves/LLaMA-tiny/lora/eval_2024-05-05-11-49-16
per_device_eval_batch_size: 2
predict_with_generate: true
stage: sft
temperature: 0.95
template: default
top_p: 0.7
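
# A typical way to launch such a config (a sketch assuming a standard
# LLaMA-Factory install that provides the llamafactory-cli entry point;
# the file name below is hypothetical):
#
#   llamafactory-cli train predict_tinyllama_lora.yaml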