{
  "model_name_or_path": "NousResearch/Llama-2-7b-hf",
  "max_length": 256,
  "model_kwargs": {},
  "pooling_strategy": "last",
  "lora_config_kwargs": {
    "task_type": "CAUSAL_LM",
    "r": 32,
    "lora_alpha": 32,
    "lora_dropout": 0.1,
    "bias": "none"
  },
  "is_llm": true,
  "apply_billm": false,
  "billm_model_class": null,
  "apply_lora": true,
  "tokenizer_padding_side": null,
  "angle_emb_version": "0.5.1"
}
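
For context, this config describes an angle_emb (v0.5.1) embedding model built on Llama-2-7B with last-token pooling and a LoRA adapter (r=32, alpha=32, dropout=0.1). Below is a minimal sketch of loading a model with these settings via angle_emb's AnglE.from_pretrained; the LoRA checkpoint path is a hypothetical placeholder, and the keyword arguments are assumed to mirror the config keys above.

# Minimal sketch: load an AnglE model matching this config.
# Assumptions: angle_emb's AnglE.from_pretrained API; the LoRA path below
# is a placeholder, not taken from this config file.
import torch
from angle_emb import AnglE

angle = AnglE.from_pretrained(
    'NousResearch/Llama-2-7b-hf',  # "model_name_or_path"
    max_length=256,                # "max_length"
    pooling_strategy='last',       # "pooling_strategy"
    is_llm=True,                   # "is_llm"
    apply_lora=True,               # "apply_lora"
    pretrained_lora_path='your-org/your-angle-lora',  # hypothetical adapter
    torch_dtype=torch.float16,     # half precision to fit a 7B model
)

# Encode a sentence to a dense vector.
vec = angle.encode('hello world', to_numpy=True)
print(vec.shape)

Since "is_llm" is true and "pooling_strategy" is "last", the model embeds text with the hidden state of the final token rather than a CLS token, which is the usual choice for decoder-only backbones like Llama-2.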