{
  "lora_alpha": 16,
  "lora_dropout": 0.1,
  "r": 64,
  "bias": "none",
  "task_type": "CAUSAL_LM"
}
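This JSON is a LoRA adapter configuration for parameter-efficient fine-tuning: rank-64 update matrices, a scaling factor (alpha) of 16, 10% dropout on the LoRA layers, no bias adaptation, and a causal language modeling task. A minimal sketch of building the equivalent config with Hugging Face's `peft` library follows; the base model name is a placeholder assumption, not part of the config itself.

```python
# A minimal sketch, assuming the Hugging Face peft and transformers libraries.
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

# Mirror the fields of the JSON config above.
lora_config = LoraConfig(
    r=64,                   # rank of the LoRA update matrices
    lora_alpha=16,          # scaling factor applied to the LoRA updates
    lora_dropout=0.1,       # dropout on the LoRA layers during training
    bias="none",            # do not adapt any bias parameters
    task_type="CAUSAL_LM",  # causal language modeling task
)

# Placeholder base model; the config does not specify one.
base_model = AutoModelForCausalLM.from_pretrained("gpt2")
model = get_peft_model(base_model, lora_config)
model.print_trainable_parameters()  # prints the small trainable fraction added by LoRA
```

Because `bias` is `"none"` and only the rank-64 adapters are trained, the trainable parameter count stays a small fraction of the base model's weights.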