mamba_0_5_dpo_ep3 / configs.yaml
Junxiong Wang
add models
1f9986e
raw
history blame contribute delete
428 Bytes
# AlpacaEval model config: Mamba 0.5 distilled/DPO-tuned from Zephyr-7B,
# evaluated with local HuggingFace generation.
mamba_0_5_dpo_ep3:
  prompt_template: "zephyr-7b-alpha/prompt.txt"
  fn_completions: "huggingface_local_completions"
  completions_kwargs:
    model_name: "JunxiongWang/mamba_0_5_dpo_ep3"
    # Passed through to AutoModel.from_pretrained.
    model_kwargs:
      torch_dtype: 'bfloat16'
    # Generation parameters (transformers generate()).
    max_new_tokens: 2048
    temperature: 0.7
    top_p: 1.0
    do_sample: true
  pretty_name: "Mamba 0 5 From Zephyr 7B Beta"
  link: "https://huggingface.co/JunxiongWang/mamba_0_5_dpo_ep3"