MambaInLlama_0_875:
  prompt_template: "zephyr-7b-alpha/prompt.txt"
  fn_completions: "huggingface_local_completions"
  completions_kwargs:
    model_name: "JunxiongWang/MambaInLlama_0_875"
    model_kwargs:
      torch_dtype: 'bfloat16'
    max_new_tokens: 2048
    temperature: 0.7
    top_p: 1.0
    do_sample: True
  pretty_name: "Mamba 0 875 From meta-llama/Meta-Llama-3-8B-Instruct"
  link: "https://huggingface.co/JunxiongWang/MambaInLlama_0_875"