nazneen commited on
Commit
9480df8
·
1 Parent(s): 54cab03

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -2
app.py CHANGED
@@ -14,9 +14,8 @@ theme = gr.themes.Monochrome(
14
 
15
  TOKEN = os.getenv("USER_TOKEN")
16
  tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-7b-instruct")
17
- instruct_pipeline_falcon = pipeline(model="tiiuae/falcon-7b-instruct", tokenizer = tokenizer, torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto")
18
  instruct_pipeline_llama = pipeline(model="HuggingFaceH4/llama-7b-ift-ds-save-test4", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto", use_auth_token=TOKEN)
19
- #instruct_pipeline_12b = pipeline(model="databricks/dolly-v2-12b", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto")
20
 
21
  def generate(query, temperature, top_p, top_k, max_new_tokens):
22
  return [instruct_pipeline_falcon(query, temperature=temperature, top_p=top_p, top_k=top_k, max_new_tokens=max_new_tokens),
 
14
 
15
  TOKEN = os.getenv("USER_TOKEN")
16
  tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-7b-instruct")
17
+ instruct_pipeline_falcon = pipeline(model="tiiuae/falcon-7b-instruct", tokenizer = tokenizer, torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto", device=0)
18
  instruct_pipeline_llama = pipeline(model="HuggingFaceH4/llama-7b-ift-ds-save-test4", torch_dtype=torch.bfloat16, trust_remote_code=True, device_map="auto", use_auth_token=TOKEN)
 
19
 
20
  def generate(query, temperature, top_p, top_k, max_new_tokens):
21
  return [instruct_pipeline_falcon(query, temperature=temperature, top_p=top_p, top_k=top_k, max_new_tokens=max_new_tokens),