Tonic committed
Commit 2e05228 · 1 Parent(s): 92053d2

wrap automodel for zerogpu

Files changed (1)
  1. langchainapp.py +1 -1
langchainapp.py CHANGED
@@ -43,7 +43,7 @@ device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 ### Utils
 hf_token, yi_token = load_env_variables()
 
-tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_token, trust_remote_code=True)
+# tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_token, trust_remote_code=True)
 # Lazy load model
 model = None
 
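For context, commenting out the module-level AutoTokenizer call matches the lazy-load pattern already hinted at by `# Lazy load model` / `model = None`: on ZeroGPU Spaces, heavy loads and CUDA work are normally deferred into a function wrapped with the `spaces.GPU` decorator. Below is a minimal sketch of that pattern, not the app's actual code; the placeholder values for `model_name` and `hf_token`, the `_lazy_load` helper, the choice of `AutoModel`, and the `embed` function are assumptions for illustration only.

```python
# Minimal sketch of lazy loading under ZeroGPU (assumed names/values, not from the commit).
import os

import spaces
import torch
from transformers import AutoModel, AutoTokenizer

model_name = "org/model-id"             # placeholder; the real value is defined in langchainapp.py
hf_token = os.environ.get("HF_TOKEN")   # placeholder for load_env_variables()

tokenizer = None
model = None


def _lazy_load():
    """Load tokenizer and model on first use instead of at import time."""
    global tokenizer, model
    if tokenizer is None:
        tokenizer = AutoTokenizer.from_pretrained(
            model_name, token=hf_token, trust_remote_code=True
        )
    if model is None:
        model = AutoModel.from_pretrained(
            model_name, token=hf_token, trust_remote_code=True
        ).to("cuda")


@spaces.GPU
def embed(text: str) -> torch.Tensor:
    """Run a forward pass inside the ZeroGPU-allocated context."""
    _lazy_load()
    inputs = tokenizer(text, return_tensors="pt").to(model.device)
    with torch.no_grad():
        outputs = model(**inputs)
    # Mean-pool the last hidden state as a simple sentence embedding.
    return outputs.last_hidden_state.mean(dim=1)
```

Keeping the `from_pretrained` calls inside the decorated call path is what lets the Space start without a GPU attached, which is presumably why the eager tokenizer load was removed here.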