tiendung committed
Commit 438ecdd · verified · 1 Parent(s): 538f422

Update llm.py

Files changed (1)
  1. llm.py +1 -1
llm.py CHANGED
@@ -93,7 +93,7 @@ elif thinker in "70b|405b":
  CTXLEN = CTXLEN*1024 - MAX_OUTPUT_TOKENS
 
  from together import Together
-together_client = Together(api_key=)
+together_client = Together(api_key=os.getenv('together_token'))
 ###
 stops = ["<|eot_id|>","<|eom_id|>","</answer>","</output>"]
 def thinker_chat(prompt, history=[], stream=False, use_cache=True, testing=False):
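For context, a minimal sketch of how the fixed line fits into the client setup, assuming os is imported earlier in llm.py and the together_token environment variable holds a valid Together API key. The model name and the body of thinker_chat below are illustrative placeholders, not the repository's actual implementation:

import os
from together import Together

# Read the API key from the environment instead of leaving it empty / hard-coded.
together_client = Together(api_key=os.getenv('together_token'))

stops = ["<|eot_id|>", "<|eom_id|>", "</answer>", "</output>"]

def thinker_chat(prompt, history=[], stream=False, use_cache=True, testing=False):
    # Illustrative only: use_cache / testing are unused here, and the model id
    # is an assumed placeholder rather than the value used in this repo.
    messages = history + [{"role": "user", "content": prompt}]
    response = together_client.chat.completions.create(
        model="meta-llama/Llama-3.3-70B-Instruct-Turbo",
        messages=messages,
        stop=stops,
        stream=stream,
    )
    # With stream=True the SDK returns an iterator of chunks; otherwise a full response.
    return response if stream else response.choices[0].message.content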