Update llm.py
llm.py CHANGED
@@ -93,7 +93,7 @@ elif thinker in "70b|405b":
 CTXLEN = CTXLEN*1024 - MAX_OUTPUT_TOKENS

 from together import Together
-together_client = Together(api_key=)
+together_client = Together(api_key=os.getenv('together_token'))
 ###
 stops = ["<|eot_id|>","<|eom_id|>","</answer>","</output>"]
 def thinker_chat(prompt, history=[], stream=False, use_cache=True, testing=False):
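The change replaces the empty api_key argument with a value read from the environment. Below is a minimal usage sketch, assuming the together_token environment variable holds a valid Together API key and that llm.py imports os elsewhere; the model name and the chat call are illustrative placeholders, not part of this commit.

import os
from together import Together

# Read the API key from the environment instead of hardcoding it,
# using the variable name 'together_token' from the diff:
#   export together_token="..."
together_client = Together(api_key=os.getenv("together_token"))

# Illustrative call only (the model name is a placeholder, not from the commit);
# the stop sequences mirror the `stops` list defined right after the client.
stops = ["<|eot_id|>", "<|eom_id|>", "</answer>", "</output>"]
response = together_client.chat.completions.create(
    model="meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
    messages=[{"role": "user", "content": "Hello"}],
    stop=stops,
    max_tokens=256,
)
print(response.choices[0].message.content)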