Wedyan2023
committed on
Update app94.py
Browse files
app94.py
CHANGED
@@ -107,7 +107,7 @@ with st.sidebar:
|
|
107 |
|
108 |
temperature = st.slider(
|
109 |
"Temperature",
|
110 |
-
0.0, 1.0, 0.
|
111 |
help="Controls randomness in generation"
|
112 |
)
|
113 |
|
@@ -215,7 +215,7 @@ if "task_choice" in st.session_state:
|
|
215 |
messages=[{"role": "system", "content": system_prompt}],
|
216 |
temperature=temperature,
|
217 |
stream=True,
|
218 |
-
max_tokens=
|
219 |
)
|
220 |
response = st.write_stream(stream)
|
221 |
st.session_state.messages.append({"role": "assistant", "content": response})
|
@@ -244,7 +244,7 @@ if "task_choice" in st.session_state:
|
|
244 |
messages=[{"role": "system", "content": system_prompt}],
|
245 |
temperature=temperature,
|
246 |
stream=True,
|
247 |
-
max_tokens=
|
248 |
)
|
249 |
response = st.write_stream(stream)
|
250 |
st.session_state.messages.append({"role": "assistant", "content": response})
|
|
|
107 |
|
108 |
temperature = st.slider(
|
109 |
"Temperature",
|
110 |
+
0.0, 1.0, 0.7,
|
111 |
help="Controls randomness in generation"
|
112 |
)
|
113 |
|
|
|
215 |
messages=[{"role": "system", "content": system_prompt}],
|
216 |
temperature=temperature,
|
217 |
stream=True,
|
218 |
+
max_tokens=4000,
|
219 |
)
|
220 |
response = st.write_stream(stream)
|
221 |
st.session_state.messages.append({"role": "assistant", "content": response})
|
|
|
244 |
messages=[{"role": "system", "content": system_prompt}],
|
245 |
temperature=temperature,
|
246 |
stream=True,
|
247 |
+
max_tokens=4000,
|
248 |
)
|
249 |
response = st.write_stream(stream)
|
250 |
st.session_state.messages.append({"role": "assistant", "content": response})
|