Update app.py
app.py CHANGED
@@ -98,11 +98,11 @@ def sidebar():
         unsafe_allow_html=True
     )
 
-    system_promptSide = st.text_input("Optional system prompt:")
+    # system_promptSide = st.text_input("Optional system prompt:")
     temperatureSide = st.slider("Temperature", min_value=0.0, max_value=1.0, value=0.9, step=0.05)
     max_new_tokensSide = st.slider("Max new tokens", min_value=0.0, max_value=4096.0, value=4096.0, step=64.0)
-    ToppSide = st.slider("Top-p (nucleus sampling)", min_value=0.0, max_value=1.0, value=0.6, step=0.05)
-    RepetitionpenaltySide = st.slider("Repetition penalty", min_value=0.0, max_value=2.0, value=1.2, step=0.05)
+    # ToppSide = st.slider("Top-p (nucleus sampling)", min_value=0.0, max_value=1.0, value=0.6, step=0.05)
+    # RepetitionpenaltySide = st.slider("Repetition penalty", min_value=0.0, max_value=2.0, value=1.2, step=0.05)
 
 
 def predict(message: str) -> Any:
@@ -112,11 +112,11 @@ def predict(message: str) -> Any:
     client = Client("https://ysharma-explore-llamav2-with-tgi.hf.space/")
     response = client.predict(
         message,
-        system_promptSide,
+        '',
         temperatureSide,
         max_new_tokensSide,
-        ToppSide,
-        RepetitionpenaltySide,
+        0.6,
+        1.2,
        api_name="/chat_1"
     )
     return response
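
In effect, this commit removes the sidebar widgets for the system prompt, top-p, and repetition penalty and hardcodes their values in the call to the Space's /chat_1 endpoint; only the temperature and max-new-tokens sliders still feed the request. A minimal, self-contained sketch of the resulting predict() call via gradio_client (the two module-level values below are illustrative stand-ins for the remaining Streamlit sliders, not part of the commit):

from typing import Any

from gradio_client import Client

# Stand-ins for the values the remaining sidebar sliders would supply.
temperatureSide = 0.9
max_new_tokensSide = 4096.0


def predict(message: str) -> Any:
    # The /chat_1 endpoint takes, in order: message, system prompt, temperature,
    # max new tokens, top-p, repetition penalty (matching the diff above).
    client = Client("https://ysharma-explore-llamav2-with-tgi.hf.space/")
    response = client.predict(
        message,
        '',                   # system prompt: widget removed, empty string hardcoded
        temperatureSide,      # still driven by the Temperature slider
        max_new_tokensSide,   # still driven by the Max new tokens slider
        0.6,                  # top-p: slider removed, its former default hardcoded
        1.2,                  # repetition penalty: slider removed, its former default hardcoded
        api_name="/chat_1",
    )
    return response


# Example usage (calls the remote Space):
# print(predict("Hello, Llama 2!"))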