Runtime error
Update app.py
app.py CHANGED
@@ -48,17 +48,16 @@ def chat_inf(system_prompt, prompt, history, client_choice, seed, temp, tokens,
     hist_len = len(history)
     print(hist_len)
 
-    generate_kwargs = dict(
-        temperature=temp,
-        max_new_tokens=tokens,
-        top_p=top_p,
-        repetition_penalty=rep_p,
-        do_sample=True,
-        seed=seed,
-    )
+    #generate_kwargs = dict(
+    #temperature=temp,
+    #max_new_tokens=tokens,
+    #top_p=top_p,
+    #repetition_penalty=rep_p,
+    #do_sample=True,
+    #seed=seed,
+    #)
     formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
-    stream = client.generate(formatted_prompt
-                             return_full_text=False)
+    stream = client.generate(formatted_prompt)
     output = ""
 
     for response in stream:
@@ -84,7 +83,7 @@ def check_rand(inp, val):
 
 with gr.Blocks() as app:
     gr.HTML(
-        """<center><h1 style='font-size:xx-large;'>
+        """<center><h1 style='font-size:xx-large;'>CalmChat:A mental Health Conversational Agent</h1></center>""")
     with gr.Group():
         with gr.Row():
             client_choice = gr.Dropdown(label="Models", type='index', choices=[c for c in models], value=models[0],
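In the first hunk, the commit works around the error by commenting out the sampling arguments and dropping return_full_text from the generate call, so temperature, token limit, and the other settings no longer reach the model; the second hunk closes the previously unterminated gr.HTML title string. Below is a minimal sketch of how the sampling parameters could be reattached once the app runs again. It assumes `client` is a huggingface_hub InferenceClient (the diff does not show how `client` is constructed, and the model id here is a placeholder):

# Sketch only: pass the sampling parameters to InferenceClient.text_generation
# instead of a commented-out generate_kwargs dict. Client type and model id
# are assumptions, not taken from this commit.
from huggingface_hub import InferenceClient

client = InferenceClient("mistralai/Mistral-7B-Instruct-v0.2")  # hypothetical model

def stream_generate(formatted_prompt, temp, tokens, top_p, rep_p, seed):
    # stream=True yields tokens incrementally, which matches the
    # `for response in stream:` loop later in chat_inf
    return client.text_generation(
        formatted_prompt,
        temperature=temp,
        max_new_tokens=tokens,
        top_p=top_p,
        repetition_penalty=rep_p,
        do_sample=True,
        seed=seed,
        stream=True,
        details=True,
        return_full_text=False,
    )

With this shape, the parameters commented out on lines 51–58 of the diff could be passed straight through as keyword arguments rather than removed from the call.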