Runtime error
fix: the maximum number of tokens
app.py CHANGED
@@ -31,7 +31,7 @@ def format_prompt(message, history, system_prompt):
     prompt += f"System: {system_prompt}\n"
     for user_prompt, bot_response in history:
         prompt += f"User: {user_prompt}\n"
-        prompt += f"
+        prompt += f"PersianGPT: {bot_response}\n"  # Response already contains "PersianGPT: "
     prompt += f"""User: {message}
 Falcon:"""
     return prompt
@@ -39,7 +39,7 @@ Falcon:"""
 seed = 42
 
 def generate(
-    prompt, history, system_prompt="<|endoftext|>", temperature=0.9, max_new_tokens=
+    prompt, history, system_prompt="<|endoftext|>", temperature=0.9, max_new_tokens=100, top_p=0.95, repetition_penalty=1.0,
 ):
     temperature = float(temperature)
     if temperature < 1e-2:
@@ -86,9 +86,9 @@ additional_inputs=[
         ),
         gr.Slider(
             label="Max new tokens",
-            value=
+            value=100,
            minimum=0,
-            maximum=
+            maximum=250,
            step=64,
            interactive=True,
            info="The maximum numbers of new tokens",