Update app.py
app.py
CHANGED
@@ -19,6 +19,8 @@ from rwkv.utils import PIPELINE, PIPELINE_ARGS
 pipeline = PIPELINE(model, "20B_tokenizer.json")
 
 def generate_prompt(instruction, input=None):
+    instruction = instruction.strip().replace('\r\n','\n').replace('\n\n','\n')
+    input = input.strip().replace('\r\n','\n').replace('\n\n','\n')
     if input:
         return f"""Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.
 
@@ -54,8 +56,8 @@ def evaluate(
     token_ban = [], # ban the generation of some tokens
     token_stop = [0]) # stop generation whenever you see any token here
 
-    instruction = instruction.strip()
-    input = input.strip()
+    instruction = instruction.strip().replace('\r\n','\n').replace('\n\n','\n')
+    input = input.strip().replace('\r\n','\n').replace('\n\n','\n')
     ctx = generate_prompt(instruction, input)
 
     all_tokens = []
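For context, a minimal sketch of what the added normalization does, using an illustrative sample string that is not part of the commit: CRLF line endings are folded to LF, then doubled newlines are collapsed, so stray blank lines do not leak into the prompt template.

# Illustration only (sample text is an assumption): the normalization added above.
instruction = "Write a haiku\r\n\r\nabout spring"
instruction = instruction.strip().replace('\r\n','\n').replace('\n\n','\n')
print(repr(instruction))  # 'Write a haiku\nabout spring'

Note that a single replace('\n\n','\n') pass only collapses one level of blank lines per call (four consecutive newlines become two), which is generally enough for short free-form instruction and input fields.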