Increase max_new_tokens
Browse files
Co-authored-by: Wing Lian <[email protected]>
- scripts/finetune.py +1 -1
scripts/finetune.py
CHANGED
@@ -83,7 +83,7 @@ def do_inference(cfg, model, tokenizer, prompter="AlpacaPrompter"):
|
|
83 |
with torch.no_grad():
|
84 |
generation_config = GenerationConfig(
|
85 |
repetition_penalty=1.1,
|
86 |
-
max_new_tokens=<!-- original value truncated in extraction -->
|
87 |
temperature=0.9,
|
88 |
top_p=0.95,
|
89 |
top_k=40,
|
|
|
83 |
with torch.no_grad():
|
84 |
generation_config = GenerationConfig(
|
85 |
repetition_penalty=1.1,
|
86 |
+
max_new_tokens=1024,
|
87 |
temperature=0.9,
|
88 |
top_p=0.95,
|
89 |
top_k=40,
|