Nanobit and winglian committed
Commit 33d4017 (unverified)
Parent(s): 988aeb9

Increase max_new_tokens


Co-authored-by: Wing Lian <[email protected]>

Files changed (1)
  1. scripts/finetune.py +1 -1
scripts/finetune.py CHANGED
@@ -83,7 +83,7 @@ def do_inference(cfg, model, tokenizer, prompter="AlpacaPrompter"):
     with torch.no_grad():
         generation_config = GenerationConfig(
             repetition_penalty=1.1,
-            max_new_tokens=100,
+            max_new_tokens=1024,
             temperature=0.9,
             top_p=0.95,
             top_k=40,
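
For context, the GenerationConfig built in this hunk is passed to model.generate, so raising max_new_tokens from 100 to 1024 allows inference replies to run up to 1024 new tokens before being cut off. Below is a minimal sketch of that usage, assuming a standard Hugging Face transformers causal LM and tokenizer; the helper name generate_reply, the do_sample flag, and the decode step are illustrative and not taken from the repository's code.

# Minimal sketch, not the repository's exact do_inference implementation.
# Assumes a Hugging Face transformers causal LM and its tokenizer are loaded.
import torch
from transformers import GenerationConfig


def generate_reply(model, tokenizer, prompt):
    # Hypothetical helper, for illustration only.
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    with torch.no_grad():
        generation_config = GenerationConfig(
            repetition_penalty=1.1,
            max_new_tokens=1024,  # raised from 100 by this commit
            temperature=0.9,
            top_p=0.95,
            top_k=40,
            do_sample=True,  # assumption: temperature/top_p/top_k only apply when sampling
        )
        output_ids = model.generate(
            input_ids=inputs["input_ids"],
            generation_config=generation_config,
        )
    # Decodes the full sequence (prompt plus continuation) for simplicity.
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)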