winglian committed
Commit ba944e6 · unverified · 1 Parent(s): badda37

workaround for transformers bug requiring do_sample for saving pretrained (#1206)

Files changed (1): src/axolotl/train.py +2 -0
src/axolotl/train.py CHANGED
@@ -63,6 +63,8 @@ def train(
         msg += " and peft_config..."
     LOG.debug(msg)
     model, peft_config = load_model(cfg, tokenizer, inference=cli_args.inference)
+    model.generation_config.do_sample = True
+
     model_ref = None
     if cfg.rl:
         if cfg.adapter and not cfg.rl_adapter_ref_model:
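
For context, here is a minimal sketch (not part of the commit) of the failure mode being worked around: recent transformers releases validate the generation config during save_pretrained, and a config that carries sampling parameters (e.g. temperature or top_p) while do_sample is False can fail that validation. Forcing do_sample to True before saving, as the diff above does, sidesteps the error. The model name and output path below are placeholders.

from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained("gpt2")  # placeholder model

# A training setup may leave sampling parameters set on the generation config.
model.generation_config.temperature = 0.7
model.generation_config.top_p = 0.9

# Workaround: enable do_sample so the generation-config validation run during
# save_pretrained does not reject the non-default sampling parameters.
model.generation_config.do_sample = True

model.save_pretrained("/tmp/axolotl-checkpoint")  # placeholder output path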