Nanobit committed
Commit 08719b9 · unverified · 1 Parent(s): 1427d5b

fix(log): improve warning to clarify that lora_modules_to_save expects a list (#1197)

Files changed (1)
  1. src/axolotl/utils/models.py +2 -2
src/axolotl/utils/models.py CHANGED
@@ -67,7 +67,7 @@ def check_model_config(cfg: DictDefault, model_config: Union[AutoConfig, DictDef
     ):
         lora_modules_to_save = ", ".join(map(lambda x: f"`{x}`", lora_modules_to_save))
         raise ValueError(
-            f"`lora_modules_to_save` not properly set when adding new tokens. Please include {lora_modules_to_save} in `lora_modules_to_save`."
+            f"`lora_modules_to_save` not properly set when adding new tokens. Please include [{lora_modules_to_save}] in `lora_modules_to_save`."
         )
 
 
@@ -182,7 +182,7 @@ def load_tokenizer(cfg):
                 [f"`{x}`" for x in lora_modules_to_save]
             )
             raise ValueError(
-                f"Please set lora_modules_to_save to {lora_modules_to_save} when using an adapter and changing the special tokens."
+                f"Please set lora_modules_to_save to [{lora_modules_to_save}] when using an adapter and changing the special tokens."
             )
 
     tokenizer.add_special_tokens(
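
The change touches only the message text: the joined module names are now wrapped in square brackets so the error reads like the list that `lora_modules_to_save` expects. A minimal sketch of how the updated message renders, using `embed_tokens` and `lm_head` as illustrative module names (not taken from this commit; axolotl derives the real ones from the model):

# Sketch only: reproduces the string formatting used by the updated warning.
# The module names below are illustrative examples.
lora_modules_to_save = ["embed_tokens", "lm_head"]
formatted = ", ".join(map(lambda x: f"`{x}`", lora_modules_to_save))
print(
    f"`lora_modules_to_save` not properly set when adding new tokens. "
    f"Please include [{formatted}] in `lora_modules_to_save`."
)
# -> `lora_modules_to_save` not properly set when adding new tokens.
#    Please include [`embed_tokens`, `lm_head`] in `lora_modules_to_save`.

The bracketed form mirrors the list syntax of the corresponding config entry, e.g. (module names again illustrative):

lora_modules_to_save:
  - embed_tokens
  - lm_head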