Training in progress, step 500
Files changed:
- model.safetensors +1 -1
- tokenizer_config.json +1 -1
- training_args.bin +1 -1
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f8e68765da3f97b3fe5f88e61ddecb1cb53e50dd9c30c64ec89f7af466e1b262
 size 538090408
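The model.safetensors and training_args.bin entries are Git LFS pointer files: "oid sha256:" records the SHA-256 of the stored blob and "size" its byte count. As an illustration only (not part of this commit), a downloaded checkpoint can be checked against the pointer's oid; the local file path below is an assumption.

# Sketch: verify a local copy of model.safetensors against the LFS pointer above.
# Assumes the file has already been downloaded into the current directory.
import hashlib

def file_sha256(path, chunk_size=1 << 20):
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

expected = "f8e68765da3f97b3fe5f88e61ddecb1cb53e50dd9c30c64ec89f7af466e1b262"  # oid from the diff
print(file_sha256("model.safetensors") == expected)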
tokenizer_config.json
CHANGED
@@ -158,7 +158,7 @@
     "<empty_output>"
   ],
   "bos_token": "<|endoftext|>",
-  "chat_template": "\n{{- messages | selectattr(\"content\", \"defined\") | map(attribute=\"content\") | join(
+  "chat_template": "\n{{- messages | selectattr(\"content\", \"defined\") | map(attribute=\"content\") | join() -}}\n{%- if not add_generation_prompt %}\n {{- eos_token }}\n{%- endif %}\n",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|endoftext|>",
   "model_max_length": 8192,
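The chat_template change completes the previously truncated Jinja template: all defined message contents are concatenated with join() and, unless add_generation_prompt is set, the EOS token is appended. Below is a minimal sketch (not part of the commit) of what the new template renders, using jinja2 directly with hypothetical messages; in the transformers library, tokenizer.apply_chat_template() evaluates this same template.

# Sketch: render the new chat_template with jinja2. The template string is copied
# from the diff; the example messages are placeholders, and eos_token matches the
# config's eos_token.
from jinja2 import Template

chat_template = (
    "\n{{- messages | selectattr(\"content\", \"defined\") "
    "| map(attribute=\"content\") | join() -}}\n"
    "{%- if not add_generation_prompt %}\n"
    " {{- eos_token }}\n"
    "{%- endif %}\n"
)

messages = [
    {"role": "user", "content": "def add(a, b):\n"},
    {"role": "assistant", "content": "    return a + b\n"},
]

print(Template(chat_template).render(
    messages=messages,
    eos_token="<|endoftext|>",
    add_generation_prompt=False,
))
# Output: the two contents joined back-to-back, followed by <|endoftext|>,
# because add_generation_prompt is False.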
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ce1671b5eb5b1fa3a384b9bea4e7206ef8728cd10d64015ba423dfa4eb488ccc
 size 5816