not-lain committed on
Commit 0e43bac · verified · 1 Parent(s): 340dfab

Upload tokenizer

special_tokens_map.json CHANGED
@@ -32,7 +32,13 @@
     "rstrip": false,
     "single_word": false
   },
-  "pad_token": "<|endoftext|>",
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "unk_token": {
     "content": "<|endoftext|>",
     "lstrip": false,
tokenizer.json CHANGED
@@ -1,11 +1,6 @@
 {
   "version": "1.0",
-  "truncation": {
-    "direction": "Right",
-    "max_length": 1024,
-    "strategy": "LongestFirst",
-    "stride": 0
-  },
+  "truncation": null,
   "padding": null,
   "added_tokens": [
     {
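Setting "truncation" to null corresponds to disabling truncation on the underlying fast tokenizer before serializing it. A hedged sketch with the tokenizers library; the file path is illustrative:

# A minimal sketch, assuming the tokenizers library; the path is illustrative.
from tokenizers import Tokenizer

tk = Tokenizer.from_file("tokenizer.json")

# The removed block was equivalent to:
# tk.enable_truncation(max_length=1024, stride=0,
#                      strategy="longest_first", direction="right")
tk.no_truncation()  # serializes as "truncation": null

tk.save("tokenizer.json")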
tokenizer_config.json CHANGED
@@ -161,9 +161,13 @@
   "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|endoftext|>",
+  "max_length": 1024,
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "<|endoftext|>",
+  "stride": 0,
   "tokenizer_class": "GPT2Tokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<|endoftext|>",
   "vocab_size": 49152
 }
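The truncation settings dropped from tokenizer.json reappear here as plain init kwargs ("max_length", "stride", "truncation_side", "truncation_strategy"). In transformers, extra keyword arguments passed at load time are kept in the tokenizer's init_kwargs and written back to tokenizer_config.json on save, so one plausible way this diff arises is a round-trip like the following; the repo id is hypothetical:

# A minimal sketch, assuming transformers' init-kwarg round-tripping.
# "your-username/your-model" is a hypothetical repo id.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "your-username/your-model",
    max_length=1024,                      # extra kwargs land in tok.init_kwargs ...
    stride=0,
    truncation_side="right",
    truncation_strategy="longest_first",
)

# ... and save_pretrained writes init_kwargs back into tokenizer_config.json.
tok.save_pretrained("local-dir")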