beomi committed
Commit 7d595ad
1 Parent(s): 7bfa8bb

cased 300 length version

Files changed (2):
  1. flax_model.msgpack +1 -1
  2. tokenizer_config.json +1 -1
flax_model.msgpack CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ba8690145741abefa5160a5918228bbb0058494b83d5daa5a4101f1d4a0555cf
+oid sha256:5eafa57ba24a6a98989db72ba4ee2ac301c0debc53b129dc9d57201aaa33795f
 size 1126812575
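
Only the Git LFS pointer changes here: the sha256 oid now points at the re-exported weights, while the file size stays the same. A minimal Python sketch for checking a downloaded flax_model.msgpack against the new pointer (the local file path is an assumption, not part of this commit):

# Minimal sketch: verify a downloaded flax_model.msgpack against the new
# LFS pointer by recomputing its SHA-256 oid. The path is hypothetical.
import hashlib

EXPECTED_OID = "5eafa57ba24a6a98989db72ba4ee2ac301c0debc53b129dc9d57201aaa33795f"

sha256 = hashlib.sha256()
with open("flax_model.msgpack", "rb") as f:
    # Read in chunks so the ~1.1 GB weights file is not loaded into memory at once.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)

print("oid matches pointer:", sha256.hexdigest() == EXPECTED_OID)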
tokenizer_config.json CHANGED
@@ -1 +1 @@
-{"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "do_basic_tokenize": true, "never_split": null, "special_tokens_map_file": "KcT5-base-cased/special_tokens_map.json", "name_or_path": "KcT5-base-cased", "tokenizer_class": "BertTokenizer"}
+{"do_lower_case": false, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "do_basic_tokenize": true, "never_split": null, "special_tokens_map_file": "special_tokens_map.json", "tokenizer_class": "BertTokenizer"}