Gizachew committed · verified
Commit 8ca379c
1 parent: 29512b9

Upload TrOCR processor for Ethiopic OCR

Files changed (2)
  1. tokenizer.json +0 -0
  2. tokenizer_config.json +1 -8
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -43,23 +43,16 @@
     }
   },
   "bos_token": "<s>",
-  "clean_up_tokenization_spaces": false,
+  "clean_up_tokenization_spaces": true,
   "cls_token": "<s>",
   "eos_token": "</s>",
   "errors": "replace",
   "mask_token": "<mask>",
-  "max_length": 128,
   "model_max_length": 512,
-  "pad_to_multiple_of": null,
   "pad_token": "<pad>",
-  "pad_token_type_id": 0,
-  "padding_side": "right",
   "processor_class": "TrOCRProcessor",
   "sep_token": "</s>",
-  "stride": 0,
   "tokenizer_class": "RobertaTokenizer",
   "trim_offsets": true,
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
   "unk_token": "<unk>"
 }
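
Most of the dropped keys are per-call encoding arguments (max_length, stride, truncation_strategy, pad_to_multiple_of, and the padding fields) rather than persistent tokenizer settings, so after this cleanup they are supplied at encode time instead. A minimal sketch of loading and using the uploaded processor; the repository id below is a hypothetical placeholder, not the actual repo name:

# A minimal sketch; "Gizachew/trocr-ethiopic" is a hypothetical repo id.
from transformers import TrOCRProcessor

processor = TrOCRProcessor.from_pretrained("Gizachew/trocr-ethiopic")

# Padding and truncation are passed per call now that the config no
# longer pins max_length, padding_side, stride, or truncation_strategy:
enc = processor.tokenizer(
    "ሰላም ዓለም",  # Ethiopic sample text ("hello world" in Amharic)
    padding="max_length",
    max_length=128,  # the value previously hard-coded in the config
    truncation=True,
)
print(enc["input_ids"][:10])

Keeping these as call-time arguments leaves the saved config minimal and avoids baking OCR-specific padding defaults into the tokenizer itself.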