bwang0911 committed
Commit 43fafe0 · verified · 1 Parent(s): 64ed0bb

Upload 4 files

Files changed (3)
  1. config.json +3 -7
  2. tokenizer.json +0 -0
  3. vocab.json +0 -0
config.json CHANGED
@@ -1,6 +1,6 @@
 {
   "architectures": [
-    "XLMRobertaForMaskedLM"
+    "RobertaForMaskedLM"
   ],
   "attention_probs_dropout_prob": 0.1,
   "bos_token_id": 0,
@@ -12,14 +12,10 @@
   "intermediate_size": 4096,
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,
-  "model_type": "xlm-roberta",
+  "model_type": "roberta",
   "num_attention_heads": 16,
   "num_hidden_layers": 24,
-  "output_past": true,
   "pad_token_id": 1,
-  "position_embedding_type": "absolute",
-  "transformers_version": "4.17.0.dev0",
   "type_vocab_size": 1,
-  "use_cache": true,
-  "vocab_size": 250002
+  "vocab_size": 50265
 }
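
The hunk above re-declares the checkpoint as a plain RoBERTa masked-LM: the architecture and model_type switch from XLM-RoBERTa to RoBERTa, and vocab_size shrinks from XLM-R's 250,002 to RoBERTa's 50,265 byte-level BPE tokens. A minimal sanity-check sketch after pulling this commit, assuming a local clone at ./model (hypothetical path; the embedding-shape comment also assumes the checkpoint's hidden size, which is not shown in the hunk):

```python
from transformers import AutoConfig, AutoModelForMaskedLM

# Hypothetical local path to a clone of this repo; swap in the actual repo id.
config = AutoConfig.from_pretrained("./model")
assert config.model_type == "roberta"                  # was "xlm-roberta"
assert config.vocab_size == 50265                      # was 250002
assert config.architectures == ["RobertaForMaskedLM"]  # was ["XLMRobertaForMaskedLM"]

# The uploaded weights must already have an embedding matrix of the new vocab
# size, or from_pretrained will raise a shape-mismatch error here.
model = AutoModelForMaskedLM.from_pretrained("./model")
print(model.get_input_embeddings().weight.shape)  # expect torch.Size([50265, hidden_size])
```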
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
vocab.json ADDED
The diff for this file is too large to render. See raw diff
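
Since vocab.json is newly added and tokenizer.json changed in the same commit, it is worth confirming that the tokenizer and the updated config agree on the vocabulary. A sketch, again assuming the hypothetical ./model clone:

```python
from transformers import AutoConfig, AutoTokenizer

config = AutoConfig.from_pretrained("./model")     # hypothetical path
tokenizer = AutoTokenizer.from_pretrained("./model")

# A RoBERTa-style byte-level BPE tokenizer should report the same 50,265
# entries that config.json now declares.
assert tokenizer.vocab_size == config.vocab_size, (
    tokenizer.vocab_size, config.vocab_size)
```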