leobitz committed on
Commit
76ec6fb
·
verified ·
1 Parent(s): 11fe198

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +3 -4
config.json CHANGED
@@ -1,13 +1,12 @@
1
  {
2
  "_name_or_path": "prajjwal1/bert-small",
3
  "architectures": [
4
- "TsgBertForSequenceClassification"
5
  ],
6
  "attention_probs_dropout_prob": 0.1,
7
  "backbone_output_dropout": 0.5,
8
  "chunk_overlap_size": 64,
9
  "classifier_dropout": 0.5,
10
- "classifier_num_layers": 1,
11
  "classifier_prob": 0.5,
12
  "hidden_act": "gelu",
13
  "hidden_dropout_prob": 0.1,
@@ -26,8 +25,8 @@
26
  "long_input": false,
27
  "max_len": 512,
28
  "max_position_embeddings": 512,
29
- "model_name": "tsg-bert",
30
- "model_type": "tsg-bert",
31
  "num_attention_heads": 8,
32
  "num_hidden_layers": 4,
33
  "pad_token_id": 0,
 
1
  {
2
  "_name_or_path": "prajjwal1/bert-small",
3
  "architectures": [
4
+ "BertForSequenceClassification"
5
  ],
6
  "attention_probs_dropout_prob": 0.1,
7
  "backbone_output_dropout": 0.5,
8
  "chunk_overlap_size": 64,
9
  "classifier_dropout": 0.5,
 
10
  "classifier_prob": 0.5,
11
  "hidden_act": "gelu",
12
  "hidden_dropout_prob": 0.1,
 
25
  "long_input": false,
26
  "max_len": 512,
27
  "max_position_embeddings": 512,
28
+ "model_name": "prajjwal1/bert-small",
29
+ "model_type": "bert",
30
  "num_attention_heads": 8,
31
  "num_hidden_layers": 4,
32
  "pad_token_id": 0,