KhaldiAbderrhmane committed
Commit accaf20 · verified · 1 Parent(s): 3eaaf2f

Update config.py

Files changed (1)
  1. config.py +28 -28
config.py CHANGED
@@ -1,28 +1,28 @@
-from transformers import PretrainedConfig, AutoConfig
-
-class BERTMultiAttentionConfig(PretrainedConfig):
-    model_type = "bert_multi_attention"
-    keys_to_ignore_at_inference = ["dropout"]
-
-    def __init__(
-        self,
-        transformer="bert-base-uncased",
-        hidden_size=768,
-        num_heads=8,
-        dropout=0.1,
-        rnn_hidden_size=128,
-        rnn_num_layers=2,
-        rnn_bidirectional=True,
-        **kwargs
-    ):
-        super().__init__(**kwargs)
-        self.transformer = transformer
-        self.hidden_size = hidden_size
-        self.num_heads = num_heads
-        self.dropout = dropout
-        self.rnn_hidden_size = rnn_hidden_size
-        self.rnn_num_layers = rnn_num_layers
-        self.rnn_bidirectional = rnn_bidirectional
-
-
-AutoConfig.register("bert_multi_attention", BERTMultiAttentionConfig)
+from transformers import PretrainedConfig, AutoConfig
+
+class BERTMultiAttentionConfig(PretrainedConfig):
+    model_type = "bert_multi_attention"
+    # keys_to_ignore_at_inference = ["dropout"]
+
+    def __init__(
+        self,
+        transformer="bert-base-uncased",
+        hidden_size=768,
+        num_heads=8,
+        dropout=0.1,
+        rnn_hidden_size=128,
+        rnn_num_layers=2,
+        rnn_bidirectional=True,
+        **kwargs
+    ):
+        super().__init__(**kwargs)
+        self.transformer = transformer
+        self.hidden_size = hidden_size
+        self.num_heads = num_heads
+        self.dropout = dropout
+        self.rnn_hidden_size = rnn_hidden_size
+        self.rnn_num_layers = rnn_num_layers
+        self.rnn_bidirectional = rnn_bidirectional
+
+
+AutoConfig.register("bert_multi_attention", BERTMultiAttentionConfig)
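
For context, a minimal usage sketch (not part of this commit) of how the registered config could be created, saved, and reloaded through AutoConfig. The import path and the save directory below are illustrative assumptions; only BERTMultiAttentionConfig, its parameters, and the "bert_multi_attention" registration come from the file itself.

# Usage sketch, assuming this file is importable as config.py and that
# importing it runs the AutoConfig.register() call above.
from transformers import AutoConfig

from config import BERTMultiAttentionConfig

# Build a config, overriding a couple of the defaults defined in __init__.
cfg = BERTMultiAttentionConfig(num_heads=12, rnn_hidden_size=256)
print(cfg.model_type)          # "bert_multi_attention"
print(cfg.rnn_bidirectional)   # True (default)

# Round-trip through disk: save_pretrained() writes config.json, and
# AutoConfig.from_pretrained() resolves it back to BERTMultiAttentionConfig
# because the "bert_multi_attention" model_type was registered.
cfg.save_pretrained("./bert-multi-attention-example")  # example path
reloaded = AutoConfig.from_pretrained("./bert-multi-attention-example")
assert isinstance(reloaded, BERTMultiAttentionConfig)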