KhaldiAbderrhmane committed (verified)
Commit 71bd99f · 1 Parent(s): 7cc41ff

Upload config

Files changed (2)
  1. config.json +18 -19
  2. config.py +28 -28
config.json CHANGED
@@ -1,19 +1,18 @@
- {
-   "architectures": [
-     "BERTMultiAttentionModel"
-   ],
-   "auto_map": {
-     "AutoConfig": "config.BERTMultiAttentionConfig",
-     "AutoModel": "model.BERTMultiAttentionModel"
-   },
-   "dropout": 0.1,
-   "hidden_size": 768,
-   "model_type": "bert_multi_attention",
-   "num_heads": 8,
-   "rnn_bidirectional": true,
-   "rnn_hidden_size": 128,
-   "rnn_num_layers": 2,
-   "torch_dtype": "float32",
-   "transformer": "bert-base-uncased",
-   "transformers_version": "4.38.2"
- }
+ {
+   "architectures": [
+     "BERTMultiAttentionModel"
+   ],
+   "auto_map": {
+     "AutoConfig": "config.BERTMultiAttentionConfig"
+   },
+   "dropout": 0.1,
+   "hidden_size": 768,
+   "model_type": "bert_multi_attention",
+   "num_heads": 8,
+   "rnn_bidirectional": true,
+   "rnn_hidden_size": 128,
+   "rnn_num_layers": 2,
+   "torch_dtype": "float32",
+   "transformer": "bert-base-uncased",
+   "transformers_version": "4.37.2"
+ }
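
The change drops the AutoModel entry from auto_map, so the updated config.json only maps AutoConfig onto the custom class defined in config.py, and it records transformers 4.37.2. As a minimal sketch of how a config carrying this auto_map would be loaded (the repo id below is a placeholder, not taken from this commit), trust_remote_code is needed so the custom class from the repository is used:

from transformers import AutoConfig

# Placeholder repo id; trust_remote_code lets Transformers import
# config.BERTMultiAttentionConfig from the repository instead of a built-in class.
config = AutoConfig.from_pretrained(
    "KhaldiAbderrhmane/bert-multi-attention",  # hypothetical repo id
    trust_remote_code=True,
)
print(config.model_type)  # "bert_multi_attention"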
 
config.py CHANGED
@@ -1,28 +1,28 @@
- from transformers import PretrainedConfig, AutoConfig
-
- class BERTMultiAttentionConfig(PretrainedConfig):
-     model_type = "bert_multi_attention"
-     # keys_to_ignore_at_inference = ["dropout"]
-
-     def __init__(
-         self,
-         transformer="bert-base-uncased",
-         hidden_size=768,
-         num_heads=8,
-         dropout=0.1,
-         rnn_hidden_size=128,
-         rnn_num_layers=2,
-         rnn_bidirectional=True,
-         **kwargs
-     ):
-         super().__init__(**kwargs)
-         self.transformer = transformer
-         self.hidden_size = hidden_size
-         self.num_heads = num_heads
-         self.dropout = dropout
-         self.rnn_hidden_size = rnn_hidden_size
-         self.rnn_num_layers = rnn_num_layers
-         self.rnn_bidirectional = rnn_bidirectional
-
-
- AutoConfig.register("bert_multi_attention", BERTMultiAttentionConfig)
+ from transformers import PretrainedConfig, AutoConfig
+
+ class BERTMultiAttentionConfig(PretrainedConfig):
+     model_type = "bert_multi_attention"
+     keys_to_ignore_at_inference = ["dropout"]
+
+     def __init__(
+         self,
+         transformer="bert-base-uncased",
+         hidden_size=768,
+         num_heads=8,
+         dropout=0.1,
+         rnn_hidden_size=128,
+         rnn_num_layers=2,
+         rnn_bidirectional=True,
+         **kwargs
+     ):
+         super().__init__(**kwargs)
+         self.transformer = transformer
+         self.hidden_size = hidden_size
+         self.num_heads = num_heads
+         self.dropout = dropout
+         self.rnn_hidden_size = rnn_hidden_size
+         self.rnn_num_layers = rnn_num_layers
+         self.rnn_bidirectional = rnn_bidirectional
+
+
+ AutoConfig.register("bert_multi_attention", BERTMultiAttentionConfig)
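
The updated config.py uncomments keys_to_ignore_at_inference and still registers the custom config under the "bert_multi_attention" model type. A minimal sketch, assuming config.py is importable as written, of instantiating the class with its defaults and serializing it (PretrainedConfig.save_pretrained writes a config.json much like the one in this commit):

from config import BERTMultiAttentionConfig

# Defaults mirror the hyperparameters shown in config.json above.
cfg = BERTMultiAttentionConfig()
cfg.save_pretrained("./bert_multi_attention")  # writes config.json to this directory
print(cfg.rnn_hidden_size, cfg.rnn_bidirectional)  # 128 True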