Update modeling_hf_nomic_bert.py
modeling_hf_nomic_bert.py CHANGED
@@ -805,12 +805,12 @@ class NomicBertAttention(nn.Module):
         return attn_output
 
 
-class NomicBertBlock(nn.Module):
+class NomicBertBlock(NomicBertPreTrainedModel):
     def __init__(
         self,
         config,
     ):
-        super().__init__()
+        super().__init__(config=config)
         self.prenorm = config.prenorm
         self.fused_dropout_add_ln = config.fused_dropout_add_ln
 
@@ -1218,4 +1218,4 @@ class NomicBertForSequenceClassification(NomicBertPreTrainedModel):
             logits=logits,
             hidden_states=outputs.hidden_states,
             attentions=outputs.attentions,
-        )
+        )
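A minimal, self-contained sketch of the pattern this change moves to (the ToyConfig / ToyPreTrainedModel / ToyBlock names below are hypothetical, not part of modeling_hf_nomic_bert.py): when a sub-block inherits from the model's PreTrainedModel subclass instead of plain nn.Module, its constructor has to forward the config, because transformers' PreTrainedModel.__init__ expects a PretrainedConfig instance.

import torch.nn as nn
from transformers import PretrainedConfig, PreTrainedModel


class ToyConfig(PretrainedConfig):
    # Hypothetical config mirroring the fields read in the diff above.
    model_type = "toy"

    def __init__(self, prenorm=True, fused_dropout_add_ln=False, **kwargs):
        super().__init__(**kwargs)
        self.prenorm = prenorm
        self.fused_dropout_add_ln = fused_dropout_add_ln


class ToyPreTrainedModel(PreTrainedModel):
    # Stand-in for NomicBertPreTrainedModel: a thin PreTrainedModel subclass.
    config_class = ToyConfig
    base_model_prefix = "toy"


class ToyBlock(ToyPreTrainedModel):
    def __init__(self, config):
        # Forwarding the config mirrors super().__init__(config=config) in the
        # diff; PreTrainedModel stores it on self.config and hooks the block
        # into the usual weight-init machinery.
        super().__init__(config=config)
        self.prenorm = config.prenorm
        self.fused_dropout_add_ln = config.fused_dropout_add_ln
        self.norm = nn.LayerNorm(8)


block = ToyBlock(ToyConfig())
print(block.config.prenorm)  # True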