Commit 322a037 (verified) by ahatamiz
Parent: 50a8150

Upload model

configuration_mambavision.py CHANGED
@@ -23,15 +23,4 @@ class MambaVisionConfig(PretrainedConfig):
         self.in_dim = in_dim
         self.mlp_ratio = mlp_ratio
         self.drop_path_rate = drop_path_rate
-        super().__init__(**kwargs)
-
-
-mambavisionT_config = MambaVisionConfig(depths=[1, 3, 8, 4],
-                                        num_heads=[2, 4, 8, 16],
-                                        window_size=[8, 8, 14, 7],
-                                        dim=80,
-                                        in_dim=32,
-                                        mlp_ratio=4,
-                                        drop_path_rate=0.2,)
-
-mambavisionT_config.save_pretrained("MambaVision-T-1K")
+        super().__init__(**kwargs)
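This change removes the module-level example that built a MambaVision-T config and called save_pretrained at import time, so importing the file no longer writes to disk as a side effect. The same usage still works from a separate script; a minimal sketch, reusing the exact hyperparameters and output path from the removed lines:

# build_config.py: sketch of the removed example, run explicitly rather than on import.
from configuration_mambavision import MambaVisionConfig

# MambaVision-T hyperparameters, as in the removed module-level code.
mambavisionT_config = MambaVisionConfig(
    depths=[1, 3, 8, 4],
    num_heads=[2, 4, 8, 16],
    window_size=[8, 8, 14, 7],
    dim=80,
    in_dim=32,
    mlp_ratio=4,
    drop_path_rate=0.2,
)

# Writes config.json under the MambaVision-T-1K/ directory.
mambavisionT_config.save_pretrained("MambaVision-T-1K")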
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:db9a49aabc94c9032bedecc485b64fe30dc5ab039af2ba2c277020b5f981ace2
+oid sha256:ea57086055c06a6c2895ae7e7b4cd527ec74ab801d692cba7a8f7a0d88c95aea
 size 127219000
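Only the Git LFS pointer changes here: the weights were re-uploaded under a new sha256 digest, while the payload size (127219000 bytes) is unchanged. A quick standard-library sketch for checking a downloaded model.safetensors against the new pointer:

# verify_weights.py: compare a local file's sha256 with the LFS pointer above.
import hashlib

EXPECTED = "ea57086055c06a6c2895ae7e7b4cd527ec74ab801d692cba7a8f7a0d88c95aea"

digest = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        digest.update(chunk)

assert digest.hexdigest() == EXPECTED, "file does not match the LFS pointer"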
modeling_mambavision.py CHANGED
@@ -28,7 +28,10 @@ from einops import rearrange, repeat
 
 from transformers import PreTrainedModel
 
-from configuration_mambavision import MambaVisionConfig
+try:
+    from .configuration_mambavision import MambaVisionConfig
+except:
+    from configuration_mambavision import MambaVisionConfig
 
 
 def _cfg(url='', **kwargs):
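The guarded import lets the module resolve its config in both contexts: the relative form applies when the file is imported as part of a package (as transformers does when loading custom code from the Hub), and the absolute form covers running or importing the file stand-alone. A sketch of the packaged path this enables; the repo id is an assumption inferred from the "MambaVision-T-1K" name in this commit:

# load_model.py: sketch of loading via transformers' remote-code path, which
# imports modeling_mambavision as a package module (hitting the relative import).
from transformers import AutoModel

model = AutoModel.from_pretrained(
    "nvidia/MambaVision-T-1K",  # assumed Hub repo id; substitute the actual one
    trust_remote_code=True,     # permit the repo's custom MambaVision classes to run
)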