Undefined Symbol Error
My PyTorch version: 2.5.1
CUDA version: 12.6
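
A quick way to confirm what the virtualenv actually sees (a minimal sketch; only the 2.5.1 value above is from my setup, the rest will vary):

import torch

# PyTorch build visible to the interpreter and the CUDA toolkit it was compiled against.
print(torch.__version__)          # expected: 2.5.1
print(torch.version.cuda)         # CUDA version the installed wheel targets
print(torch.cuda.is_available())  # sanity check that the GPU is reachable
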
{
"name": "RuntimeError",
"message": "Failed to import transformers.models.mamba.modeling_mamba because of the following error (look up to see its traceback):
/home/leon/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/selective_scan_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZN3c104cuda9SetDeviceEi",
"stack": "---------------------------------------------------------------------------
ImportError Traceback (most recent call last)
File ~/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/transformers/utils/import_utils.py:1788, in _LazyModule._get_module(self, module_name)
1787 try:
-> 1788 return importlib.import_module("." + module_name, self.__name__)
1789 except Exception as e:
File /usr/lib/python3.10/importlib/__init__.py:126, in import_module(name, package)
125 level += 1
--> 126 return _bootstrap._gcd_import(name[level:], package, level)
File <frozen importlib._bootstrap>:1050, in _gcd_import(name, package, level)
File <frozen importlib._bootstrap>:1027, in _find_and_load(name, import_)
File <frozen importlib._bootstrap>:1006, in _find_and_load_unlocked(name, import_)
File <frozen importlib._bootstrap>:688, in _load_unlocked(spec)
File <frozen importlib._bootstrap_external>:883, in exec_module(self, module)
File <frozen importlib._bootstrap>:241, in _call_with_frames_removed(f, *args, **kwds)
File ~/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/transformers/models/mamba/modeling_mamba.py:49
48 if is_mamba_ssm_available():
---> 49 from mamba_ssm.ops.selective_scan_interface import mamba_inner_fn, selective_scan_fn
50 from mamba_ssm.ops.triton.selective_state_update import selective_state_update
File ~/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/mamba_ssm/__init__.py:3
1 __version__ = "2.2.2"
----> 3 from mamba_ssm.ops.selective_scan_interface import selective_scan_fn, mamba_inner_fn
4 from mamba_ssm.modules.mamba_simple import Mamba
File ~/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/mamba_ssm/ops/selective_scan_interface.py:16
14 causal_conv1d_cuda = None
---> 16 import selective_scan_cuda
19 class SelectiveScanFn(torch.autograd.Function):
ImportError: /home/leon/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/selective_scan_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZN3c104cuda9SetDeviceEi
The above exception was the direct cause of the following exception:
RuntimeError Traceback (most recent call last)
Cell In[3], line 3
1 # Load the pretrained tokenizer and model
2 tokenizer = AutoTokenizer.from_pretrained("state-spaces/mamba-790m-hf")
----> 3 model = AutoModelForCausalLM.from_pretrained("state-spaces/mamba-790m-hf")
File ~/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:563, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
559 return model_class.from_pretrained(
560 pretrained_model_name_or_path, *model_args, config=config, **hub_kwargs, **kwargs
561 )
562 elif type(config) in cls._model_mapping.keys():
--> 563 model_class = _get_model_class(config, cls._model_mapping)
564 return model_class.from_pretrained(
565 pretrained_model_name_or_path, *model_args, config=config, **hub_kwargs, **kwargs
566 )
567 raise ValueError(
568 f"Unrecognized configuration class {config.class} for this kind of AutoModel: {cls.name}.
"
569 f"Model type should be one of {', '.join(c.name for c in cls._model_mapping.keys())}."
570 )
File ~/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:388, in _get_model_class(config, model_mapping)
387 def _get_model_class(config, model_mapping):
--> 388 supported_models = model_mapping[type(config)]
389 if not isinstance(supported_models, (list, tuple)):
390 return supported_models
File ~/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:763, in _LazyAutoMapping.__getitem__(self, key)
761 if model_type in self._model_mapping:
762 model_name = self._model_mapping[model_type]
--> 763 return self._load_attr_from_module(model_type, model_name)
765 # Maybe there was several model types associated with this config.
766 model_types = [k for k, v in self._config_mapping.items() if v == key.__name__]
File ~/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:777, in _LazyAutoMapping._load_attr_from_module(self, model_type, attr)
775 if module_name not in self._modules:
776 self._modules[module_name] = importlib.import_module(f".{module_name}", "transformers.models")
--> 777 return getattribute_from_module(self._modules[module_name], attr)
File ~/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:693, in getattribute_from_module(module, attr)
691 if isinstance(attr, tuple):
692 return tuple(getattribute_from_module(module, a) for a in attr)
--> 693 if hasattr(module, attr):
694 return getattr(module, attr)
695 # Some of the mappings have entries model_type -> object of another model type. In that case we try to grab the
696 # object at the top level.
File ~/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/transformers/utils/import_utils.py:1776, in _LazyModule.__getattr__(self, name)
1774 value = Placeholder
1775 elif name in self._class_to_module.keys():
-> 1776 module = self._get_module(self._class_to_module[name])
1777 value = getattr(module, name)
1778 elif name in self._modules:
File ~/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/transformers/utils/import_utils.py:1790, in _LazyModule._get_module(self, module_name)
1788 return importlib.import_module("." + module_name, self.__name__)
1789 except Exception as e:
-> 1790 raise RuntimeError(
1791 f"Failed to import {self.name}.{module_name} because of the following error (look up to see its"
1792 f" traceback):
{e}"
1793 ) from e
RuntimeError: Failed to import transformers.models.mamba.modeling_mamba because of the following error (look up to see its traceback):
/home/leon/tesis/spanish-legal-ir/myenv/lib/python3.10/site-packages/selective_scan_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZN3c104cuda9SetDeviceEi"
}
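
For what it's worth, the missing symbol _ZN3c104cuda9SetDeviceEi demangles to c10::cuda::SetDevice(int), i.e. a symbol from PyTorch's own C++ libraries, so the prebuilt selective_scan_cuda extension looks like it was built against a different PyTorch than the installed 2.5.1. A minimal sketch that reproduces the failure without going through transformers and lists the installed wheel versions without importing them (package names are the PyPI ones; causal-conv1d is an assumption and may not be present in every setup):

from importlib.metadata import version, PackageNotFoundError

# Read installed wheel versions from package metadata; nothing is imported here.
for pkg in ("torch", "mamba-ssm", "causal-conv1d"):
    try:
        print(pkg, version(pkg))
    except PackageNotFoundError:
        print(pkg, "not installed")

# Importing the compiled extension directly raises the same undefined-symbol
# ImportError as in the traceback above (its frame at selective_scan_interface.py:16),
# which points at the mamba-ssm binary itself rather than at transformers.
import selective_scan_cuda  # noqa: F401
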