Import Error
I got the error below after fairseq is imported; could you please advise the correct versions of the related libraries?
ValueError Traceback (most recent call last)
Cell In[5], line 3
1 import torch
2 from transformers import AutoModel
----> 3 model = AutoModel.from_pretrained(
4 # "scb10x/llama3.1-typhoon2-audio-8b-instruct",
5 "scb10x/llama3.1-typhoon2-audio-8b-instruct",
6 torch_dtype=torch.float16,
7 trust_remote_code=True
8 )
9 model.to("cuda")
File /opt/conda/lib/python3.11/site-packages/transformers/models/auto/auto_factory.py:553, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
551 if has_remote_code and trust_remote_code:
552 class_ref = config.auto_map[cls.__name__]
--> 553 model_class = get_class_from_dynamic_module(
554 class_ref, pretrained_model_name_or_path, code_revision=code_revision, **hub_kwargs, **kwargs
555 )
556 _ = hub_kwargs.pop("code_revision", None)
557 cls.register(config.__class__, model_class, exist_ok=True)
File /opt/conda/lib/python3.11/site-packages/transformers/dynamic_module_utils.py:541, in get_class_from_dynamic_module(class_reference, pretrained_model_name_or_path, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, repo_type, code_revision, **kwargs)
539 code_revision = revision
540 # And lastly we get the class inside our newly created module
--> 541 final_module = get_cached_module_file(
542 repo_id,
543 module_file + ".py",
544 cache_dir=cache_dir,
545 force_download=force_download,
546 resume_download=resume_download,
547 proxies=proxies,
548 token=token,
549 revision=code_revision,
550 local_files_only=local_files_only,
551 repo_type=repo_type,
552 )
553 return get_class_in_module(class_name, final_module, force_reload=force_download)
File /opt/conda/lib/python3.11/site-packages/transformers/dynamic_module_utils.py:366, in get_cached_module_file(pretrained_model_name_or_path, module_file, cache_dir, force_download, resume_download, proxies, token, revision, local_files_only, repo_type, _commit_hash, **deprecated_kwargs)
363 raise
365 # Check we have all the requirements in our environment
--> 366 modules_needed = check_imports(resolved_module_file)
368 # Now we move the module inside our cached dynamic modules.
369 full_submodule = TRANSFORMERS_DYNAMIC_MODULE_NAME + os.path.sep + submodule
File /opt/conda/lib/python3.11/site-packages/transformers/dynamic_module_utils.py:186, in check_imports(filename)
184 for imp in imports:
185 try:
--> 186 importlib.import_module(imp)
187 except ImportError as exception:
188 logger.warning(f"Encountered exception while importing {imp}: {exception}")
File /opt/conda/lib/python3.11/importlib/__init__.py:126, in import_module(name, package)
124 break
125 level += 1
--> 126 return _bootstrap._gcd_import(name[level:], package, level)
File <frozen importlib._bootstrap>:1204, in _gcd_import(name, package, level)
File <frozen importlib._bootstrap>:1176, in _find_and_load(name, import_)
File <frozen importlib._bootstrap>:1147, in _find_and_load_unlocked(name, import_)
File <frozen importlib._bootstrap>:690, in _load_unlocked(spec)
File <frozen importlib._bootstrap_external>:940, in exec_module(self, module)
File <frozen importlib._bootstrap>:241, in _call_with_frames_removed(f, *args, **kwds)
File /opt/conda/lib/python3.11/site-packages/fairseq/__init__.py:20
17 __all__ = ["pdb"]
19 # backwards compatibility to support from fairseq.X import Y
---> 20 from fairseq.distributed import utils as distributed_utils
21 from fairseq.logging import meters, metrics, progress_bar # noqa
23 sys.modules["fairseq.distributed_utils"] = distributed_utils
File /opt/conda/lib/python3.11/site-packages/fairseq/distributed/__init__.py:7
1 # Copyright (c) Facebook, Inc. and its affiliates.
2 #
3 # This source code is licensed under the MIT license found in the
4 # LICENSE file in the root directory of this source tree.
6 from .distributed_timeout_wrapper import DistributedTimeoutWrapper
----> 7 from .fully_sharded_data_parallel import (
8 fsdp_enable_wrap,
9 fsdp_wrap,
10 FullyShardedDataParallel,
11 )
12 from .legacy_distributed_data_parallel import LegacyDistributedDataParallel
13 from .module_proxy_wrapper import ModuleProxyWrapper
File /opt/conda/lib/python3.11/site-packages/fairseq/distributed/fully_sharded_data_parallel.py:10
7 from typing import Optional
9 import torch
---> 10 from fairseq.dataclass.configs import DistributedTrainingConfig
11 from fairseq.distributed import utils as dist_utils
14 try:
File /opt/conda/lib/python3.11/site-packages/fairseq/dataclass/__init__.py:6
1 # Copyright (c) Facebook, Inc. and its affiliates.
2 #
3 # This source code is licensed under the MIT license found in the
4 # LICENSE file in the root directory of this source tree.
----> 6 from .configs import FairseqDataclass
7 from .constants import ChoiceEnum
10 __all__ = [
11 "FairseqDataclass",
12 "ChoiceEnum",
13 ]
File /opt/conda/lib/python3.11/site-packages/fairseq/dataclass/configs.py:1104
1095 ema_update_freq: int = field(
1096 default=1, metadata={"help": "Do EMA update every this many model updates"}
1097 )
1098 ema_fp32: bool = field(
1099 default=False,
1100 metadata={"help": "If true, store EMA model in fp32 even if model is in fp16"},
1101 )
-> 1104 @dataclass
1105 class FairseqConfig(FairseqDataclass):
1106 common: CommonConfig = CommonConfig()
1107 common_eval: CommonEvalConfig = CommonEvalConfig()
File /opt/conda/lib/python3.11/dataclasses.py:1230, in dataclass(cls, init, repr, eq, order, unsafe_hash, frozen, match_args, kw_only, slots, weakref_slot)
1227 return wrap
1229 # We're called as @dataclass without parens.
-> 1230 return wrap(cls)
File /opt/conda/lib/python3.11/dataclasses.py:1220, in dataclass.<locals>.wrap(cls)
1219 def wrap(cls):
-> 1220 return _process_class(cls, init, repr, eq, order, unsafe_hash,
1221 frozen, match_args, kw_only, slots,
1222 weakref_slot)
File /opt/conda/lib/python3.11/dataclasses.py:958, in _process_class(cls, init, repr, eq, order, unsafe_hash, frozen, match_args, kw_only, slots, weakref_slot)
955 kw_only = True
956 else:
957 # Otherwise it's a field of some type.
--> 958 cls_fields.append(_get_field(cls, name, type, kw_only))
960 for f in cls_fields:
961 fields[f.name] = f
File /opt/conda/lib/python3.11/dataclasses.py:815, in _get_field(cls, a_name, a_type, default_kw_only)
811 # For real fields, disallow mutable defaults. Use unhashable as a proxy
812 # indicator for mutability. Read the hash attribute from the class,
813 # not the instance.
814 if f._field_type is _FIELD and f.default.__class__.__hash__ is None:
--> 815 raise ValueError(f'mutable default {type(f.default)} for field '
816 f'{f.name} is not allowed: use default_factory')
818 return f
ValueError: mutable default <class 'fairseq.dataclass.configs.CommonConfig'> for field common is not allowed: use default_factory
Fairseq has a known dataclass incompatibility with Python 3.11 (see the minimal reproduction below).
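For context: since Python 3.11, the dataclasses module rejects any field default whose class is unhashable, and instances of eq-generated dataclasses (such as fairseq's CommonConfig) are exactly that, which is why configs.py fails at import time. A minimal sketch reproducing the error and the default_factory fix (illustrative names, not fairseq's actual code):

from dataclasses import dataclass, field

@dataclass
class CommonConfig:        # stand-in for fairseq.dataclass.configs.CommonConfig
    log_interval: int = 100

try:
    @dataclass
    class BrokenConfig:
        # Python 3.11+ raises here: the default is a dataclass instance,
        # whose class sets __hash__ = None, so it is treated as mutable
        common: CommonConfig = CommonConfig()
except ValueError as err:
    print(err)  # mutable default ... for field common is not allowed: use default_factory

@dataclass
class FixedConfig:
    # the fix: build a fresh instance per object via default_factory
    common: CommonConfig = field(default_factory=CommonConfig)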
Could you please try Python 3.10 with the following dependency versions and try again?
pip install pip==24.0
pip install transformers==4.45.2
pip install fairseq==0.12.2 # fairseq requires pip==24.0 to install and only works on Python 3.10
pip install flash-attn
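After reinstalling, a quick sanity check (a minimal sketch using only the packages above) can confirm the environment before retrying the model load:

import sys
from importlib.metadata import version

# fairseq 0.12.2 only installs and imports cleanly on Python 3.10
assert sys.version_info[:2] == (3, 10), f"expected Python 3.10, got {sys.version}"

for pkg in ("pip", "transformers", "fairseq"):
    print(pkg, version(pkg))

import fairseq  # should now import without the dataclass ValueError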