runtime error
Exit code: 1. Reason: Traceback (most recent call last):
  File "/home/user/app/app.py", line 21, in <module>
    model = AutoModelForCausalLM.from_pretrained(
  File "/usr/local/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 559, in from_pretrained
    return model_class.from_pretrained(
  File "/usr/local/lib/python3.10/site-packages/transformers/modeling_utils.py", line 4130, in from_pretrained
    model = cls(config, *model_args, **model_kwargs)
  File "/home/user/.cache/huggingface/modules/transformers_modules/BAAI/Bunny-v1_1-Llama-3-8B-V/dcb31dbc9a4dbe8a33401f11d26a7694f14012f9/modeling_bunny_llama.py", line 2860, in __init__
    self.model = BunnyLlamaModel(config)
  File "/home/user/.cache/huggingface/modules/transformers_modules/BAAI/Bunny-v1_1-Llama-3-8B-V/dcb31dbc9a4dbe8a33401f11d26a7694f14012f9/modeling_bunny_llama.py", line 2852, in __init__
    super(BunnyLlamaModel, self).__init__(config)
  File "/home/user/.cache/huggingface/modules/transformers_modules/BAAI/Bunny-v1_1-Llama-3-8B-V/dcb31dbc9a4dbe8a33401f11d26a7694f14012f9/modeling_bunny_llama.py", line 780, in __init__
    super(BunnyMetaModel, self).__init__(config)
  File "/home/user/.cache/huggingface/modules/transformers_modules/BAAI/Bunny-v1_1-Llama-3-8B-V/dcb31dbc9a4dbe8a33401f11d26a7694f14012f9/modeling_bunny_llama.py", line 2198, in __init__
    self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
  File "/usr/local/lib/python3.10/site-packages/torch/nn/modules/sparse.py", line 167, in __init__
    torch.empty((num_embeddings, embedding_dim), **factory_kwargs),
  File "/usr/local/lib/python3.10/site-packages/torch/utils/_device.py", line 106, in __torch_function__
    return func(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/torch/cuda/__init__.py", line 319, in _lazy_init
    torch._C._cuda_init()
RuntimeError: Found no NVIDIA driver on your system. Please check that you have an NVIDIA GPU and installed a driver from http://www.nvidia.com/Download/index.aspx
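The failure happens while the model's remote code allocates nn.Embedding: a default device of "cuda" is in effect (visible in the torch/utils/_device.py frame), so torch tries to initialize CUDA on a host that has no NVIDIA driver. Below is a minimal sketch of a CPU fallback, assuming app.py currently calls torch.set_default_device('cuda') unconditionally as in the model card example; the dtype choice and device_map handling are assumptions, not the Space's actual code:

import torch
from transformers import AutoModelForCausalLM

# Assumption: choose the device at runtime instead of hard-coding "cuda",
# so the embedding allocation in the remote modeling code never triggers
# torch.cuda initialization on a CPU-only host.
has_cuda = torch.cuda.is_available()
torch.set_default_device("cuda" if has_cuda else "cpu")

model = AutoModelForCausalLM.from_pretrained(
    "BAAI/Bunny-v1_1-Llama-3-8B-V",
    torch_dtype=torch.float16 if has_cuda else torch.float32,  # fp16 only on GPU (assumed)
    device_map="auto" if has_cuda else None,
    trust_remote_code=True,
)

Alternatively, since the error simply means no GPU is visible to the container, assigning GPU hardware to the Space (instead of the CPU-only tier) also avoids it.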