Cannot load model

#19
by tridm - opened

Hi team,

I tried to load your model following the instructions on Hugging Face, but got the error below:

ModuleNotFoundError                       Traceback (most recent call last)
File ~/envs/tridao/lib/python3.12/site-packages/transformers/utils/import_utils.py:2302, in _LazyModule.__getattr__(self, name)
   2301 try:
-> [2302](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/site-packages/transformers/utils/import_utils.py:2302)     module = self._get_module(self._class_to_module[name])
   2303     value = getattr(module, name)

File ~/envs/tridao/lib/python3.12/site-packages/transformers/utils/import_utils.py:2332, in _LazyModule._get_module(self, module_name)
   2331 except Exception as e:
-> [2332](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/site-packages/transformers/utils/import_utils.py:2332)     raise e

File ~/envs/tridao/lib/python3.12/site-packages/transformers/utils/import_utils.py:2330, in _LazyModule._get_module(self, module_name)
   2329 try:
-> [2330](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/site-packages/transformers/utils/import_utils.py:2330)     return importlib.import_module("." + module_name, self.__name__)
   2331 except Exception as e:

File ~/envs/tridao/lib/python3.12/importlib/__init__.py:90, in import_module(name, package)
     89         level += 1
---> [90](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/importlib/__init__.py:90) return _bootstrap._gcd_import(name[level:], package, level)

File <frozen importlib._bootstrap>:1387, in _gcd_import(name, package, level)

File <frozen importlib._bootstrap>:1360, in _find_and_load(name, import_)

File <frozen importlib._bootstrap>:1331, in _find_and_load_unlocked(name, import_)

File <frozen importlib._bootstrap>:935, in _load_unlocked(spec)

File <frozen importlib._bootstrap_external>:999, in exec_module(self, module)

File <frozen importlib._bootstrap>:488, in _call_with_frames_removed(f, *args, **kwds)

File ~/envs/tridao/lib/python3.12/site-packages/transformers/models/hunyuan_v1_dense/modeling_hunyuan_v1_dense.py:34
     33 from ...masking_utils import create_causal_mask
---> [34](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/site-packages/transformers/models/hunyuan_v1_dense/modeling_hunyuan_v1_dense.py:34) from ...modeling_layers import GenericForSequenceClassification, GradientCheckpointingLayer
     35 from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast

File ~/envs/tridao/lib/python3.12/site-packages/transformers/modeling_layers.py:28
     27 from .models.auto import AutoModel
---> [28](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/site-packages/transformers/modeling_layers.py:28) from .processing_utils import Unpack
     29 from .utils import TransformersKwargs, auto_docstring, can_return_tuple, logging

File ~/envs/tridao/lib/python3.12/site-packages/transformers/processing_utils.py:34
     32 from huggingface_hub.errors import EntryNotFoundError
---> [34](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/site-packages/transformers/processing_utils.py:34) from .audio_utils import load_audio
     35 from .dynamic_module_utils import custom_object_save

File ~/envs/tridao/lib/python3.12/site-packages/transformers/audio_utils.py:45
     44 if is_librosa_available():
---> [45](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/site-packages/transformers/audio_utils.py:45)     import librosa
     47     # TODO: 

@eustlb
	, we actually don't need librosa but soxr is installed with librosa

File ~/.local/lib/python3.12/site-packages/librosa/__init__.py:212
      3 """
      4 Core IO and DSP
      5 ===============
   (...)    209     set_fftlib
    210 """
--> [212](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/.local/lib/python3.12/site-packages/librosa/__init__.py:212) import lazy_loader as lazy
    213 from .version import version as __version__

ModuleNotFoundError: No module named 'lazy_loader'

The above exception was the direct cause of the following exception:

ModuleNotFoundError                       Traceback (most recent call last)
Cell In[5], [line 8](vscode-notebook-cell:?execution_count=5&line=8)
      6 tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, use_fast=True, padding_side='left')
      7     # Load model
----> [8](vscode-notebook-cell:?execution_count=5&line=8) model = AutoModelForCausalLM.from_pretrained(
      9     MODEL_ID,
     10     # dtype=DTYPE,
     11     quantization_config=quant_config,
     12     device_map=DEVICE,
     13     # attn_implementation="flash_attention_2",
     14     use_cache=True,
     15     trust_remote_code=True,
     16 )
     17 model = model.eval()

File ~/envs/tridao/lib/python3.12/site-packages/transformers/models/auto/auto_factory.py:601, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
    597     return model_class.from_pretrained(
    598         pretrained_model_name_or_path, *model_args, config=config, **hub_kwargs, **kwargs
    599     )
    600 elif type(config) in cls._model_mapping:
--> [601](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/site-packages/transformers/models/auto/auto_factory.py:601)     model_class = _get_model_class(config, cls._model_mapping)
    602     if model_class.config_class == config.sub_configs.get("text_config", None):
    603         config = config.get_text_config()

File ~/envs/tridao/lib/python3.12/site-packages/transformers/models/auto/auto_factory.py:394, in _get_model_class(config, model_mapping)
    393 def _get_model_class(config, model_mapping):
--> [394](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/site-packages/transformers/models/auto/auto_factory.py:394)     supported_models = model_mapping[type(config)]
    395     if not isinstance(supported_models, (list, tuple)):
    396         return supported_models

File ~/envs/tridao/lib/python3.12/site-packages/transformers/models/auto/auto_factory.py:807, in _LazyAutoMapping.__getitem__(self, key)
    805 if model_type in self._model_mapping:
    806     model_name = self._model_mapping[model_type]
--> [807](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/site-packages/transformers/models/auto/auto_factory.py:807)     return self._load_attr_from_module(model_type, model_name)
    809 # Maybe there was several model types associated with this config.
    810 model_types = [k for k, v in self._config_mapping.items() if v == key.__name__]

File ~/envs/tridao/lib/python3.12/site-packages/transformers/models/auto/auto_factory.py:821, in _LazyAutoMapping._load_attr_from_module(self, model_type, attr)
    819 if module_name not in self._modules:
    820     self._modules[module_name] = importlib.import_module(f".{module_name}", "transformers.models")
--> [821](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/site-packages/transformers/models/auto/auto_factory.py:821) return getattribute_from_module(self._modules[module_name], attr)

File ~/envs/tridao/lib/python3.12/site-packages/transformers/models/auto/auto_factory.py:733, in getattribute_from_module(module, attr)
    731 if isinstance(attr, tuple):
    732     return tuple(getattribute_from_module(module, a) for a in attr)
--> [733](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/site-packages/transformers/models/auto/auto_factory.py:733) if hasattr(module, attr):
    734     return getattr(module, attr)
    735 # Some of the mappings have entries model_type -> object of another model type. In that case we try to grab the
    736 # object at the top level.

File ~/envs/tridao/lib/python3.12/site-packages/transformers/utils/import_utils.py:2305, in _LazyModule.__getattr__(self, name)
   2303         value = getattr(module, name)
   2304     except (ModuleNotFoundError, RuntimeError) as e:
-> [2305](https://vscode-remote+tunnel-002bgpu-002dl4-002dshare-002dtridm.vscode-resource.vscode-cdn.net/home/jovyan/data-center/tridao/vs-code/VMLU/prepare_dataset/~/envs/tridao/lib/python3.12/site-packages/transformers/utils/import_utils.py:2305)         raise ModuleNotFoundError(
   2306             f"Could not import module '{name}'. Are this object's requirements defined correctly?"
   2307         ) from e
   2309 elif name in self._modules:
   2310     try:

ModuleNotFoundError: Could not import module 'HunYuanDenseV1ForCausalLM'. Are this object's requirements defined correctly?

This is my load model code:

# Configure 8-bit quantization via bitsandbytes to reduce GPU memory use.
quant_config = BitsAndBytesConfig(
    load_in_8bit=True,
    llm_int8_threshold=6.0,  # outlier threshold for the LLM.int8() mixed-precision scheme
)

# Load the tokenizer; left-padding is typical for batched causal-LM generation.
# NOTE(review): MODEL_ID is defined elsewhere in the notebook — presumably the
# Hugging Face repo id of the HunYuan model; confirm against the cell that sets it.
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, use_fast=True, padding_side='left')

# Load the causal-LM weights with quantization applied.
# The traceback above fails inside this call while transformers lazily imports
# the HunYuanDenseV1ForCausalLM module (root cause: missing 'lazy_loader').
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    # dtype=DTYPE,
    quantization_config=quant_config,
    device_map=DEVICE,  # DEVICE defined elsewhere — e.g. "cuda" or an explicit map; verify
    # attn_implementation="flash_attention_2",
    use_cache=True,  # enable KV cache for faster generation
    trust_remote_code=True,  # allows repo-provided modeling code to execute — only for trusted repos
)
# Switch to inference mode (disables dropout etc.).
model = model.eval()

My transformers library version is 4.56.0, as suggested on Hugging Face.

Please help me fix this error. From the traceback, the root cause appears to be `ModuleNotFoundError: No module named 'lazy_loader'`, raised while importing `librosa` — the model-class import error is only a consequence of it.

Sign up or log in to comment